release: 0.2.0-alpha.68 #481

Merged · 3 commits · Jul 11, 2025
2 changes: 1 addition & 1 deletion .release-please-manifest.json
@@ -1,3 +1,3 @@
{
  ".": "0.2.0-alpha.67"
  ".": "0.2.0-alpha.68"
}
13 changes: 13 additions & 0 deletions CHANGELOG.md
@@ -5,6 +5,19 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/)
and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html).

## 0.2.0-alpha.68 (2025-07-11)

Full Changelog: [v0.2.0-alpha.67...v0.2.0-alpha.68](https://github.com/openlayer-ai/openlayer-python/compare/v0.2.0-alpha.67...v0.2.0-alpha.68)

### Chores

* **readme:** fix version rendering on pypi ([4a1c3f3](https://github.com/openlayer-ai/openlayer-python/commit/4a1c3f3214500c461a1df35b3e33228af7a3c15e))


### Refactors

* Implement conditional imports for all integration modules ([#480](https://github.com/openlayer-ai/openlayer-python/issues/480)) ([bf99015](https://github.com/openlayer-ai/openlayer-python/commit/bf99015e798b18a5c8fb1da9c20321b239bad077))

## 0.2.0-alpha.67 (2025-07-10)

Full Changelog: [v0.2.0-alpha.66...v0.2.0-alpha.67](https://github.com/openlayer-ai/openlayer-python/compare/v0.2.0-alpha.66...v0.2.0-alpha.67)
3 changes: 2 additions & 1 deletion README.md
@@ -1,6 +1,7 @@
# Openlayer Python API library

[![PyPI version](<https://img.shields.io/pypi/v/openlayer.svg?label=pypi%20(stable)>)](https://pypi.org/project/openlayer/)
<!-- prettier-ignore -->
[![PyPI version](https://img.shields.io/pypi/v/openlayer.svg?label=pypi%20(stable))](https://pypi.org/project/openlayer/)

The Openlayer Python library provides convenient access to the Openlayer REST API from any Python 3.8+
application. The library includes type definitions for all request params and response fields,
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -1,6 +1,6 @@
[project]
name = "openlayer"
version = "0.2.0-alpha.67"
version = "0.2.0-alpha.68"
description = "The official Python library for the openlayer API"
dynamic = ["readme"]
license = "Apache-2.0"
2 changes: 1 addition & 1 deletion src/openlayer/_version.py
@@ -1,4 +1,4 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

__title__ = "openlayer"
__version__ = "0.2.0-alpha.67" # x-release-please-version
__version__ = "0.2.0-alpha.68" # x-release-please-version
24 changes: 18 additions & 6 deletions src/openlayer/lib/integrations/anthropic_tracer.py
@@ -4,18 +4,25 @@
import logging
import time
from functools import wraps
from typing import Any, Dict, Iterator, Optional, Union
from typing import Any, Dict, Iterator, Optional, Union, TYPE_CHECKING

import anthropic
try:
    import anthropic
    HAVE_ANTHROPIC = True
except ImportError:
    HAVE_ANTHROPIC = False

if TYPE_CHECKING:
    import anthropic

from ..tracing import tracer

logger = logging.getLogger(__name__)


def trace_anthropic(
    client: anthropic.Anthropic,
) -> anthropic.Anthropic:
    client: "anthropic.Anthropic",
) -> "anthropic.Anthropic":
    """Patch the Anthropic client to trace chat completions.

    The following information is collected for each chat completion:
@@ -42,6 +49,11 @@ def trace_anthropic(
    anthropic.Anthropic
        The patched Anthropic client.
    """
    if not HAVE_ANTHROPIC:
        raise ImportError(
            "Anthropic library is not installed. Please install it with: pip install anthropic"
        )

    create_func = client.messages.create

    @wraps(create_func)
@@ -180,7 +192,7 @@ def handle_non_streaming_create(
    *args,
    inference_id: Optional[str] = None,
    **kwargs,
) -> anthropic.types.Message:
) -> "anthropic.types.Message":
    """Handles the create method when streaming is disabled.

    Parameters
@@ -227,7 +239,7 @@ def handle_non_streaming_create(


def parse_non_streaming_output_data(
    response: anthropic.types.Message,
    response: "anthropic.types.Message",
) -> Union[str, Dict[str, Any], None]:
    """Parses the output data from a non-streaming completion.
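Note for reviewers: the pattern above, repeated in the other tracers below, is what keeps each integration module importable when its SDK is absent. The real import is attempted once at module load and recorded in a `HAVE_*` flag; the `if TYPE_CHECKING:` import is seen only by static type checkers, so the now-quoted annotations still resolve; and the public entry point raises a clear `ImportError` only when actually called. A minimal standalone sketch of the same pattern (names mirror the diff; the `return client` body is elided for brevity):

```python
from typing import TYPE_CHECKING

try:
    import anthropic  # real import; succeeds only if the SDK is installed

    HAVE_ANTHROPIC = True
except ImportError:
    HAVE_ANTHROPIC = False

if TYPE_CHECKING:
    # Evaluated only by static type checkers, never at runtime,
    # so string annotations like "anthropic.Anthropic" still resolve.
    import anthropic


def trace_anthropic(client: "anthropic.Anthropic") -> "anthropic.Anthropic":
    # Fail at call time, not import time, when the SDK is missing.
    if not HAVE_ANTHROPIC:
        raise ImportError(
            "Anthropic library is not installed. "
            "Please install it with: pip install anthropic"
        )
    return client  # patching logic elided in this sketch
```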
20 changes: 16 additions & 4 deletions src/openlayer/lib/integrations/async_openai_tracer.py
@@ -4,9 +4,16 @@
import logging
import time
from functools import wraps
from typing import Any, AsyncIterator, Optional, Union
from typing import Any, AsyncIterator, Optional, Union, TYPE_CHECKING

import openai
try:
    import openai
    HAVE_OPENAI = True
except ImportError:
    HAVE_OPENAI = False

if TYPE_CHECKING:
    import openai

from .openai_tracer import (
    get_model_parameters,
@@ -19,8 +26,8 @@


def trace_async_openai(
    client: Union[openai.AsyncOpenAI, openai.AsyncAzureOpenAI],
) -> Union[openai.AsyncOpenAI, openai.AsyncAzureOpenAI]:
    client: Union["openai.AsyncOpenAI", "openai.AsyncAzureOpenAI"],
) -> Union["openai.AsyncOpenAI", "openai.AsyncAzureOpenAI"]:
    """Patch the AsyncOpenAI or AsyncAzureOpenAI client to trace chat completions.

    The following information is collected for each chat completion:
@@ -47,6 +54,11 @@ def trace_async_openai(
    Union[openai.AsyncOpenAI, openai.AsyncAzureOpenAI]
        The patched AsyncOpenAI client.
    """
    if not HAVE_OPENAI:
        raise ImportError(
            "OpenAI library is not installed. Please install it with: pip install openai"
        )

    is_azure_openai = isinstance(client, openai.AsyncAzureOpenAI)
    create_func = client.chat.completions.create
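The observable effect of the guard, sketched under the assumption that the `openai` package is not installed in the environment:

```python
# Importing the integration module no longer raises when openai is absent;
# the ImportError surfaces only once the tracer is actually invoked.
from openlayer.lib.integrations.async_openai_tracer import trace_async_openai

try:
    trace_async_openai(client=None)  # placeholder client, for illustration only
except ImportError as exc:
    print(exc)
    # -> OpenAI library is not installed. Please install it with: pip install openai
```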
20 changes: 16 additions & 4 deletions src/openlayer/lib/integrations/groq_tracer.py
@@ -4,18 +4,25 @@
import logging
import time
from functools import wraps
from typing import Any, Dict, Iterator, Optional, Union
from typing import Any, Dict, Iterator, Optional, Union, TYPE_CHECKING

import groq
try:
    import groq
    HAVE_GROQ = True
except ImportError:
    HAVE_GROQ = False

if TYPE_CHECKING:
    import groq

from ..tracing import tracer

logger = logging.getLogger(__name__)


def trace_groq(
    client: groq.Groq,
) -> groq.Groq:
    client: "groq.Groq",
) -> "groq.Groq":
    """Patch the Groq client to trace chat completions.

    The following information is collected for each chat completion:
@@ -42,6 +49,11 @@ def trace_groq(
    groq.Groq
        The patched Groq client.
    """
    if not HAVE_GROQ:
        raise ImportError(
            "Groq library is not installed. Please install it with: pip install groq"
        )

    create_func = client.chat.completions.create

    @wraps(create_func)
44 changes: 31 additions & 13 deletions src/openlayer/lib/integrations/langchain_callback.py
@@ -2,11 +2,19 @@

# pylint: disable=unused-argument
import time
from typing import Any, Dict, List, Optional, Union
from typing import Any, Dict, List, Optional, Union, TYPE_CHECKING
from uuid import UUID

from langchain import schema as langchain_schema
from langchain.callbacks.base import BaseCallbackHandler
try:
    from langchain import schema as langchain_schema
    from langchain.callbacks.base import BaseCallbackHandler
    HAVE_LANGCHAIN = True
except ImportError:
    HAVE_LANGCHAIN = False

if TYPE_CHECKING:
    from langchain import schema as langchain_schema
    from langchain.callbacks.base import BaseCallbackHandler

from ..tracing import tracer, steps, traces, enums
from .. import utils
@@ -18,10 +26,20 @@
}


class OpenlayerHandler(BaseCallbackHandler):
if HAVE_LANGCHAIN:
    BaseCallbackHandlerClass = BaseCallbackHandler
else:
    BaseCallbackHandlerClass = object


class OpenlayerHandler(BaseCallbackHandlerClass):  # type: ignore[misc]
    """LangChain callback handler that logs to Openlayer."""

    def __init__(self, **kwargs: Any) -> None:
        if not HAVE_LANGCHAIN:
            raise ImportError(
                "LangChain library is not installed. Please install it with: pip install langchain"
            )
        super().__init__()
        self.metadata: Dict[str, Any] = kwargs or {}
        self.steps: Dict[UUID, steps.Step] = {}
@@ -197,7 +215,7 @@ def _convert_step_objects_recursively(self, step: steps.Step) -> None:
    def _convert_langchain_objects(self, obj: Any) -> Any:
        """Recursively convert LangChain objects to JSON-serializable format."""
        # Explicit check for LangChain BaseMessage and its subclasses
        if isinstance(obj, langchain_schema.BaseMessage):
        if HAVE_LANGCHAIN and isinstance(obj, langchain_schema.BaseMessage):
            return self._message_to_dict(obj)

        # Handle ChatPromptValue objects which contain messages
@@ -249,7 +267,7 @@ def _convert_langchain_objects(self, obj: Any) -> Any:
        # For everything else, convert to string
        return str(obj)

    def _message_to_dict(self, message: langchain_schema.BaseMessage) -> Dict[str, str]:
    def _message_to_dict(self, message: "langchain_schema.BaseMessage") -> Dict[str, str]:
        """Convert a LangChain message to a JSON-serializable dictionary."""
        message_type = getattr(message, "type", "user")

@@ -262,7 +280,7 @@ def _message_to_dict(self, message: langchain_schema.BaseMessage) -> Dict[str, str]:
        return {"role": role, "content": str(message.content)}

    def _messages_to_prompt_format(
        self, messages: List[List[langchain_schema.BaseMessage]]
        self, messages: List[List["langchain_schema.BaseMessage"]]
    ) -> List[Dict[str, str]]:
        """Convert LangChain messages to Openlayer prompt format using
        unified conversion."""
@@ -302,7 +320,7 @@ def _extract_model_info(
        }

    def _extract_token_info(
        self, response: langchain_schema.LLMResult
        self, response: "langchain_schema.LLMResult"
    ) -> Dict[str, Any]:
        """Extract token information generically from LLM response."""
        llm_output = response.llm_output or {}
@@ -340,7 +358,7 @@ def _extract_token_info(
            "tokens": token_usage.get("total_tokens", 0),
        }

    def _extract_output(self, response: langchain_schema.LLMResult) -> str:
    def _extract_output(self, response: "langchain_schema.LLMResult") -> str:
        """Extract output text from LLM response."""
        output = ""
        for generations in response.generations:
@@ -384,7 +402,7 @@ def on_llm_start(
    def on_chat_model_start(
        self,
        serialized: Dict[str, Any],
        messages: List[List[langchain_schema.BaseMessage]],
        messages: List[List["langchain_schema.BaseMessage"]],
        *,
        run_id: UUID,
        parent_run_id: Optional[UUID] = None,
@@ -414,7 +432,7 @@ def on_chat_model_start(

    def on_llm_end(
        self,
        response: langchain_schema.LLMResult,
        response: "langchain_schema.LLMResult",
        *,
        run_id: UUID,
        parent_run_id: Optional[UUID] = None,
@@ -590,7 +608,7 @@ def on_text(self, text: str, **kwargs: Any) -> Any:

    def on_agent_action(
        self,
        action: langchain_schema.AgentAction,
        action: "langchain_schema.AgentAction",
        *,
        run_id: UUID,
        parent_run_id: Optional[UUID] = None,
@@ -612,7 +630,7 @@ def on_agent_action(

    def on_agent_finish(
        self,
        finish: langchain_schema.AgentFinish,
        finish: "langchain_schema.AgentFinish",
        *,
        run_id: UUID,
        parent_run_id: Optional[UUID] = None,
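langchain_callback.py needs an extra twist because the handler subclasses a LangChain type: the base class itself is chosen conditionally, falling back to `object` so the `class` statement never fails at import time, and the constructor re-checks availability. A condensed sketch of that fallback, mirroring the diff above:

```python
try:
    from langchain.callbacks.base import BaseCallbackHandler

    HAVE_LANGCHAIN = True
except ImportError:
    HAVE_LANGCHAIN = False

# Fall back to `object` so defining the class never fails when LangChain is absent.
BaseCallbackHandlerClass = BaseCallbackHandler if HAVE_LANGCHAIN else object


class OpenlayerHandler(BaseCallbackHandlerClass):  # type: ignore[misc]
    """LangChain callback handler that logs to Openlayer."""

    def __init__(self, **kwargs) -> None:
        # Refuse to instantiate without LangChain; a bare handler would be useless.
        if not HAVE_LANGCHAIN:
            raise ImportError(
                "LangChain library is not installed. "
                "Please install it with: pip install langchain"
            )
        super().__init__()
        self.metadata = kwargs or {}
```

Runtime checks against LangChain types get the same treatment (`if HAVE_LANGCHAIN and isinstance(...)`), since referencing `langchain_schema` after a failed import would raise a `NameError`.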
24 changes: 18 additions & 6 deletions src/openlayer/lib/integrations/mistral_tracer.py
@@ -4,18 +4,25 @@
import logging
import time
from functools import wraps
from typing import Any, Dict, Iterator, Optional, Union
from typing import Any, Dict, Iterator, Optional, Union, TYPE_CHECKING

import mistralai
try:
    import mistralai
    HAVE_MISTRAL = True
except ImportError:
    HAVE_MISTRAL = False

if TYPE_CHECKING:
    import mistralai

from ..tracing import tracer

logger = logging.getLogger(__name__)


def trace_mistral(
    client: mistralai.Mistral,
) -> mistralai.Mistral:
    client: "mistralai.Mistral",
) -> "mistralai.Mistral":
    """Patch the Mistral client to trace chat completions.

    The following information is collected for each chat completion:
@@ -42,6 +49,11 @@ def trace_mistral(
    mistralai.Mistral
        The patched Mistral client.
    """
    if not HAVE_MISTRAL:
        raise ImportError(
            "Mistral library is not installed. Please install it with: pip install mistralai"
        )

    stream_func = client.chat.stream
    create_func = client.chat.complete

@@ -184,7 +196,7 @@ def handle_non_streaming_create(
    *args,
    inference_id: Optional[str] = None,
    **kwargs,
) -> mistralai.models.ChatCompletionResponse:
) -> "mistralai.models.ChatCompletionResponse":
    """Handles the create method when streaming is disabled.

    Parameters
@@ -231,7 +243,7 @@ def handle_non_streaming_create(


def parse_non_streaming_output_data(
    response: mistralai.models.ChatCompletionResponse,
    response: "mistralai.models.ChatCompletionResponse",
) -> Union[str, Dict[str, Any], None]:
    """Parses the output data from a non-streaming completion.
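Typical usage is unchanged when the SDK is present. A sketch assuming `mistralai` is installed and an API key is configured (the model name is illustrative):

```python
import mistralai

from openlayer.lib.integrations.mistral_tracer import trace_mistral

# trace_mistral wraps both client.chat.complete and client.chat.stream.
client = trace_mistral(mistralai.Mistral(api_key="..."))

response = client.chat.complete(
    model="mistral-small-latest",  # illustrative model name
    messages=[{"role": "user", "content": "Hello!"}],
)
```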