From a30e0fdcdb9a6c4a9069c1bb9ae7c84f59d3a67a Mon Sep 17 00:00:00 2001 From: Vinicius Mello Date: Thu, 10 Jul 2025 12:54:57 -0300 Subject: [PATCH 1/8] feat(tests): add integration tests for conditional imports in modules - Introduced a new test suite to validate that integration modules handle optional dependencies correctly. - Ensured modules can be imported when dependencies are missing and provide helpful error messages. - Verified that all integration modules exist and can be imported when dependencies are available. - Implemented comprehensive checks for availability flags and graceful import handling. - This addition prevents regressions in conditional import handling across all integrations. --- tests/test_integration_conditional_imports.py | 315 ++++++++++++++++++ 1 file changed, 315 insertions(+) create mode 100644 tests/test_integration_conditional_imports.py diff --git a/tests/test_integration_conditional_imports.py b/tests/test_integration_conditional_imports.py new file mode 100644 index 00000000..956c47e5 --- /dev/null +++ b/tests/test_integration_conditional_imports.py @@ -0,0 +1,315 @@ +#!/usr/bin/env python3 +""" +CI Test: Integration modules conditional import handling. + +This test ensures that all integration modules in src/openlayer/lib/integrations/ +handle optional dependencies correctly: +1. Can be imported when dependency is not available +2. Provide helpful error messages when trying to use without dependency +3. Do not have type annotation errors +4. Follow consistent patterns for conditional imports + +This prevents regressions in conditional import handling across all integrations. 
+""" + +import sys +import subprocess +import tempfile +import textwrap +from pathlib import Path +from typing import List, Tuple + +# Note: pytest is imported automatically when running via pytest +# This file can also be run standalone for manual testing + + +# Mapping of integration modules to their optional dependencies +INTEGRATION_DEPENDENCIES = { + "openai_agents": ["agents"], + "openai_tracer": ["openai"], + "async_openai_tracer": ["openai"], + "anthropic_tracer": ["anthropic"], + "mistral_tracer": ["mistralai"], + "groq_tracer": ["groq"], + "langchain_callback": ["langchain", "langchain_core", "langchain_community"], +} + +# Expected patterns for integration modules +EXPECTED_PATTERNS = { + "availability_flag": True, # Should have HAVE_ flag + "helpful_error": True, # Should give helpful error when instantiating without dependency + "graceful_import": True, # Should import without errors when dependency missing +} + + +def create_import_blocker_script(blocked_packages: List[str]) -> str: + """Create a script that blocks specific package imports.""" + blocked_packages_str = ", ".join(f'"{pkg}"' for pkg in blocked_packages) + + return textwrap.dedent(f""" + import sys + import builtins + from typing import Any + + # Store original import function + original_import = builtins.__import__ + + def blocking_import(name: str, *args: Any, **kwargs: Any) -> Any: + '''Block imports of specific packages for testing.''' + blocked_packages = [{blocked_packages_str}] + + # Check if this import should be blocked + for blocked_pkg in blocked_packages: + if name == blocked_pkg or name.startswith(blocked_pkg + "."): + raise ImportError(f"No module named '{{name}}' (blocked for testing)") + + # Allow all other imports + return original_import(name, *args, **kwargs) + + # Install the import blocker + builtins.__import__ = blocking_import + """) + + +def create_integration_test_script(module_name: str, blocked_packages: List[str]) -> str: + """Create a test script for a 
specific integration module.""" + return textwrap.dedent(f""" + import sys + import os + from pathlib import Path + + # Add src directory to path + src_path = Path.cwd() / "src" + sys.path.insert(0, str(src_path)) + + def test_integration_module(): + '''Test integration module with blocked dependencies.''' + module_name = "{module_name}" + blocked_packages = {blocked_packages} + + print(f"๐Ÿงช Testing {{module_name}} without {{blocked_packages}}...") + + try: + # Try to import the integration module + import_path = f"openlayer.lib.integrations.{{module_name}}" + module = __import__(import_path, fromlist=[module_name]) + + print(f"โœ… Module {{module_name}} imported successfully") + + # Check for availability flag pattern + availability_flags = [attr for attr in dir(module) + if attr.startswith('HAVE_') and + isinstance(getattr(module, attr), bool)] + + if availability_flags: + for flag in availability_flags: + flag_value = getattr(module, flag) + print(f"โœ… Found availability flag: {{flag}} = {{flag_value}}") + if flag_value: + print(f"โš ๏ธ WARNING: {{flag}} is True, but dependencies are blocked!") + else: + print(f"โš ๏ธ WARNING: No availability flag found (HAVE_* pattern)") + + # Try to find main integration classes (skip utility classes) + integration_classes = [] + for attr_name in dir(module): + attr = getattr(module, attr_name) + if (isinstance(attr, type) and + attr.__module__ == module.__name__ and + not attr_name.startswith('_') and + # Skip utility classes that aren't integration points + not attr_name.endswith('Data') and + # Look for typical integration class patterns + ('Tracer' in attr_name or 'Processor' in attr_name or 'Callback' in attr_name)): + integration_classes.append((attr_name, attr)) + + if not integration_classes: + print("โš ๏ธ WARNING: No integration classes found") + return True + + # Test instantiation of integration classes + for class_name, integration_class in integration_classes: + try: + print(f"๐Ÿงช Testing instantiation of 
{{class_name}}...") + instance = integration_class() + print(f"โŒ FAIL: {{class_name}} instantiation should have failed without dependencies") + return False + except ImportError as e: + expected_keywords = ["required", "install", "pip install"] + error_msg = str(e).lower() + if any(keyword in error_msg for keyword in expected_keywords): + print(f"โœ… {{class_name}} failed with helpful error: {{e}}") + else: + print(f"โš ๏ธ {{class_name}} failed but error message could be more helpful: {{e}}") + except Exception as e: + print(f"โŒ FAIL: {{class_name}} failed with unexpected error: {{e}}") + return False + + print(f"โœ… All tests passed for {{module_name}}") + return True + + except ImportError as e: + print(f"โŒ FAIL: Could not import {{module_name}}: {{e}}") + return False + except Exception as e: + print(f"โŒ FAIL: Unexpected error testing {{module_name}}: {{e}}") + import traceback + traceback.print_exc() + return False + + if __name__ == "__main__": + success = test_integration_module() + sys.exit(0 if success else 1) + """) + + +def run_integration_test(module_name: str, dependencies: List[str]) -> Tuple[bool, str]: + """Run the integration test for a specific module.""" + # Create temporary files for the test + with tempfile.NamedTemporaryFile(mode='w', suffix='.py', delete=False) as blocker_file: + blocker_file.write(create_import_blocker_script(dependencies)) + blocker_script = blocker_file.name + + with tempfile.NamedTemporaryFile(mode='w', suffix='.py', delete=False) as test_file: + test_file.write(create_integration_test_script(module_name, dependencies)) + test_script = test_file.name + + try: + # Run the test in a subprocess + cmd = [ + sys.executable, + '-c', + f"exec(open('{blocker_script}').read()); exec(open('{test_script}').read())" + ] + + result = subprocess.run( + cmd, + cwd=Path.cwd(), + capture_output=True, + text=True, + timeout=30 + ) + + output = result.stdout + if result.stderr: + output += f"\nSTDERR:\n{result.stderr}" + + return 
result.returncode == 0, output + + except subprocess.TimeoutExpired: + return False, "Test timed out" + except Exception as e: + return False, f"Test execution failed: {e}" + finally: + # Clean up temporary files + try: + Path(blocker_script).unlink() + Path(test_script).unlink() + except: + pass + + +class TestIntegrationConditionalImports: + """Test class for integration conditional imports.""" + + def test_all_integrations_handle_missing_dependencies(self) -> None: + """Test that all integration modules handle missing dependencies correctly.""" + print("\n๐Ÿš€ Testing all integration modules for conditional import handling...") + + failed_modules: List[str] = [] + all_results: List[Tuple[str, bool, str]] = [] + + for module_name, dependencies in INTEGRATION_DEPENDENCIES.items(): + print(f"\n{'='*60}") + print(f"Testing: {module_name}") + print(f"Blocked dependencies: {dependencies}") + print('='*60) + + success, output = run_integration_test(module_name, dependencies) + + print(output) + + if not success: + failed_modules.append(module_name) + print(f"โŒ FAILED: {module_name}") + else: + print(f"โœ… PASSED: {module_name}") + + all_results.append((module_name, success, output)) + + # Summary + print(f"\n{'='*60}") + print("SUMMARY") + print('='*60) + + total_modules = len(INTEGRATION_DEPENDENCIES) + passed_modules = total_modules - len(failed_modules) + + print(f"Total modules tested: {total_modules}") + print(f"Passed: {passed_modules}") + print(f"Failed: {len(failed_modules)}") + + if failed_modules: + print(f"\nFailed modules: {', '.join(failed_modules)}") + + # Show details for failed modules + for module_name, success, output in all_results: + if not success: + print(f"\n--- {module_name} failure details ---") + print(output) + + # Assert all modules passed + assert len(failed_modules) == 0, f"The following modules failed conditional import tests: {failed_modules}" + + def test_integration_modules_exist(self) -> None: + """Test that all expected integration 
modules exist.""" + integrations_dir = Path("src/openlayer/lib/integrations") + + for module_name in INTEGRATION_DEPENDENCIES.keys(): + module_file = integrations_dir / f"{module_name}.py" + assert module_file.exists(), f"Integration module {module_name}.py does not exist" + + def test_can_import_integrations_when_dependencies_available(self) -> None: + """Test that integration modules can be imported when their dependencies are available.""" + print("\n๐Ÿงช Testing integration imports when dependencies are available...") + + # This test runs in the normal environment where dependencies may be available + failed_imports: List[str] = [] + + for module_name in INTEGRATION_DEPENDENCIES.keys(): + try: + import_path = f"openlayer.lib.integrations.{module_name}" + __import__(import_path) + print(f"โœ… {module_name} imported successfully") + except ImportError as e: + # This is expected if the dependency is not installed + print(f"โš ๏ธ {module_name} import failed (dependency not installed): {e}") + except Exception as e: + print(f"โŒ {module_name} import failed with unexpected error: {e}") + failed_imports.append(module_name) + + assert len(failed_imports) == 0, f"Unexpected import errors: {failed_imports}" + + +if __name__ == "__main__": + # Run the tests when called directly + test_instance = TestIntegrationConditionalImports() + + print("๐Ÿงช Running Integration Conditional Import Tests") + print("=" * 60) + + try: + test_instance.test_integration_modules_exist() + print("โœ… All integration modules exist") + + test_instance.test_can_import_integrations_when_dependencies_available() + print("โœ… Integration imports work when dependencies available") + + test_instance.test_all_integrations_handle_missing_dependencies() + print("โœ… All integration modules handle missing dependencies correctly") + + print("\n๐ŸŽ‰ All tests passed!") + + except Exception as e: + print(f"\n๐Ÿ’ฅ Test failed: {e}") + sys.exit(1) \ No newline at end of file From 
3229f412b13c07a2dbac922782e432afcdd28c1c Mon Sep 17 00:00:00 2001 From: Vinicius Mello Date: Thu, 10 Jul 2025 12:55:19 -0300 Subject: [PATCH 2/8] feat(tracer): enhance conditional imports and type hinting for Anthropic integration - Implemented conditional import handling for the `anthropic` library, allowing for graceful degradation when the library is not installed. - Added type hints for `anthropic` types using forward references to improve code clarity and maintainability. - Introduced an informative error message when the `anthropic` library is missing, guiding users on how to install it. - This update ensures better compatibility and user experience when working with optional dependencies in the Anthropic integration. --- .../lib/integrations/anthropic_tracer.py | 24 ++++++++++++++----- 1 file changed, 18 insertions(+), 6 deletions(-) diff --git a/src/openlayer/lib/integrations/anthropic_tracer.py b/src/openlayer/lib/integrations/anthropic_tracer.py index d14a5f4b..054a89b2 100644 --- a/src/openlayer/lib/integrations/anthropic_tracer.py +++ b/src/openlayer/lib/integrations/anthropic_tracer.py @@ -4,9 +4,16 @@ import logging import time from functools import wraps -from typing import Any, Dict, Iterator, Optional, Union +from typing import Any, Dict, Iterator, Optional, Union, TYPE_CHECKING -import anthropic +try: + import anthropic + HAVE_ANTHROPIC = True +except ImportError: + HAVE_ANTHROPIC = False + +if TYPE_CHECKING: + import anthropic from ..tracing import tracer @@ -14,8 +21,8 @@ def trace_anthropic( - client: anthropic.Anthropic, -) -> anthropic.Anthropic: + client: "anthropic.Anthropic", +) -> "anthropic.Anthropic": """Patch the Anthropic client to trace chat completions. The following information is collected for each chat completion: @@ -42,6 +49,11 @@ def trace_anthropic( anthropic.Anthropic The patched Anthropic client. """ + if not HAVE_ANTHROPIC: + raise ImportError( + "Anthropic library is not installed. 
Please install it with: pip install anthropic" + ) + create_func = client.messages.create @wraps(create_func) @@ -180,7 +192,7 @@ def handle_non_streaming_create( *args, inference_id: Optional[str] = None, **kwargs, -) -> anthropic.types.Message: +) -> "anthropic.types.Message": """Handles the create method when streaming is disabled. Parameters @@ -227,7 +239,7 @@ def handle_non_streaming_create( def parse_non_streaming_output_data( - response: anthropic.types.Message, + response: "anthropic.types.Message", ) -> Union[str, Dict[str, Any], None]: """Parses the output data from a non-streaming completion. From eda36c017068d6375389f8b6d1ae610328c7d12e Mon Sep 17 00:00:00 2001 From: Vinicius Mello Date: Thu, 10 Jul 2025 12:55:35 -0300 Subject: [PATCH 3/8] feat(tracer): improve conditional imports and type hinting for OpenAI integration - Implemented conditional import handling for the `openai` library, allowing for graceful degradation when the library is not installed. - Enhanced type hints using forward references for `openai` types to improve code clarity and maintainability. - Introduced informative error messages when the `openai` library is missing, guiding users on how to install it. - This update ensures better compatibility and user experience when working with optional dependencies in the OpenAI integration. 
--- .../lib/integrations/openai_tracer.py | 29 +++++++++++++++---- 1 file changed, 23 insertions(+), 6 deletions(-) diff --git a/src/openlayer/lib/integrations/openai_tracer.py b/src/openlayer/lib/integrations/openai_tracer.py index 3d8773c5..0c787aa2 100644 --- a/src/openlayer/lib/integrations/openai_tracer.py +++ b/src/openlayer/lib/integrations/openai_tracer.py @@ -4,9 +4,16 @@ import logging import time from functools import wraps -from typing import Any, Dict, Iterator, List, Optional, Union +from typing import Any, Dict, Iterator, List, Optional, Union, TYPE_CHECKING -import openai +try: + import openai + HAVE_OPENAI = True +except ImportError: + HAVE_OPENAI = False + +if TYPE_CHECKING: + import openai from ..tracing import tracer @@ -14,8 +21,8 @@ def trace_openai( - client: Union[openai.OpenAI, openai.AzureOpenAI], -) -> Union[openai.OpenAI, openai.AzureOpenAI]: + client: Union["openai.OpenAI", "openai.AzureOpenAI"], +) -> Union["openai.OpenAI", "openai.AzureOpenAI"]: """Patch the OpenAI or AzureOpenAI client to trace chat completions. The following information is collected for each chat completion: @@ -42,6 +49,11 @@ def trace_openai( Union[openai.OpenAI, openai.AzureOpenAI] The patched OpenAI client. """ + if not HAVE_OPENAI: + raise ImportError( + "OpenAI library is not installed. Please install it with: pip install openai" + ) + is_azure_openai = isinstance(client, openai.AzureOpenAI) create_func = client.chat.completions.create @@ -358,12 +370,17 @@ def parse_non_streaming_output_data( # --------------------------- OpenAI Assistants API -------------------------- # def trace_openai_assistant_thread_run( - client: openai.OpenAI, run: "openai.types.beta.threads.run.Run" + client: "openai.OpenAI", run: "openai.types.beta.threads.run.Run" ) -> None: """Trace a run from an OpenAI assistant. 
Once the run is completed, the thread data is published to Openlayer, along with the latency, and number of tokens used.""" + if not HAVE_OPENAI: + raise ImportError( + "OpenAI library is not installed. Please install it with: pip install openai" + ) + _type_check_run(run) # Do nothing if the run is not completed @@ -398,7 +415,7 @@ def trace_openai_assistant_thread_run( def _type_check_run(run: "openai.types.beta.threads.run.Run") -> None: """Validate the run object.""" - if not isinstance(run, openai.types.beta.threads.run.Run): + if HAVE_OPENAI and not isinstance(run, openai.types.beta.threads.run.Run): raise ValueError(f"Expected a Run object, but got {type(run)}.") From 9801e916acc5cff4ec9cd9badc0a5e7c0c267dde Mon Sep 17 00:00:00 2001 From: Vinicius Mello Date: Thu, 10 Jul 2025 12:55:50 -0300 Subject: [PATCH 4/8] feat(tracer): enhance conditional imports and type hinting for Mistral integration - Implemented conditional import handling for the `mistralai` library, allowing for graceful degradation when the library is not installed. - Improved type hints using forward references for `mistralai` types to enhance code clarity and maintainability. - Introduced an informative error message when the `mistralai` library is missing, guiding users on how to install it. - This update ensures better compatibility and user experience when working with optional dependencies in the Mistral integration. 
--- .../lib/integrations/mistral_tracer.py | 24 ++++++++++++++----- 1 file changed, 18 insertions(+), 6 deletions(-) diff --git a/src/openlayer/lib/integrations/mistral_tracer.py b/src/openlayer/lib/integrations/mistral_tracer.py index b536ca39..5939c50e 100644 --- a/src/openlayer/lib/integrations/mistral_tracer.py +++ b/src/openlayer/lib/integrations/mistral_tracer.py @@ -4,9 +4,16 @@ import logging import time from functools import wraps -from typing import Any, Dict, Iterator, Optional, Union +from typing import Any, Dict, Iterator, Optional, Union, TYPE_CHECKING -import mistralai +try: + import mistralai + HAVE_MISTRAL = True +except ImportError: + HAVE_MISTRAL = False + +if TYPE_CHECKING: + import mistralai from ..tracing import tracer @@ -14,8 +21,8 @@ def trace_mistral( - client: mistralai.Mistral, -) -> mistralai.Mistral: + client: "mistralai.Mistral", +) -> "mistralai.Mistral": """Patch the Mistral client to trace chat completions. The following information is collected for each chat completion: @@ -42,6 +49,11 @@ def trace_mistral( mistralai.Mistral The patched Mistral client. """ + if not HAVE_MISTRAL: + raise ImportError( + "Mistral library is not installed. Please install it with: pip install mistralai" + ) + stream_func = client.chat.stream create_func = client.chat.complete @@ -184,7 +196,7 @@ def handle_non_streaming_create( *args, inference_id: Optional[str] = None, **kwargs, -) -> mistralai.models.ChatCompletionResponse: +) -> "mistralai.models.ChatCompletionResponse": """Handles the create method when streaming is disabled. Parameters @@ -231,7 +243,7 @@ def handle_non_streaming_create( def parse_non_streaming_output_data( - response: mistralai.models.ChatCompletionResponse, + response: "mistralai.models.ChatCompletionResponse", ) -> Union[str, Dict[str, Any], None]: """Parses the output data from a non-streaming completion. 
From 7639007991f971100891a8b8b5367553f51bb71b Mon Sep 17 00:00:00 2001 From: Vinicius Mello Date: Thu, 10 Jul 2025 12:56:54 -0300 Subject: [PATCH 5/8] feat(tracer): enhance conditional imports and type hinting for Groq integration - Implemented conditional import handling for the `groq` library, allowing for graceful degradation when the library is not installed. - Improved type hints using forward references for `groq` types to enhance code clarity and maintainability. - Introduced an informative error message when the `groq` library is missing, guiding users on how to install it. - This update ensures better compatibility and user experience when working with optional dependencies in the Groq integration. --- src/openlayer/lib/integrations/groq_tracer.py | 20 +++++++++++++++---- 1 file changed, 16 insertions(+), 4 deletions(-) diff --git a/src/openlayer/lib/integrations/groq_tracer.py b/src/openlayer/lib/integrations/groq_tracer.py index bc40b1d8..fc359427 100644 --- a/src/openlayer/lib/integrations/groq_tracer.py +++ b/src/openlayer/lib/integrations/groq_tracer.py @@ -4,9 +4,16 @@ import logging import time from functools import wraps -from typing import Any, Dict, Iterator, Optional, Union +from typing import Any, Dict, Iterator, Optional, Union, TYPE_CHECKING -import groq +try: + import groq + HAVE_GROQ = True +except ImportError: + HAVE_GROQ = False + +if TYPE_CHECKING: + import groq from ..tracing import tracer @@ -14,8 +21,8 @@ def trace_groq( - client: groq.Groq, -) -> groq.Groq: + client: "groq.Groq", +) -> "groq.Groq": """Patch the Groq client to trace chat completions. The following information is collected for each chat completion: @@ -42,6 +49,11 @@ def trace_groq( groq.Groq The patched Groq client. """ + if not HAVE_GROQ: + raise ImportError( + "Groq library is not installed. 
Please install it with: pip install groq" + ) + create_func = client.chat.completions.create @wraps(create_func) From d521c4bcdcf6533fe36179a047f7e910d2117a4b Mon Sep 17 00:00:00 2001 From: Vinicius Mello Date: Thu, 10 Jul 2025 12:57:09 -0300 Subject: [PATCH 6/8] feat(tracer): enhance conditional imports and type hinting for OpenAI integration - Improved conditional import handling for the `openai` library, ensuring graceful degradation when the library is not installed. - Enhanced type hints using forward references for `openai` types to improve code clarity and maintainability. - Added an informative error message when the `openai` library is missing, guiding users on how to install it. - This update ensures better compatibility and user experience when working with optional dependencies in the OpenAI integration. --- .../lib/integrations/async_openai_tracer.py | 20 +++++++++++++++---- 1 file changed, 16 insertions(+), 4 deletions(-) diff --git a/src/openlayer/lib/integrations/async_openai_tracer.py b/src/openlayer/lib/integrations/async_openai_tracer.py index 4f1cfb94..f670fa16 100644 --- a/src/openlayer/lib/integrations/async_openai_tracer.py +++ b/src/openlayer/lib/integrations/async_openai_tracer.py @@ -4,9 +4,16 @@ import logging import time from functools import wraps -from typing import Any, AsyncIterator, Optional, Union +from typing import Any, AsyncIterator, Optional, Union, TYPE_CHECKING -import openai +try: + import openai + HAVE_OPENAI = True +except ImportError: + HAVE_OPENAI = False + +if TYPE_CHECKING: + import openai from .openai_tracer import ( get_model_parameters, @@ -19,8 +26,8 @@ def trace_async_openai( - client: Union[openai.AsyncOpenAI, openai.AsyncAzureOpenAI], -) -> Union[openai.AsyncOpenAI, openai.AsyncAzureOpenAI]: + client: Union["openai.AsyncOpenAI", "openai.AsyncAzureOpenAI"], +) -> Union["openai.AsyncOpenAI", "openai.AsyncAzureOpenAI"]: """Patch the AsyncOpenAI or AsyncAzureOpenAI client to trace chat completions. 
The following information is collected for each chat completion: @@ -47,6 +54,11 @@ def trace_async_openai( Union[openai.AsyncOpenAI, openai.AsyncAzureOpenAI] The patched AsyncOpenAI client. """ + if not HAVE_OPENAI: + raise ImportError( + "OpenAI library is not installed. Please install it with: pip install openai" + ) + is_azure_openai = isinstance(client, openai.AsyncAzureOpenAI) create_func = client.chat.completions.create From 23dc35d1a20081c23ef149757ea223e5f351baa3 Mon Sep 17 00:00:00 2001 From: Vinicius Mello Date: Thu, 10 Jul 2025 12:58:19 -0300 Subject: [PATCH 7/8] feat(langchain): enhance conditional imports and type hinting for LangChain integration - Implemented conditional import handling for the `langchain` library, allowing for graceful degradation when the library is not installed. - Improved type hints using forward references for `langchain` types to enhance code clarity and maintainability. - Introduced an informative error message when the `langchain` library is missing, guiding users on how to install it. - This update ensures better compatibility and user experience when working with optional dependencies in the LangChain integration. 
--- .../lib/integrations/langchain_callback.py | 44 +++++++++++++------ 1 file changed, 31 insertions(+), 13 deletions(-) diff --git a/src/openlayer/lib/integrations/langchain_callback.py b/src/openlayer/lib/integrations/langchain_callback.py index 8f5dfd3f..e21239b4 100644 --- a/src/openlayer/lib/integrations/langchain_callback.py +++ b/src/openlayer/lib/integrations/langchain_callback.py @@ -2,11 +2,19 @@ # pylint: disable=unused-argument import time -from typing import Any, Dict, List, Optional, Union +from typing import Any, Dict, List, Optional, Union, TYPE_CHECKING from uuid import UUID -from langchain import schema as langchain_schema -from langchain.callbacks.base import BaseCallbackHandler +try: + from langchain import schema as langchain_schema + from langchain.callbacks.base import BaseCallbackHandler + HAVE_LANGCHAIN = True +except ImportError: + HAVE_LANGCHAIN = False + +if TYPE_CHECKING: + from langchain import schema as langchain_schema + from langchain.callbacks.base import BaseCallbackHandler from ..tracing import tracer, steps, traces, enums from .. import utils @@ -18,10 +26,20 @@ } -class OpenlayerHandler(BaseCallbackHandler): +if HAVE_LANGCHAIN: + BaseCallbackHandlerClass = BaseCallbackHandler +else: + BaseCallbackHandlerClass = object + + +class OpenlayerHandler(BaseCallbackHandlerClass): # type: ignore[misc] """LangChain callback handler that logs to Openlayer.""" def __init__(self, **kwargs: Any) -> None: + if not HAVE_LANGCHAIN: + raise ImportError( + "LangChain library is not installed. 
Please install it with: pip install langchain" + ) super().__init__() self.metadata: Dict[str, Any] = kwargs or {} self.steps: Dict[UUID, steps.Step] = {} @@ -197,7 +215,7 @@ def _convert_step_objects_recursively(self, step: steps.Step) -> None: def _convert_langchain_objects(self, obj: Any) -> Any: """Recursively convert LangChain objects to JSON-serializable format.""" # Explicit check for LangChain BaseMessage and its subclasses - if isinstance(obj, langchain_schema.BaseMessage): + if HAVE_LANGCHAIN and isinstance(obj, langchain_schema.BaseMessage): return self._message_to_dict(obj) # Handle ChatPromptValue objects which contain messages @@ -249,7 +267,7 @@ def _convert_langchain_objects(self, obj: Any) -> Any: # For everything else, convert to string return str(obj) - def _message_to_dict(self, message: langchain_schema.BaseMessage) -> Dict[str, str]: + def _message_to_dict(self, message: "langchain_schema.BaseMessage") -> Dict[str, str]: """Convert a LangChain message to a JSON-serializable dictionary.""" message_type = getattr(message, "type", "user") @@ -262,7 +280,7 @@ def _message_to_dict(self, message: langchain_schema.BaseMessage) -> Dict[str, s return {"role": role, "content": str(message.content)} def _messages_to_prompt_format( - self, messages: List[List[langchain_schema.BaseMessage]] + self, messages: List[List["langchain_schema.BaseMessage"]] ) -> List[Dict[str, str]]: """Convert LangChain messages to Openlayer prompt format using unified conversion.""" @@ -302,7 +320,7 @@ def _extract_model_info( } def _extract_token_info( - self, response: langchain_schema.LLMResult + self, response: "langchain_schema.LLMResult" ) -> Dict[str, Any]: """Extract token information generically from LLM response.""" llm_output = response.llm_output or {} @@ -340,7 +358,7 @@ def _extract_token_info( "tokens": token_usage.get("total_tokens", 0), } - def _extract_output(self, response: langchain_schema.LLMResult) -> str: + def _extract_output(self, response: 
"langchain_schema.LLMResult") -> str: """Extract output text from LLM response.""" output = "" for generations in response.generations: @@ -384,7 +402,7 @@ def on_llm_start( def on_chat_model_start( self, serialized: Dict[str, Any], - messages: List[List[langchain_schema.BaseMessage]], + messages: List[List["langchain_schema.BaseMessage"]], *, run_id: UUID, parent_run_id: Optional[UUID] = None, @@ -414,7 +432,7 @@ def on_chat_model_start( def on_llm_end( self, - response: langchain_schema.LLMResult, + response: "langchain_schema.LLMResult", *, run_id: UUID, parent_run_id: Optional[UUID] = None, @@ -590,7 +608,7 @@ def on_text(self, text: str, **kwargs: Any) -> Any: def on_agent_action( self, - action: langchain_schema.AgentAction, + action: "langchain_schema.AgentAction", *, run_id: UUID, parent_run_id: Optional[UUID] = None, @@ -612,7 +630,7 @@ def on_agent_action( def on_agent_finish( self, - finish: langchain_schema.AgentFinish, + finish: "langchain_schema.AgentFinish", *, run_id: UUID, parent_run_id: Optional[UUID] = None, From 03facc3feaa222ce679230b883b402963f3d4183 Mon Sep 17 00:00:00 2001 From: Vinicius Mello Date: Thu, 10 Jul 2025 15:26:46 -0300 Subject: [PATCH 8/8] fix(tests): improve exception handling in integration test for conditional imports - Enhanced exception handling in the `run_integration_test` function by specifying `FileNotFoundError` and `OSError` in the exception clause, ensuring more precise error management. - This update prevents potential silent failures when attempting to unlink temporary files, improving the robustness of the integration tests for conditional imports. 
--- tests/test_integration_conditional_imports.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/test_integration_conditional_imports.py b/tests/test_integration_conditional_imports.py index 956c47e5..61324e02 100644 --- a/tests/test_integration_conditional_imports.py +++ b/tests/test_integration_conditional_imports.py @@ -13,11 +13,11 @@ """ import sys -import subprocess import tempfile import textwrap -from pathlib import Path +import subprocess from typing import List, Tuple +from pathlib import Path # Note: pytest is imported automatically when running via pytest # This file can also be run standalone for manual testing @@ -205,7 +205,7 @@ def run_integration_test(module_name: str, dependencies: List[str]) -> Tuple[boo try: Path(blocker_script).unlink() Path(test_script).unlink() - except: + except (FileNotFoundError, OSError): pass