diff --git a/pyproject.toml b/pyproject.toml
index d660bfb46..57a42be31 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -54,6 +54,7 @@ dev = [
     "toml>=0.10.2,<0.11",
     "twine>=4.0.1,<5",
     "maturin>=1.8.2",
+    "openinference-instrumentation-openai-agents>=0.1.0",
     "pytest-cov>=6.1.1",
     "httpx>=0.28.1",
     "pytest-pretty>=1.3.0",
diff --git a/temporalio/contrib/openai_agents/README.md b/temporalio/contrib/openai_agents/README.md
index ea9482c33..d458e6d55 100644
--- a/temporalio/contrib/openai_agents/README.md
+++ b/temporalio/contrib/openai_agents/README.md
@@ -536,6 +536,109 @@ SQLite storage is not suited to a distributed environment.
 | :--------------- | :-------: |
 | OpenAI platform  |    Yes    |
 
+## OpenTelemetry Integration
+
+This integration exports OpenAI agent telemetry to OpenTelemetry (OTEL) endpoints for observability and monitoring. It handles workflow replay semantics automatically, ensuring spans are exported only when workflows actually complete, not again on every replay.
+
+### Quick Start
+
+To enable OTEL telemetry export, provide exporters to the `OpenAIAgentsPlugin` or `AgentEnvironment`:
+
+```python
+from temporalio.contrib.openai_agents import OpenAIAgentsPlugin
+from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import OTLPSpanExporter
+
+# Your OTEL endpoint configuration
+exporters = [
+    OTLPSpanExporter(endpoint="http://localhost:4317"),
+    # Add multiple exporters for different endpoints as needed
+]
+
+# For production applications
+client = await Client.connect(
+    "localhost:7233",
+    plugins=[
+        OpenAIAgentsPlugin(
+            otel_exporters=exporters,  # Enable OTEL integration
+            model_params=ModelActivityParameters(
+                start_to_close_timeout=timedelta(seconds=30)
+            )
+        ),
+    ],
+)
+
+# For testing
+from temporalio.contrib.openai_agents.testing import AgentEnvironment
+
+async with AgentEnvironment(
+    model=my_test_model,
+    otel_exporters=exporters  # Enable OTEL integration for tests
+) as env:
+    client = env.applied_on_client(base_client)
+```
+
+### Features
+
+- **Multiple Exporters**: Send telemetry to multiple OTEL endpoints simultaneously
+- **Replay-Safe**: Spans are exported only when workflows actually complete, not during replays
+- **Deterministic IDs**: Consistent span IDs across workflow replays for reliable correlation
+- **Automatic Setup**: No manual instrumentation required - just provide exporters
+- **Graceful Degradation**: When no exporters are provided, no OTEL setup is attempted and the OTEL dependencies are not required
+
+### Dependencies
+
+OTEL integration requires additional dependencies:
+
+```bash
+pip install openinference-instrumentation-openai-agents opentelemetry-sdk
+```
+
+Choose the appropriate OTEL exporter for your monitoring system:
+
+```bash
+# For OTLP (works with most OTEL collectors and monitoring systems)
+pip install opentelemetry-exporter-otlp
+
+# Console output for development/debugging needs no extra package:
+# ConsoleSpanExporter ships with opentelemetry-sdk
+
+# Other exporters are available for specific systems
+pip install opentelemetry-exporter-<name>
+```
+
+### Example: Multiple Exporters
+
+```python
+from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import OTLPSpanExporter
+from opentelemetry.sdk.trace.export import ConsoleSpanExporter
+
+exporters = [
+    # Production monitoring system
+    OTLPSpanExporter(
+        endpoint="https://your-monitoring-system:4317",
+        headers={"api-key": "your-api-key"}
+    ),
+
+    # Secondary monitoring endpoint
+    OTLPSpanExporter(endpoint="https://backup-collector:4317"),
+
+    # Development debugging
+    ConsoleSpanExporter(),
+]
+
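+# Each exporter above is wrapped by the plugin in a replay-aware
+# TemporalSpanProcessor, so spans are exported once per workflow run
+# rather than again on every replay.
+plugin = 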
OpenAIAgentsPlugin(otel_exporters=exporters) +``` + +### Error Handling + +If you provide OTEL exporters but the required dependencies are not installed, you'll receive a clear error message: + +``` +ImportError: OTEL dependencies not available. Install with: pip install openinference-instrumentation-openai-agents opentelemetry-sdk +``` + +If no OTEL exporters are provided, the integration works normally without any OTEL setup. + ### Voice | Mode | Supported | diff --git a/temporalio/contrib/openai_agents/_otel.py b/temporalio/contrib/openai_agents/_otel.py new file mode 100644 index 000000000..8f10d65a3 --- /dev/null +++ b/temporalio/contrib/openai_agents/_otel.py @@ -0,0 +1,43 @@ +"""OpenTelemetry integration for OpenAI Agents in Temporal workflows. + +This module provides utilities for properly exporting OpenAI agent telemetry +to OpenTelemetry endpoints from within Temporal workflows, handling workflow +replay semantics correctly. +""" + +from opentelemetry.sdk.trace import ReadableSpan +from opentelemetry.sdk.trace.export import SimpleSpanProcessor + +from temporalio import workflow + + +class TemporalSpanProcessor(SimpleSpanProcessor): + """A span processor that handles Temporal workflow replay semantics. + + This processor ensures that spans are only exported when workflows actually + complete, not during intermediate replays. This is crucial for maintaining + correct telemetry data when using OpenAI agents within Temporal workflows. + + Example usage: + from opentelemetry.sdk import trace as trace_sdk + from opentelemetry.sdk.trace.export.in_memory_span_exporter import InMemorySpanExporter + from temporalio.contrib.openai_agents._temporal_trace_provider import TemporalIdGenerator + from temporalio.contrib.openai_agents._otel import TemporalSpanProcessor + from openinference.instrumentation.openai_agents import OpenAIAgentsInstrumentor + + exporter = InMemorySpanExporter() + provider = trace_sdk.TracerProvider(id_generator=TemporalIdGenerator()) + provider.add_span_processor(TemporalSpanProcessor(exporter)) + OpenAIAgentsInstrumentor().instrument(tracer_provider=provider) + """ + + def on_end(self, span: ReadableSpan) -> None: + """Handle span end events, skipping export during workflow replay. + + Args: + span: The span that has ended. 
+ """ + if workflow.in_workflow() and workflow.unsafe.is_replaying(): + # Skip exporting spans during workflow replay to avoid duplicate telemetry + return + super().on_end(span) diff --git a/temporalio/contrib/openai_agents/_temporal_openai_agents.py b/temporalio/contrib/openai_agents/_temporal_openai_agents.py index c1ace7a55..c243c7abf 100644 --- a/temporalio/contrib/openai_agents/_temporal_openai_agents.py +++ b/temporalio/contrib/openai_agents/_temporal_openai_agents.py @@ -20,7 +20,7 @@ TemporalTraceProvider, ) from temporalio.contrib.openai_agents._trace_interceptor import ( - OpenAIAgentsTracingInterceptor, + OpenAIAgentsContextPropagationInterceptor, ) from temporalio.contrib.openai_agents.workflow import AgentsWorkflowError from temporalio.contrib.pydantic import ( @@ -36,6 +36,8 @@ from temporalio.worker.workflow_sandbox import SandboxedWorkflowRunner if typing.TYPE_CHECKING: + from opentelemetry.sdk.trace.export import SpanExporter + from temporalio.contrib.openai_agents import ( StatefulMCPServerProvider, StatelessMCPServerProvider, @@ -46,6 +48,7 @@ def set_open_ai_agent_temporal_overrides( model_params: ModelActivityParameters, auto_close_tracing_in_workflows: bool = False, + start_spans_in_replay: bool = False, ): """Configure Temporal-specific overrides for OpenAI agents. @@ -66,6 +69,7 @@ def set_open_ai_agent_temporal_overrides( Args: model_params: Configuration parameters for Temporal activity execution of model calls. auto_close_tracing_in_workflows: If set to true, close tracing spans immediately. + start_spans_in_replay: If set to true, start spans even during replay. Primarily used for otel integration. Returns: A context manager that yields the configured TemporalTraceProvider. @@ -73,7 +77,8 @@ def set_open_ai_agent_temporal_overrides( previous_runner = get_default_agent_runner() previous_trace_provider = get_trace_provider() provider = TemporalTraceProvider( - auto_close_in_workflows=auto_close_tracing_in_workflows + auto_close_in_workflows=auto_close_tracing_in_workflows, + start_spans_in_replay=start_spans_in_replay, ) try: @@ -181,6 +186,8 @@ def __init__( "StatelessMCPServerProvider | StatefulMCPServerProvider" ] = (), register_activities: bool = True, + add_temporal_spans: bool = True, + otel_exporters: Sequence["SpanExporter"] | None = None, ) -> None: """Initialize the OpenAI agents plugin. @@ -196,6 +203,11 @@ def __init__( register_activities: Whether to register activities during the worker execution. This can be disabled on some workers to allow a separation of workflows and activities but should not be disabled on all workers, or agents will not be able to progress. + add_temporal_spans: Whether to add temporal spans to traces + otel_exporters: Optional sequence of OpenTelemetry span exporters for telemetry export. + When provided, the plugin automatically sets up OpenAI agents instrumentation + with proper Temporal workflow replay semantics. Each exporter will receive + a copy of all OpenAI agent spans. If None, no OTEL instrumentation is configured. 
""" if model_params is None: model_params = ModelActivityParameters() @@ -213,6 +225,9 @@ def __init__( "When configuring a custom provider, the model activity must have start_to_close_timeout or schedule_to_close_timeout" ) + # Store OTEL configuration for later setup + self._otel_exporters = otel_exporters + # Delay activity construction until they are actually needed def add_activities( activities: Sequence[Callable] | None, @@ -246,13 +261,51 @@ def workflow_runner(runner: WorkflowRunner | None) -> WorkflowRunner: @asynccontextmanager async def run_context() -> AsyncIterator[None]: - with set_open_ai_agent_temporal_overrides(model_params): - yield + # Set up OTEL instrumentation if exporters are provided + otel_instrumentor = None + if self._otel_exporters is not None: + from openinference.instrumentation.openai_agents import ( + OpenAIAgentsInstrumentor, + ) + from opentelemetry.sdk import trace as trace_sdk + + from temporalio.contrib.openai_agents._otel import TemporalSpanProcessor + from temporalio.contrib.openai_agents._temporal_trace_provider import ( + TemporalIdGenerator, + ) + + # Create trace provider with deterministic ID generation + provider = trace_sdk.TracerProvider(id_generator=TemporalIdGenerator()) + + # Add all exporters with TemporalSpanProcessor wrapper + for exporter in self._otel_exporters: + processor = TemporalSpanProcessor(exporter) + provider.add_span_processor(processor) + + # Set up instrumentor + otel_instrumentor = OpenAIAgentsInstrumentor() + otel_instrumentor.instrument(tracer_provider=provider) + + try: + with set_open_ai_agent_temporal_overrides( + model_params, start_spans_in_replay=self._otel_exporters is not None + ): + yield + finally: + # Clean up OTEL instrumentation + if otel_instrumentor is not None: + otel_instrumentor.uninstrument() + + interceptor = OpenAIAgentsContextPropagationInterceptor( + add_temporal_spans=add_temporal_spans, + start_traces=self._otel_exporters is not None, + ) super().__init__( name="OpenAIAgentsPlugin", data_converter=_data_converter, - worker_interceptors=[OpenAIAgentsTracingInterceptor()], + client_interceptors=[interceptor], + worker_interceptors=[interceptor], activities=add_activities, workflow_runner=workflow_runner, workflow_failure_exception_types=[AgentsWorkflowError], diff --git a/temporalio/contrib/openai_agents/_temporal_trace_provider.py b/temporalio/contrib/openai_agents/_temporal_trace_provider.py index 8ea6cadcb..b0f4b134c 100644 --- a/temporalio/contrib/openai_agents/_temporal_trace_provider.py +++ b/temporalio/contrib/openai_agents/_temporal_trace_provider.py @@ -1,5 +1,6 @@ """Provides support for integration with OpenAI Agents SDK tracing across workflows""" +import random import uuid from types import TracebackType from typing import Any, cast @@ -13,9 +14,10 @@ SynchronousMultiTracingProcessor, ) from agents.tracing.spans import Span +from opentelemetry.sdk.trace.id_generator import IdGenerator +from opentelemetry.trace import INVALID_SPAN_ID, INVALID_TRACE_ID from temporalio import workflow -from temporalio.contrib.openai_agents._trace_interceptor import RunIdRandom from temporalio.workflow import ReadOnlyContextError @@ -79,11 +81,15 @@ def activity_span( class _TemporalTracingProcessor(SynchronousMultiTracingProcessor): def __init__( - self, impl: SynchronousMultiTracingProcessor, auto_close_in_workflows: bool + self, + impl: SynchronousMultiTracingProcessor, + auto_close_in_workflows: bool, + start_spans_in_replay: bool, ): super().__init__() self._impl = impl self._auto_close_in_workflows 
= auto_close_in_workflows
+        self._start_spans_in_replay = start_spans_in_replay
 
     def add_tracing_processor(self, tracing_processor: TracingProcessor):
         self._impl.add_tracing_processor(tracing_processor)
@@ -92,9 +98,10 @@ def set_processors(self, processors: list[TracingProcessor]):
         self._impl.set_processors(processors)
 
     def on_trace_start(self, trace: Trace) -> None:
-        if workflow.in_workflow() and workflow.unsafe.is_replaying():
-            # In replay mode, don't report
-            return
+        if not self._start_spans_in_replay:
+            if workflow.in_workflow() and workflow.unsafe.is_replaying():
+                # In replay mode, don't report
+                return
 
         self._impl.on_trace_start(trace)
         if self._auto_close_in_workflows and workflow.in_workflow():
@@ -110,10 +117,10 @@ def on_trace_end(self, trace: Trace) -> None:
         self._impl.on_trace_end(trace)
 
     def on_span_start(self, span: Span[Any]) -> None:
-        if workflow.in_workflow() and workflow.unsafe.is_replaying():
-            # In replay mode, don't report
-            return
-
+        if not self._start_spans_in_replay:
+            if workflow.in_workflow() and workflow.unsafe.is_replaying():
+                # In replay mode, don't report
+                return
         self._impl.on_span_start(span)
         if self._auto_close_in_workflows and workflow.in_workflow():
             self._impl.on_span_end(span)
@@ -134,6 +141,22 @@ def force_flush(self) -> None:
         self._impl.force_flush()
 
 
+class RunIdRandom:
+    """Random uuid generator seeded by the run id of the workflow.
+    Doesn't currently support replay over reset correctly.
+    """
+
+    def __init__(self):
+        """Create a new random UUID generator."""
+        self._random = random.Random("OpenAIPlugin" + workflow.info().run_id)
+
+    def uuid4(self) -> str:
+        """Generate a random UUID."""
+        return uuid.UUID(
+            bytes=self._random.getrandbits(16 * 8).to_bytes(16, "big"), version=4
+        ).hex[:24]
+
+
 def _workflow_uuid() -> str:
     random = cast(
         RunIdRandom, getattr(workflow.instance(), "__temporal_openai_tracing_random")
@@ -141,16 +164,80 @@ def _workflow_uuid() -> str:
     )
     return random.uuid4()
 
 
+class TemporalIdGenerator(IdGenerator):
+    """OpenTelemetry ID generator that provides deterministic IDs for Temporal workflows.
+
+    This generator ensures that span and trace IDs are deterministic when running
+    within Temporal workflows by using the workflow's deterministic random source.
+    This is crucial for maintaining consistency across workflow replays.
+    """
+
+    def __init__(self):
+        """Initialize the ID generator with empty trace and span pools."""
+        self.traces = []
+        self.spans = []
+
+    def generate_span_id(self) -> int:
+        """Generate a deterministic span ID.
+
+        Uses the workflow's deterministic random source when in a workflow context,
+        otherwise falls back to system random.
+
+        Returns:
+            A 64-bit span ID that is guaranteed not to be INVALID_SPAN_ID.
+        """
+        if workflow.in_workflow():
+            get_rand_bits = workflow.random().getrandbits
+        else:
+            import random
+
+            get_rand_bits = random.getrandbits
+
+        if len(self.spans) > 0:
+            return self.spans.pop()
+
+        span_id = get_rand_bits(64)
+        while span_id == INVALID_SPAN_ID:
+            span_id = get_rand_bits(64)
+        return span_id
+
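+    # Note: workflow.random() is deterministically seeded from workflow
+    # history, so replays regenerate the same IDs and exported spans stay
+    # correlated across worker restarts.
+    def generate_trace_id(self) -> int:
+        """Generate a deterministic trace ID.
+
+        Uses the workflow's deterministic random source when in a workflow context,
+        otherwise falls back to system random.
+
+        Returns:
+            A 128-bit trace ID that is guaranteed not to be INVALID_TRACE_ID.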
+ """ + if workflow.in_workflow(): + get_rand_bits = workflow.random().getrandbits + else: + import random + + get_rand_bits = random.getrandbits + if len(self.traces) > 0: + return self.traces.pop() + + trace_id = get_rand_bits(128) + while trace_id == INVALID_TRACE_ID: + trace_id = get_rand_bits(128) + return trace_id + + class TemporalTraceProvider(DefaultTraceProvider): """A trace provider that integrates with Temporal workflows.""" - def __init__(self, auto_close_in_workflows: bool = False): + def __init__( + self, auto_close_in_workflows: bool = False, start_spans_in_replay: bool = False + ): """Initialize the TemporalTraceProvider.""" super().__init__() self._original_provider = cast(DefaultTraceProvider, get_trace_provider()) self._multi_processor = _TemporalTracingProcessor( self._original_provider._multi_processor, auto_close_in_workflows, + start_spans_in_replay, ) def time_iso(self) -> str: diff --git a/temporalio/contrib/openai_agents/_trace_interceptor.py b/temporalio/contrib/openai_agents/_trace_interceptor.py index d099ae09b..eedd468d0 100644 --- a/temporalio/contrib/openai_agents/_trace_interceptor.py +++ b/temporalio/contrib/openai_agents/_trace_interceptor.py @@ -13,7 +13,7 @@ get_trace_provider, ) from agents.tracing.scope import Scope -from agents.tracing.spans import NoOpSpan, Span +from agents.tracing.spans import Span import temporalio.activity import temporalio.api.common.v1 @@ -30,22 +30,17 @@ class _InputWithHeaders(Protocol): headers: Mapping[str, temporalio.api.common.v1.Payload] -def set_header_from_context( - input: _InputWithHeaders, payload_converter: temporalio.converter.PayloadConverter -) -> None: +def set_header_from_context(input: _InputWithHeaders) -> None: """Inserts the OpenAI Agents trace/span data in the input header.""" current = get_current_span() - if current is None or isinstance(current, NoOpSpan): - return - trace = get_trace_provider().get_current_trace() input.headers = { **input.headers, - HEADER_KEY: payload_converter.to_payload( + HEADER_KEY: temporalio.converter.PayloadConverter.default.to_payload( { "traceName": trace.name if trace else "Unknown Workflow", - "spanId": current.span_id, - "traceId": current.trace_id, + "spanId": current.span_id if current else None, + "traceId": trace.trace_id if trace else None, } ), } @@ -53,9 +48,9 @@ def set_header_from_context( @contextmanager def context_from_header( - span_name: str, input: _InputWithHeaders, payload_converter: temporalio.converter.PayloadConverter, + start_trace: bool = False, ): """Extracts and initializes trace information the input header.""" payload = input.headers.get(HEADER_KEY) @@ -63,48 +58,93 @@ def context_from_header( if span_info is None: yield else: - workflow_type = ( - activity.info().workflow_type - if activity.in_activity() - else workflow.info().workflow_type - ) - data = ( - { - "activityId": activity.info().activity_id, - "activity": activity.info().activity_type, - } - if activity.in_activity() - else None - ) current_trace = get_trace_provider().get_current_trace() - if current_trace is None: - metadata = { - "temporal:workflowId": activity.info().workflow_id - if activity.in_activity() - else workflow.info().workflow_id, - "temporal:runId": activity.info().workflow_run_id - if activity.in_activity() - else workflow.info().run_id, - "temporal:workflowType": workflow_type, - } + if current_trace is None and span_info["traceId"] is not None: current_trace = trace( span_info["traceName"], trace_id=span_info["traceId"], - metadata=metadata, ) - 
Scope.set_current_trace(current_trace)
+
+            if start_trace:
+                current_trace.start(mark_as_current=True)
+            else:
+                Scope.set_current_trace(current_trace)
+
         current_span = get_trace_provider().get_current_span()
-        if current_span is None:
+        if current_span is None and span_info["spanId"] is not None:
             current_span = get_trace_provider().create_span(
                 span_data=CustomSpanData(name="", data={}), span_id=span_info["spanId"]
             )
-            Scope.set_current_span(current_span)
+            if start_trace:
+                current_span.start(mark_as_current=True)
+            else:
+                Scope.set_current_span(current_span)
+
+        yield
+
+
+@contextmanager
+def temporal_span(
+    add_temporal_spans: bool,
+    span_name: str,
+):
+    """Create a temporal span context manager.
+
+    Args:
+        add_temporal_spans: Whether to add temporal-specific span data.
+        span_name: The name of the span to create.
+
+    Yields:
+        A span context with temporal metadata if enabled.
+    """
+    if add_temporal_spans:
+        # Attach activity metadata to the span when running inside an activity.
+        data = (
+            {
+                "activityId": activity.info().activity_id,
+                "activity": activity.info().activity_type,
+            }
+            if activity.in_activity()
+            else None
+        )
+
+        current_span = get_trace_provider().get_current_span()
+        with custom_span(name=span_name, parent=current_span, data=data):
+            yield
+    else:
+        yield
+
+
+class RunIdRandom:
+    """Random uuid generator seeded by the run id of the workflow.
+    Doesn't currently support replay over reset correctly.
+    """
+
+    def __init__(self):
+        """Create a new random UUID generator."""
+        self._random = random.Random("OpenAIPlugin" + workflow.info().run_id)
+
+    def uuid4(self) -> str:
+        """Generate a random UUID."""
+        return uuid.UUID(
+            bytes=self._random.getrandbits(16 * 8).to_bytes(16, "big"), version=4
+        ).hex[:24]
+
+
+def _ensure_tracing_random() -> None:
+    """We use a custom uuid generator for spans to ensure that changes to user code workflow.random usage
+    do not affect tracing and vice versa.
+    """
+    instance = workflow.instance()
+    if not hasattr(instance, "__temporal_openai_tracing_random"):
+        setattr(
+            workflow.instance(),
+            "__temporal_openai_tracing_random",
+            RunIdRandom(),
+        )
 
 
-class OpenAIAgentsTracingInterceptor(
+class OpenAIAgentsContextPropagationInterceptor(
     temporalio.client.Interceptor, temporalio.worker.Interceptor
 ):
     """Interceptor that propagates OpenAI agent tracing context through Temporal workflows and activities.
@@ -131,15 +171,22 @@
     def __init__(
         self,
         payload_converter: temporalio.converter.PayloadConverter = temporalio.converter.default().payload_converter,
+        add_temporal_spans: bool = True,
+        start_traces: bool = False,
     ) -> None:
         """Initialize the interceptor with a payload converter.
 
         Args:
             payload_converter: The payload converter to use for serializing/deserializing trace context.
                 Defaults to the default Temporal payload converter.
+            add_temporal_spans: Whether to add Temporal-specific spans to traces.
+            start_traces: Whether to start new traces if none exist. This will cause duplication if the
+                underlying trace provider actually processes start events. Primarily designed for use with
+                the OpenTelemetry integration.
         """
         super().__init__()
         self._payload_converter = payload_converter
+        self._start_traces = start_traces
+        self._add_temporal_spans = add_temporal_spans
 
     def intercept_client(
         self, next: temporalio.client.OutboundInterceptor
@@ -153,7 +200,7 @@ def intercept_client(
 
         Returns:
             An interceptor that propagates trace context for client operations.
""" return _ContextPropagationClientOutboundInterceptor( - next, self._payload_converter + next, self._add_temporal_spans ) def intercept_activity( @@ -167,7 +214,9 @@ def intercept_activity( Returns: An interceptor that propagates trace context for activity operations. """ - return _ContextPropagationActivityInboundInterceptor(next) + return _ContextPropagationActivityInboundInterceptor( + next, self._add_temporal_spans + ) def workflow_interceptor_class( self, input: temporalio.worker.WorkflowInterceptorClassInput @@ -180,7 +229,21 @@ def workflow_interceptor_class( Returns: The class of the workflow interceptor that propagates trace context. """ - return _ContextPropagationWorkflowInboundInterceptor + + class ModifiedInterceptor(_ContextPropagationWorkflowInboundInterceptor): + start_trace = self._start_traces + add_temporal_spans = self._add_temporal_spans + + return ModifiedInterceptor + + +@contextmanager +def _maybe_span(add_temporal_spans: bool, span_name: str, data: dict[str, Any] | None): + if add_temporal_spans and get_trace_provider().get_current_trace() is not None: + with custom_span(name=span_name, data=data): + yield + else: + yield class _ContextPropagationClientOutboundInterceptor( @@ -189,166 +252,123 @@ class _ContextPropagationClientOutboundInterceptor( def __init__( self, next: temporalio.client.OutboundInterceptor, - payload_converter: temporalio.converter.PayloadConverter, + add_temporal_spans: bool, ) -> None: super().__init__(next) - self._payload_converter = payload_converter + self._add_temporal_spans = add_temporal_spans async def start_workflow( self, input: temporalio.client.StartWorkflowInput ) -> temporalio.client.WorkflowHandle[Any, Any]: - metadata = { - "temporal:workflowType": input.workflow, - **({"temporal:workflowId": input.id} if input.id else {}), - } data = {"workflowId": input.id} if input.id else None span_name = "temporal:startWorkflow" - if get_trace_provider().get_current_trace() is None: - with trace( - span_name + ":" + input.workflow, metadata=metadata, group_id=input.id - ): - with custom_span(name=span_name + ":" + input.workflow, data=data): - set_header_from_context(input, self._payload_converter) - return await super().start_workflow(input) - else: - with custom_span(name=span_name, data=data): - set_header_from_context(input, self._payload_converter) - return await super().start_workflow(input) + with _maybe_span( + self._add_temporal_spans, + span_name + ":" + input.workflow, + data=data, + ): + set_header_from_context(input) + return await super().start_workflow(input) async def query_workflow(self, input: temporalio.client.QueryWorkflowInput) -> Any: - metadata = { - "temporal:queryWorkflow": input.query, - **({"temporal:workflowId": input.id} if input.id else {}), - } data = {"workflowId": input.id, "query": input.query} span_name = "temporal:queryWorkflow" - if get_trace_provider().get_current_trace() is None: - with trace(span_name, metadata=metadata, group_id=input.id): - with custom_span(name=span_name, data=data): - set_header_from_context(input, self._payload_converter) - return await super().query_workflow(input) - else: - with custom_span(name=span_name, data=data): - set_header_from_context(input, self._payload_converter) - return await super().query_workflow(input) + with _maybe_span( + self._add_temporal_spans, + span_name, + data=data, + ): + set_header_from_context(input) + return await super().query_workflow(input) async def signal_workflow( self, input: temporalio.client.SignalWorkflowInput ) -> None: - metadata = 
{ - "temporal:signalWorkflow": input.signal, - **({"temporal:workflowId": input.id} if input.id else {}), - } data = {"workflowId": input.id, "signal": input.signal} span_name = "temporal:signalWorkflow" - if get_trace_provider().get_current_trace() is None: - with trace(span_name, metadata=metadata, group_id=input.id): - with custom_span(name=span_name, data=data): - set_header_from_context(input, self._payload_converter) - await super().signal_workflow(input) - else: - with custom_span(name=span_name, data=data): - set_header_from_context(input, self._payload_converter) - await super().signal_workflow(input) + with _maybe_span( + self._add_temporal_spans, + span_name, + data=data, + ): + set_header_from_context(input) + await super().signal_workflow(input) async def start_workflow_update( self, input: temporalio.client.StartWorkflowUpdateInput ) -> temporalio.client.WorkflowUpdateHandle[Any]: - metadata = { - "temporal:updateWorkflow": input.update, - **({"temporal:workflowId": input.id} if input.id else {}), - } data = { **({"workflowId": input.id} if input.id else {}), "update": input.update, } span_name = "temporal:updateWorkflow" - if get_trace_provider().get_current_trace() is None: - with trace(span_name, metadata=metadata, group_id=input.id): - with custom_span(name=span_name, data=data): - set_header_from_context(input, self._payload_converter) - return await self.next.start_workflow_update(input) - else: - with custom_span(name=span_name, data=data): - set_header_from_context(input, self._payload_converter) - return await self.next.start_workflow_update(input) + with _maybe_span( + self._add_temporal_spans, + span_name, + data=data, + ): + set_header_from_context(input) + return await self.next.start_workflow_update(input) class _ContextPropagationActivityInboundInterceptor( temporalio.worker.ActivityInboundInterceptor ): + def __init__( + self, + next: temporalio.worker.ActivityInboundInterceptor, + add_temporal_spans: bool, + ) -> None: + super().__init__(next) + self._add_temporal_spans = add_temporal_spans + async def execute_activity( self, input: temporalio.worker.ExecuteActivityInput ) -> Any: - with context_from_header( - "temporal:executeActivity", input, temporalio.activity.payload_converter() - ): - return await self.next.execute_activity(input) - - -class RunIdRandom: - """Random uuid generator seeded by the run id of the workflow. - Doesn't currently support replay over reset correctly. - """ - - def __init__(self): - """Create a new random UUID generator.""" - self._random = random.Random("OpenAIPlugin" + workflow.info().run_id) - - def uuid4(self) -> str: - """Generate a random UUID.""" - return uuid.UUID( - bytes=random.getrandbits(16 * 8).to_bytes(16, "big"), version=4 - ).hex[:24] - - -def _ensure_tracing_random() -> None: - """We use a custom uuid generator for spans to ensure that changes to user code workflow.random usage - do not affect tracing and vice versa. 
-    """
-    instance = workflow.instance()
-    if not hasattr(instance, "__temporal_openai_tracing_random"):
-        setattr(
-            workflow.instance(),
-            "__temporal_openai_tracing_random",
-            RunIdRandom(),
-        )
+        with context_from_header(input, temporalio.activity.payload_converter()):
+            with temporal_span(self._add_temporal_spans, "temporal:executeActivity"):
+                return await self.next.execute_activity(input)
 
 
 class _ContextPropagationWorkflowInboundInterceptor(
     temporalio.worker.WorkflowInboundInterceptor
 ):
+    start_trace: bool = False
+    add_temporal_spans: bool = True
+
     def init(self, outbound: temporalio.worker.WorkflowOutboundInterceptor) -> None:
-        self.next.init(_ContextPropagationWorkflowOutboundInterceptor(outbound))
+        class ModifiedInterceptor(_ContextPropagationWorkflowOutboundInterceptor):
+            add_temporal_spans = self.add_temporal_spans
+
+        self.next.init(ModifiedInterceptor(outbound))
 
     async def execute_workflow(
         self, input: temporalio.worker.ExecuteWorkflowInput
     ) -> Any:
         _ensure_tracing_random()
         with context_from_header(
-            "temporal:executeWorkflow", input, temporalio.workflow.payload_converter()
+            input, temporalio.workflow.payload_converter(), start_trace=self.start_trace
         ):
-            return await self.next.execute_workflow(input)
+            with temporal_span(self.add_temporal_spans, "temporal:executeWorkflow"):
+                return await self.next.execute_workflow(input)
 
     async def handle_signal(self, input: temporalio.worker.HandleSignalInput) -> None:
         _ensure_tracing_random()
-        with context_from_header(
-            "temporal:handleSignal", input, temporalio.workflow.payload_converter()
-        ):
-            return await self.next.handle_signal(input)
+        with context_from_header(input, temporalio.workflow.payload_converter()):
+            with temporal_span(self.add_temporal_spans, "temporal:handleSignal"):
+                return await self.next.handle_signal(input)
 
     async def handle_query(self, input: temporalio.worker.HandleQueryInput) -> Any:
         _ensure_tracing_random()
-        with context_from_header(
-            "temporal:handleQuery", input, temporalio.workflow.payload_converter()
-        ):
+        with temporal_span(self.add_temporal_spans, "temporal:handleQuery"):
             return await self.next.handle_query(input)
 
     def handle_update_validator(
         self, input: temporalio.worker.HandleUpdateInput
     ) -> None:
         with context_from_header(
-            "temporal:handleUpdateValidator",
             input,
             temporalio.workflow.payload_converter(),
         ):
@@ -359,7 +379,6 @@ async def handle_update_handler(
     ) -> Any:
         _ensure_tracing_random()
         with context_from_header(
-            "temporal:handleUpdateHandler",
             input,
             temporalio.workflow.payload_converter(),
         ):
@@ -369,34 +388,28 @@ class _ContextPropagationWorkflowOutboundInterceptor(
     temporalio.worker.WorkflowOutboundInterceptor
 ):
+    add_temporal_spans: bool = True
+
     async def signal_child_workflow(
         self, input: temporalio.worker.SignalChildWorkflowInput
     ) -> None:
-        trace = get_trace_provider().get_current_trace()
-        if trace:
-            with custom_span(
-                name="temporal:signalChildWorkflow",
-                data={"workflowId": input.child_workflow_id},
-            ):
-                set_header_from_context(input, temporalio.workflow.payload_converter())
-                await self.next.signal_child_workflow(input)
-        else:
-            set_header_from_context(input, temporalio.workflow.payload_converter())
+        with _maybe_span(
+            self.add_temporal_spans,
+            "temporal:signalChildWorkflow",
+            data={"workflowId": input.child_workflow_id},
+        ):
+            set_header_from_context(input)
             await self.next.signal_child_workflow(input)
 
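+    # Note: _maybe_span still yields when add_temporal_spans is False, so
+    # trace headers are propagated either way; only the wrapping
+    # 'temporal:*' spans are skipped.
     async def 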
signal_external_workflow( self, input: temporalio.worker.SignalExternalWorkflowInput ) -> None: - trace = get_trace_provider().get_current_trace() - if trace: - with custom_span( - name="temporal:signalExternalWorkflow", - data={"workflowId": input.workflow_id}, - ): - set_header_from_context(input, temporalio.workflow.payload_converter()) - await self.next.signal_external_workflow(input) - else: - set_header_from_context(input, temporalio.workflow.payload_converter()) + with _maybe_span( + self.add_temporal_spans, + "temporal:signalExternalWorkflow", + data={"workflowId": input.workflow_id}, + ): + set_header_from_context(input) await self.next.signal_external_workflow(input) def start_activity( @@ -404,13 +417,13 @@ def start_activity( ) -> temporalio.workflow.ActivityHandle: trace = get_trace_provider().get_current_trace() span: Span | None = None - if trace: + if trace and self.add_temporal_spans: span = custom_span( name="temporal:startActivity", data={"activity": input.activity} ) span.start(mark_as_current=True) - set_header_from_context(input, temporalio.workflow.payload_converter()) + set_header_from_context(input) handle = self.next.start_activity(input) if span: handle.add_done_callback(lambda _: span.finish()) # type: ignore @@ -421,12 +434,12 @@ async def start_child_workflow( ) -> temporalio.workflow.ChildWorkflowHandle: trace = get_trace_provider().get_current_trace() span: Span | None = None - if trace: + if trace and self.add_temporal_spans: span = custom_span( name="temporal:startChildWorkflow", data={"workflow": input.workflow} ) span.start(mark_as_current=True) - set_header_from_context(input, temporalio.workflow.payload_converter()) + set_header_from_context(input) handle = await self.next.start_child_workflow(input) if span: handle.add_done_callback(lambda _: span.finish()) # type: ignore @@ -437,12 +450,12 @@ def start_local_activity( ) -> temporalio.workflow.ActivityHandle: trace = get_trace_provider().get_current_trace() span: Span | None = None - if trace: + if trace and self.add_temporal_spans: span = custom_span( name="temporal:startLocalActivity", data={"activity": input.activity} ) span.start(mark_as_current=True) - set_header_from_context(input, temporalio.workflow.payload_converter()) + set_header_from_context(input) handle = self.next.start_local_activity(input) if span: handle.add_done_callback(lambda _: span.finish()) # type: ignore diff --git a/temporalio/contrib/openai_agents/testing.py b/temporalio/contrib/openai_agents/testing.py index c4fea60cb..cff245c08 100644 --- a/temporalio/contrib/openai_agents/testing.py +++ b/temporalio/contrib/openai_agents/testing.py @@ -1,5 +1,6 @@ """Testing utilities for OpenAI agents.""" +import typing from collections.abc import AsyncIterator, Callable, Sequence from typing import Any @@ -37,6 +38,9 @@ "TestModelProvider", ] +if typing.TYPE_CHECKING: + from opentelemetry.sdk.trace.export import SpanExporter + class ResponseBuilders: """Builders for creating model responses for testing. @@ -226,6 +230,8 @@ def __init__( StatelessMCPServerProvider | StatefulMCPServerProvider ] = (), register_activities: bool = True, + add_temporal_spans: bool = True, + otel_exporters: Sequence["SpanExporter"] | None = None, ) -> None: """Initialize the AgentEnvironment. @@ -242,6 +248,10 @@ def __init__( If both are provided, model_provider will be used. mcp_server_providers: Sequence of MCP servers to automatically register with the worker. register_activities: Whether to register activities during worker execution. 
+            add_temporal_spans: Whether to add Temporal spans to traces.
+            otel_exporters: Optional sequence of OpenTelemetry span exporters for telemetry export.
+                When provided, automatically sets up OpenAI agents instrumentation with proper
+                Temporal workflow replay semantics. If None, no OTEL instrumentation is configured.
 
         .. warning::
             This API is experimental and may change in the future.
@@ -255,6 +265,8 @@ def __init__(
         self._mcp_server_providers = mcp_server_providers
         self._register_activities = register_activities
         self._plugin: OpenAIAgentsPlugin | None = None
+        self._add_temporal_spans = add_temporal_spans
+        self._otel_exporters = otel_exporters
 
     async def __aenter__(self) -> "AgentEnvironment":
         """Enter the async context manager."""
@@ -264,6 +276,8 @@ async def __aenter__(self) -> "AgentEnvironment":
             model_provider=self._model_provider,
             mcp_server_providers=self._mcp_server_providers,
             register_activities=self._register_activities,
+            add_temporal_spans=self._add_temporal_spans,
+            otel_exporters=self._otel_exporters,
         )
         return self
diff --git a/temporalio/plugin.py b/temporalio/plugin.py
index db917e337..11582d542 100644
--- a/temporalio/plugin.py
+++ b/temporalio/plugin.py
@@ -150,11 +150,28 @@ def configure_worker(self, config: WorkerConfig) -> WorkerConfig:
         if workflow_runner:
             config["workflow_runner"] = workflow_runner
 
-        interceptors = _resolve_append_parameter(
-            config.get("interceptors"), self.worker_interceptors
-        )
-        if interceptors is not None:
-            config["interceptors"] = interceptors
+        client = config.get("client")
+
+        # Don't add worker interceptors that are already registered on the client.
+        if (
+            self.worker_interceptors
+            and not callable(self.worker_interceptors)
+            and client
+        ):
+            new_interceptors = list(config.get("interceptors") or [])
+            for interceptor in self.worker_interceptors:
+                client_interceptors = client.config(active_config=True).get(
+                    "interceptors"
+                )
+                if not client_interceptors or interceptor not in client_interceptors:
+                    new_interceptors.append(interceptor)
+            config["interceptors"] = new_interceptors
+        else:
+            interceptors = _resolve_append_parameter(
+                config.get("interceptors"), self.worker_interceptors
+            )
+            if interceptors is not None:
+                config["interceptors"] = interceptors
 
         failure_exception_types = _resolve_append_parameter(
             config.get("workflow_failure_exception_types"),
diff --git a/tests/contrib/openai_agents/research_agents/research_manager.py b/tests/contrib/openai_agents/research_agents/research_manager.py
index f37eb6293..98ab550f5 100644
--- a/tests/contrib/openai_agents/research_agents/research_manager.py
+++ b/tests/contrib/openai_agents/research_agents/research_manager.py
@@ -2,7 +2,7 @@
 
 import asyncio
 
-from agents import Runner, custom_span, gen_trace_id, trace
+from agents import Runner, custom_span
 
 import temporalio.workflow
 from tests.contrib.openai_agents.research_agents.planner_agent import (
@@ -24,8 +24,7 @@ def __init__(self):
         self.writer_agent = new_writer_agent()
 
     async def run(self, query: str) -> str:
-        trace_id = gen_trace_id()
-        with trace("Research trace", trace_id=trace_id):
+        with custom_span("Research manager"):
             search_plan = await self._plan_searches(query)
             search_results = await self._perform_searches(search_plan)
             report = await self._write_report(query, search_results)
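The `configure_worker` branch above exists because `OpenAIAgentsPlugin` now registers one interceptor instance as both a client interceptor and a worker interceptor; without the membership check, a worker built from such a client would run the interceptor twice and emit duplicate spans. A minimal sketch of the merge rule, using a hypothetical standalone `merge_interceptors` helper (illustrative only, not part of this change):

```python
from collections.abc import Sequence
from typing import Any, Optional


def merge_interceptors(
    existing: Sequence[Any],
    plugin_interceptors: Sequence[Any],
    client_interceptors: Optional[Sequence[Any]],
) -> list[Any]:
    """Append plugin interceptors, skipping any instance the client already runs."""
    merged = list(existing)
    for interceptor in plugin_interceptors:
        # An instance registered on the client already sees every request,
        # so adding it to the worker as well would double-process each one.
        if not client_interceptors or interceptor not in client_interceptors:
            merged.append(interceptor)
    return merged


# Example: the shared interceptor is dropped, the worker-only one is kept.
shared, worker_only = object(), object()
assert merge_interceptors([], [shared, worker_only], [shared]) == [worker_only]
```

diff --git a/tests/contrib/openai_agents/test_openai_tracing.py b/tests/contrib/openai_agents/test_openai_tracing.py
index 39d1cc6f0..c376c512a 100644
--- a/tests/contrib/openai_agents/test_openai_tracing.py
+++ 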
b/tests/contrib/openai_agents/test_openai_tracing.py
@@ -2,9 +2,11 @@
 from datetime import timedelta
 from typing import Any
 
-from agents import Span, Trace, TracingProcessor
+from agents import Span, Trace, TracingProcessor, custom_span, trace
 from agents.tracing import get_trace_provider
+from opentelemetry.sdk.trace.export.in_memory_span_exporter import InMemorySpanExporter
 
+from temporalio import activity, workflow
 from temporalio.client import Client
 from temporalio.contrib.openai_agents.testing import (
     AgentEnvironment,
@@ -13,7 +15,7 @@
     ResearchWorkflow,
     research_mock_model,
 )
-from tests.helpers import new_worker
+from tests.helpers import assert_eq_eventually, new_worker
 
 
 class MemoryTracingProcessor(TracingProcessor):
@@ -43,7 +45,6 @@ def force_flush(self) -> None:
 async def test_tracing(client: Client):
     async with AgentEnvironment(model=research_mock_model()) as env:
         client = env.applied_on_client(client)
-
         provider = get_trace_provider()
 
         processor = MemoryTracingProcessor()
@@ -53,14 +54,15 @@
             client,
             ResearchWorkflow,
         ) as worker:
-            workflow_handle = await client.start_workflow(
-                ResearchWorkflow.run,
-                "Caribbean vacation spots in April, optimizing for surfing, hiking and water sports",
-                id=f"research-workflow-{uuid.uuid4()}",
-                task_queue=worker.task_queue,
-                execution_timeout=timedelta(seconds=120),
-            )
-            await workflow_handle.result()
+            with trace("Research workflow"):
+                workflow_handle = await client.start_workflow(
+                    ResearchWorkflow.run,
+                    "Caribbean vacation spots in April, optimizing for surfing, hiking and water sports",
+                    id=f"research-workflow-{uuid.uuid4()}",
+                    task_queue=worker.task_queue,
+                    execution_timeout=timedelta(seconds=120),
+                )
+                await workflow_handle.result()
 
     # There is one closed root trace
     assert len(processor.trace_events) == 2
@@ -76,25 +78,59 @@ def paired_span(a: tuple[Span[Any], bool], b: tuple[Span[Any], bool]) -> None:
         assert a[1]
         assert not b[1]
 
+    print(
+        "\n".join(
+            [str(event.span_data.export()) for event, _ in processor.span_events]
+        )
+    )
+
+    # Workflow start spans
+    paired_span(processor.span_events[0], processor.span_events[1])
+    assert (
+        processor.span_events[0][0].span_data.export().get("name")
+        == "temporal:startWorkflow:ResearchWorkflow"
+    )
+
+    # Workflow execute spans
+    paired_span(processor.span_events[2], processor.span_events[-1])
+    assert (
+        processor.span_events[2][0].span_data.export().get("name")
+        == "temporal:executeWorkflow"
+    )
+
+    # Overarching research span
+    paired_span(processor.span_events[3], processor.span_events[-2])
+    assert (
+        processor.span_events[3][0].span_data.export().get("name")
+        == "Research manager"
+    )
+
     # Initial planner spans - There are only 3 because we don't make an actual model call
-    paired_span(processor.span_events[0], processor.span_events[5])
+    paired_span(processor.span_events[4], processor.span_events[9])
     assert (
-        processor.span_events[0][0].span_data.export().get("name") == "PlannerAgent"
+        processor.span_events[4][0].span_data.export().get("name") == "PlannerAgent"
     )
-    paired_span(processor.span_events[1], processor.span_events[4])
+    paired_span(processor.span_events[5], processor.span_events[8])
     assert (
-        processor.span_events[1][0].span_data.export().get("name")
+        processor.span_events[5][0].span_data.export().get("name")
         == 
"temporal:startActivity" ) - paired_span(processor.span_events[2], processor.span_events[3]) + paired_span(processor.span_events[6], processor.span_events[7]) assert ( - processor.span_events[2][0].span_data.export().get("name") + processor.span_events[6][0].span_data.export().get("name") == "temporal:executeActivity" ) - for span, start in processor.span_events[6:-6]: + for span, start in processor.span_events[10:-7]: span_data = span.span_data.export() # All spans should be closed @@ -126,19 +162,566 @@ def paired_span(a: tuple[Span[Any], bool], b: tuple[Span[Any], bool]) -> None: ) # Final writer spans - There are only 3 because we don't make an actual model call - paired_span(processor.span_events[-6], processor.span_events[-1]) + paired_span(processor.span_events[-8], processor.span_events[-3]) assert ( - processor.span_events[-6][0].span_data.export().get("name") == "WriterAgent" + processor.span_events[-8][0].span_data.export().get("name") == "WriterAgent" ) - paired_span(processor.span_events[-5], processor.span_events[-2]) + paired_span(processor.span_events[-7], processor.span_events[-4]) assert ( - processor.span_events[-5][0].span_data.export().get("name") + processor.span_events[-7][0].span_data.export().get("name") == "temporal:startActivity" ) - paired_span(processor.span_events[-4], processor.span_events[-3]) + paired_span(processor.span_events[-6], processor.span_events[-5]) assert ( - processor.span_events[-4][0].span_data.export().get("name") + processor.span_events[-6][0].span_data.export().get("name") == "temporal:executeActivity" ) + + +@activity.defn +async def simple_no_context_activity() -> str: + return "success" + + +@workflow.defn +class TraceWorkflow: + def __init__(self) -> None: + self._proceed = False + self._ready = False + + @workflow.run + async def run(self): + # Workflow creates spans within existing trace context + with custom_span("Workflow span"): + await workflow.execute_activity( + simple_no_context_activity, + start_to_close_timeout=timedelta(seconds=10), + ) + self._ready = True + await workflow.wait_condition(lambda: self._proceed) + return "done" + + @workflow.query + def ready(self) -> bool: + return self._ready + + @workflow.signal + def proceed(self) -> None: + self._proceed = True + + +@workflow.defn +class SelfTracingWorkflow: + def __init__(self) -> None: + self._proceed = False + self._ready = False + + @workflow.run + async def run(self): + # Workflow starts its own trace + with trace("Workflow trace"): + with custom_span("Workflow span"): + await workflow.execute_activity( + simple_no_context_activity, + start_to_close_timeout=timedelta(seconds=10), + ) + self._ready = True + await workflow.wait_condition(lambda: self._proceed) + return "done" + + @workflow.query + def ready(self) -> bool: + return self._ready + + @workflow.signal + def proceed(self) -> None: + self._proceed = True + + +async def test_external_trace_to_workflow_spans(client: Client): + """Test: External trace → workflow spans (with worker restart).""" + exporter = InMemorySpanExporter() + workflow_id = None + task_queue = str(uuid.uuid4()) + + # First worker: Start workflow with external trace context + async with AgentEnvironment( + model=research_mock_model(), add_temporal_spans=False, otel_exporters=[exporter] + ) as env: + new_client = env.applied_on_client(client) + + async with new_worker( + new_client, + TraceWorkflow, + activities=[simple_no_context_activity], + max_cached_workflows=0, + task_queue=task_queue, + ) as worker: + # Start external trace, then start 
workflow within that trace + with trace("External trace"): + workflow_handle = await new_client.start_workflow( + TraceWorkflow.run, + id=f"external-trace-workflow-{uuid.uuid4()}", + task_queue=worker.task_queue, + execution_timeout=timedelta(seconds=120), + ) + workflow_id = workflow_handle.id + + # Wait for workflow to be ready + async def ready() -> bool: + return await workflow_handle.query(TraceWorkflow.ready) + + await assert_eq_eventually(True, ready) + + # Second worker: Complete the workflow with fresh objects (new instrumentation) + async with AgentEnvironment( + model=research_mock_model(), add_temporal_spans=False, otel_exporters=[exporter] + ) as env: + new_client = env.applied_on_client(client) + + async with new_worker( + new_client, + TraceWorkflow, + activities=[simple_no_context_activity], + max_cached_workflows=0, + task_queue=task_queue, + ) as worker: + workflow_handle = new_client.get_workflow_handle(workflow_id) + await workflow_handle.signal(TraceWorkflow.proceed) + result = await workflow_handle.result() + assert result == "done" + + spans = exporter.get_finished_spans() + print("External trace → workflow spans:") + print( + "\n".join( + [ + str( + { + "Name": span.name, + "Id": span.context.span_id if span.context else None, + "Parent": span.parent.span_id if span.parent else None, + } + ) + for span in spans + ] + ) + ) + + assert len(spans) >= 2 # External trace + workflow span + + # Find the spans + external_span = next((s for s in spans if s.name == "External trace"), None) + workflow_span = next((s for s in spans if s.name == "Workflow span"), None) + + assert external_span is not None, "External trace span should exist" + assert workflow_span is not None, "Workflow span should exist" + + # Verify parenting: External trace should be root, workflow span should be child of external trace + assert ( + external_span.parent is None + ), "External trace should have no parent (be root)" + assert workflow_span.parent is not None, "Workflow span should have a parent" + assert external_span.context is not None, "External span should have context" + assert ( + workflow_span.parent.span_id == external_span.context.span_id + ), "Workflow span should be child of external trace" + + # Verify all spans have unique IDs + span_ids = [span.context.span_id for span in spans if span.context] + assert len(span_ids) == len( + set(span_ids) + ), f"All spans should have unique IDs, got: {span_ids}" + + +async def test_external_trace_and_span_to_workflow_spans(client: Client): + """Test: External trace + span → workflow spans (with worker restart).""" + exporter = InMemorySpanExporter() + workflow_id = None + task_queue = str(uuid.uuid4()) + + # First worker: Start workflow with external trace + span context + async with AgentEnvironment( + model=research_mock_model(), add_temporal_spans=False, otel_exporters=[exporter] + ) as env: + new_client = env.applied_on_client(client) + + async with new_worker( + new_client, + TraceWorkflow, + activities=[simple_no_context_activity], + max_cached_workflows=0, + task_queue=task_queue, + ) as worker: + # Start external trace + span, then start workflow within that context + with trace("External trace"): + with custom_span("External span"): + workflow_handle = await new_client.start_workflow( + TraceWorkflow.run, + id=f"external-span-workflow-{uuid.uuid4()}", + task_queue=worker.task_queue, + execution_timeout=timedelta(seconds=120), + ) + workflow_id = workflow_handle.id + + # Wait for workflow to be ready + async def ready() -> bool: + return 
await workflow_handle.query(TraceWorkflow.ready) + + await assert_eq_eventually(True, ready) + + # Second worker: Complete the workflow with fresh objects (new instrumentation) + async with AgentEnvironment( + model=research_mock_model(), add_temporal_spans=False, otel_exporters=[exporter] + ) as env: + new_client = env.applied_on_client(client) + + async with new_worker( + new_client, + TraceWorkflow, + activities=[simple_no_context_activity], + max_cached_workflows=0, + task_queue=task_queue, + ) as worker: + workflow_handle = new_client.get_workflow_handle(workflow_id) + await workflow_handle.signal(TraceWorkflow.proceed) + result = await workflow_handle.result() + assert result == "done" + + spans = exporter.get_finished_spans() + print("External trace + span → workflow spans:") + print( + "\n".join( + [ + str( + { + "Name": span.name, + "Id": span.context.span_id if span.context else None, + "Parent": span.parent.span_id if span.parent else None, + } + ) + for span in spans + ] + ) + ) + + assert len(spans) >= 3 # External trace + external span + workflow span + + # Find the spans + external_trace_span = next((s for s in spans if s.name == "External trace"), None) + external_span = next((s for s in spans if s.name == "External span"), None) + workflow_span = next((s for s in spans if s.name == "Workflow span"), None) + + assert external_trace_span is not None, "External trace span should exist" + assert external_span is not None, "External span should exist" + assert workflow_span is not None, "Workflow span should exist" + + # Verify parenting: External span should be child of trace, workflow span should be child of external span + assert ( + external_trace_span.parent is None + ), "External trace should have no parent (be root)" + assert external_span.parent is not None, "External span should have a parent" + assert ( + external_trace_span.context is not None + ), "External trace span should have context" + assert ( + external_span.parent.span_id == external_trace_span.context.span_id + ), "External span should be child of external trace" + assert workflow_span.parent is not None, "Workflow span should have a parent" + assert external_span.context is not None, "External span should have context" + assert ( + workflow_span.parent.span_id == external_span.context.span_id + ), "Workflow span should be child of external span" + + # Verify all spans have unique IDs + span_ids = [span.context.span_id for span in spans if span.context] + assert len(span_ids) == len( + set(span_ids) + ), f"All spans should have unique IDs, got: {span_ids}" + + +async def test_workflow_only_trace_to_spans(client: Client): + """Test: Workflow-only trace → spans (with worker restart).""" + exporter = InMemorySpanExporter() + workflow_id = None + task_queue = str(uuid.uuid4()) + + # First worker: Start workflow (no external trace context) + async with AgentEnvironment( + model=research_mock_model(), add_temporal_spans=False, otel_exporters=[exporter] + ) as env: + new_client = env.applied_on_client(client) + + async with new_worker( + new_client, + SelfTracingWorkflow, + activities=[simple_no_context_activity], + max_cached_workflows=0, + task_queue=task_queue, + ) as worker: + # No external trace - workflow starts its own + workflow_handle = await new_client.start_workflow( + SelfTracingWorkflow.run, + id=f"self-tracing-workflow-{uuid.uuid4()}", + task_queue=worker.task_queue, + execution_timeout=timedelta(seconds=120), + ) + workflow_id = workflow_handle.id + + # Wait for workflow to be ready + async def 
ready() -> bool:
+                return await workflow_handle.query(SelfTracingWorkflow.ready)
+
+            await assert_eq_eventually(True, ready)
+
+    # Second worker: Complete the workflow with fresh objects (new instrumentation)
+    async with AgentEnvironment(
+        model=research_mock_model(), add_temporal_spans=False, otel_exporters=[exporter]
+    ) as env:
+        new_client = env.applied_on_client(client)
+
+        async with new_worker(
+            new_client,
+            SelfTracingWorkflow,
+            activities=[simple_no_context_activity],
+            max_cached_workflows=0,
+            task_queue=task_queue,
+        ) as worker:
+            workflow_handle = new_client.get_workflow_handle(workflow_id)
+            await workflow_handle.signal(SelfTracingWorkflow.proceed)
+            result = await workflow_handle.result()
+            assert result == "done"
+
+    spans = exporter.get_finished_spans()
+    print("Workflow-only trace → spans:")
+    print(f"Total spans: {len(spans)}")
+    print(
+        "\n".join(
+            [
+                str(
+                    {
+                        "Name": span.name,
+                        "Id": span.context.span_id if span.context else None,
+                        "Parent": span.parent.span_id if span.parent else None,
+                    }
+                )
+                for span in spans
+            ]
+        )
+    )
+
+    # Debug: print all span names
+    print("Span names:", [span.name for span in spans])
+
+    assert len(spans) >= 2  # Workflow trace + workflow span
+
+    # Find the spans
+    workflow_trace_span = next((s for s in spans if s.name == "Workflow trace"), None)
+    workflow_span = next((s for s in spans if s.name == "Workflow span"), None)
+
+    assert workflow_trace_span is not None, "Workflow trace span should exist"
+    assert workflow_span is not None, "Workflow span should exist"
+
+    # Verify parenting: Workflow trace should be root, workflow span should be child of workflow trace
+    assert (
+        workflow_trace_span.parent is None
+    ), "Workflow trace should have no parent (be root)"
+    assert workflow_span.parent is not None, "Workflow span should have a parent"
+    assert (
+        workflow_trace_span.context is not None
+    ), "Workflow trace span should have context"
+    assert (
+        workflow_span.parent.span_id == workflow_trace_span.context.span_id
+    ), "Workflow span should be child of workflow trace"
+
+
+@workflow.defn
+class SimpleWorkflow:
+    @workflow.run
+    async def run(self) -> str:
+        # Use custom_span without starting a trace - should be a no-op
+        with custom_span("Should not appear"):
+            with custom_span("Neither should this"):
+                return "done"
+
+
+async def test_custom_span_without_trace_context(client: Client):
+    """Test that custom_span() without a trace context emits no spans.
+
+    If no OpenAI trace is started, custom_span() calls should be no-ops
+    and nothing should be exported.
+ """ + exporter = InMemorySpanExporter() + + async with AgentEnvironment( + model=research_mock_model(), otel_exporters=[exporter] + ) as env: + client = env.applied_on_client(client) + + async with new_worker(client, SimpleWorkflow) as worker: + result = await client.execute_workflow( + SimpleWorkflow.run, + id=f"simple-workflow-{uuid.uuid4()}", + task_queue=worker.task_queue, + ) + assert result == "done" + + spans = exporter.get_finished_spans() + + # Should have no custom spans since no trace was started + custom_spans = [ + span + for span in spans + if "Should not appear" in span.name or "Neither should this" in span.name + ] + + assert ( + len(custom_spans) == 0 + ), f"Expected no custom spans without trace context, but found: {[s.name for s in custom_spans]}" + + # Should have no spans at all since no trace was started and spans should be dropped + assert ( + len(spans) == 0 + ), f"Expected no spans without trace context, but found: {[s.name for s in spans]}" + + +async def test_otel_tracing_in_runner(client: Client): + """Test the ergonomic AgentEnvironment OTEL integration.""" + exporter = InMemorySpanExporter() + + # Test the new ergonomic API - just pass exporters to AgentEnvironment + async with AgentEnvironment( + model=research_mock_model(), add_temporal_spans=False, otel_exporters=[exporter] + ) as env: + client = env.applied_on_client(client) + + async with new_worker( + client, + ResearchWorkflow, + max_cached_workflows=0, + ) as worker: + with trace("Research workflow"): + workflow_handle = await client.start_workflow( + ResearchWorkflow.run, + "Caribbean vacation spots in April, optimizing for surfing, hiking and water sports", + id=f"research-workflow-{uuid.uuid4()}", + task_queue=worker.task_queue, + execution_timeout=timedelta(seconds=120), + ) + await workflow_handle.result() + + spans = exporter.get_finished_spans() + print("OTEL tracing in runner spans:") + print( + "\n".join( + [ + str( + { + "Name": span.name, + "Id": span.context.span_id if span.context else None, + "Parent": span.parent.span_id if span.parent else None, + } + ) + for span in spans + ] + ) + ) + + # Verify basic span capture + assert len(spans) > 0, "Should have captured some spans from the research workflow" + + # Categorize spans that users expect to see in their agents workflow + research_manager_spans = [span for span in spans if "Research manager" in span.name] + search_web_spans = [span for span in spans if "Search the web" in span.name] + agent_execution_spans = [ + span + for span in spans + if any( + agent_name in span.name.lower() + for agent_name in ["planner", "search", "writer"] + ) + and "workflow" not in span.name.lower() + ] + + all_span_names = [span.name for span in spans] + unique_span_names = list(set(all_span_names)) + + # Assert users get visibility into their workflow coordination + assert len(research_manager_spans) > 0, ( + f"Expected 'Research manager' spans for workflow coordination visibility, " + f"but only found: {unique_span_names}" + ) + + # Assert users can see their search phases + assert len(search_web_spans) > 0, ( + f"Expected 'Search the web' spans for search phase visibility, " + f"but only found: {unique_span_names}" + ) + + # Assert users can see individual agent executions + assert len(agent_execution_spans) > 0, ( + f"Expected agent execution spans (planner, search, writer) for individual agent visibility, " + f"but only found: {unique_span_names}" + ) + + # Validate span hierarchy integrity + span_ids = {span.context.span_id for span in spans if 
+    for span in spans:
+        if span.parent:
+            assert (
+                span.parent.span_id in span_ids
+            ), f"Span '{span.name}' has invalid parent reference - parent span doesn't exist"
+
+    # Validate that logical parent-child relationships match the user code structure
+    workflow_trace_spans = [span for span in spans if "Research workflow" in span.name]
+    assert (
+        len(workflow_trace_spans) == 1
+    ), f"Expected exactly one 'Research workflow' trace, got {len(workflow_trace_spans)}"
+    workflow_span = workflow_trace_spans[0]
+    assert workflow_span.context is not None
+
+    # The research manager should be a child of the workflow trace
+    research_span = research_manager_spans[0]
+    assert research_span.context is not None
+    assert (
+        research_span.parent is not None
+    ), "Research manager span should have a parent"
+    assert (
+        research_span.parent.span_id == workflow_span.context.span_id
+    ), "Expected 'Research manager' to be child of 'Research workflow' trace"
+
+    # "Search the web" should be a child of the research manager
+    search_span = search_web_spans[0]
+    assert search_span.context is not None
+    assert search_span.parent is not None, "Search the web span should have a parent"
+    assert (
+        search_span.parent.span_id == research_span.context.span_id
+    ), "Expected 'Search the web' to be child of 'Research manager' span"
+
+    # All search agent spans should be children of "Search the web"
+    search_agent_spans = [span for span in spans if "Search agent" in span.name]
+    for search_agent_span in search_agent_spans:
+        assert (
+            search_agent_span.parent is not None
+        ), f"Search agent span '{search_agent_span.name}' should have a parent"
+        assert (
+            search_agent_span.parent.span_id == search_span.context.span_id
+        ), "Expected all 'Search agent' spans to be children of 'Search the web' span"
+
+    # PlannerAgent and WriterAgent should be children of the research manager
+    planner_spans = [span for span in spans if "PlannerAgent" in span.name]
+    writer_spans = [span for span in spans if "WriterAgent" in span.name]
+
+    for planner_span in planner_spans:
+        assert planner_span.parent is not None, "PlannerAgent span should have a parent"
+        assert (
+            planner_span.parent.span_id == research_span.context.span_id
+        ), "Expected 'PlannerAgent' to be child of 'Research manager' span"
+
+    for writer_span in writer_spans:
+        assert writer_span.parent is not None, "WriterAgent span should have a parent"
+        assert (
+            writer_span.parent.span_id == research_span.context.span_id
+        ), "Expected 'WriterAgent' to be child of 'Research manager' span"
diff --git a/uv.lock b/uv.lock
index e29cf27db..e747c4b2e 100644
--- a/uv.lock
+++ b/uv.lock
@@ -1882,6 +1882,48 @@ litellm = [
{ name = "litellm", marker = "python_full_version < '3.14'" },
]
+
+[[package]]
+name = "openinference-instrumentation"
+version = "0.1.42"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "openinference-semantic-conventions" },
+    { name = "opentelemetry-api" },
+    { name = "opentelemetry-sdk" },
+    { name = "wrapt" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/00/d0/b19061a21fd6127d2857c77744a36073bba9c1502d1d5e8517b708eb8b7c/openinference_instrumentation-0.1.42.tar.gz", hash = "sha256:2275babc34022e151b5492cfba41d3b12e28377f8e08cb45e5d64fe2d9d7fe37", size = 23954, upload-time = "2025-11-05T01:37:46.869Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/c3/71/43ee4616fc95dbd2f560550f199c6652a5eb93f84e8aa0039bc95c19cfe0/openinference_instrumentation-0.1.42-py3-none-any.whl", hash =
"sha256:e7521ff90833ef7cc65db526a2f59b76a496180abeaaee30ec6abbbc0b43f8ec", size = 30086, upload-time = "2025-11-05T01:37:43.866Z" }, +] + +[[package]] +name = "openinference-instrumentation-openai-agents" +version = "1.4.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "openinference-instrumentation" }, + { name = "openinference-semantic-conventions" }, + { name = "opentelemetry-api" }, + { name = "opentelemetry-instrumentation" }, + { name = "opentelemetry-semantic-conventions" }, + { name = "typing-extensions" }, + { name = "wrapt" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f4/40/ac0a3ad5040d2582156f6c0fa2b8f6233af79af295dab154d642d42aed69/openinference_instrumentation_openai_agents-1.4.0.tar.gz", hash = "sha256:2fd50d03f6d999b9793566a1f2787bf9e2cd3774fa8bf32542250dfc61e32d62", size = 12746, upload-time = "2025-12-04T19:58:36.319Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c6/e5/299103b68f5427a7d11acd0f4804c5b3f3e9508a511f8f8078a43ad7e6bd/openinference_instrumentation_openai_agents-1.4.0-py3-none-any.whl", hash = "sha256:539361d0f3bdebdb1e898250fbba8e6173f2bce9d7ba007cf7934f10850f474b", size = 14411, upload-time = "2025-12-04T19:58:34.224Z" }, +] + +[[package]] +name = "openinference-semantic-conventions" +version = "0.1.25" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/0b/68/81c8a0b90334ff11e4f285e4934c57f30bea3ef0c0b9f99b65e7b80fae3b/openinference_semantic_conventions-0.1.25.tar.gz", hash = "sha256:f0a8c2cfbd00195d1f362b4803518341e80867d446c2959bf1743f1894fce31d", size = 12767, upload-time = "2025-11-05T01:37:45.89Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fd/3d/dd14ee2eb8a3f3054249562e76b253a1545c76adbbfd43a294f71acde5c3/openinference_semantic_conventions-0.1.25-py3-none-any.whl", hash = "sha256:3814240f3bd61f05d9562b761de70ee793d55b03bca1634edf57d7a2735af238", size = 10395, upload-time = "2025-11-05T01:37:43.697Z" }, +] + [[package]] name = "opentelemetry-api" version = "1.37.0" @@ -1895,6 +1937,21 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/91/48/28ed9e55dcf2f453128df738210a980e09f4e468a456fa3c763dbc8be70a/opentelemetry_api-1.37.0-py3-none-any.whl", hash = "sha256:accf2024d3e89faec14302213bc39550ec0f4095d1cf5ca688e1bfb1c8612f47", size = 65732, upload-time = "2025-09-11T10:28:41.826Z" }, ] +[[package]] +name = "opentelemetry-instrumentation" +version = "0.58b0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "opentelemetry-api" }, + { name = "opentelemetry-semantic-conventions" }, + { name = "packaging" }, + { name = "wrapt" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f6/36/7c307d9be8ce4ee7beb86d7f1d31027f2a6a89228240405a858d6e4d64f9/opentelemetry_instrumentation-0.58b0.tar.gz", hash = "sha256:df640f3ac715a3e05af145c18f527f4422c6ab6c467e40bd24d2ad75a00cb705", size = 31549, upload-time = "2025-09-11T11:42:14.084Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d4/db/5ff1cd6c5ca1d12ecf1b73be16fbb2a8af2114ee46d4b0e6d4b23f4f4db7/opentelemetry_instrumentation-0.58b0-py3-none-any.whl", hash = "sha256:50f97ac03100676c9f7fc28197f8240c7290ca1baa12da8bfbb9a1de4f34cc45", size = 33019, upload-time = "2025-09-11T11:41:00.624Z" }, +] + [[package]] name = "opentelemetry-sdk" version = "1.37.0" @@ -2985,6 +3042,7 @@ dev = [ { name = "mypy-protobuf" }, { name = "openai-agents" }, { name = "openai-agents", extra = ["litellm"], marker = 
"python_full_version < '3.14'" }, + { name = "openinference-instrumentation-openai-agents" }, { name = "psutil" }, { name = "pydocstyle" }, { name = "pydoctor" }, @@ -3028,6 +3086,7 @@ dev = [ { name = "mypy-protobuf", specifier = ">=3.3.0,<4" }, { name = "openai-agents", marker = "python_full_version >= '3.14'", specifier = ">=0.3,<0.7" }, { name = "openai-agents", extras = ["litellm"], marker = "python_full_version < '3.14'", specifier = ">=0.3,<0.7" }, + { name = "openinference-instrumentation-openai-agents", specifier = ">=0.1.0" }, { name = "psutil", specifier = ">=5.9.3,<6" }, { name = "pydocstyle", specifier = ">=6.3.0,<7" }, { name = "pydoctor", specifier = ">=25.10.1,<26" }, @@ -3302,6 +3361,75 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/85/cd/584a2ceb5532af99dd09e50919e3615ba99aa127e9850eafe5f31ddfdb9a/uvicorn-0.37.0-py3-none-any.whl", hash = "sha256:913b2b88672343739927ce381ff9e2ad62541f9f8289664fa1d1d3803fa2ce6c", size = 67976, upload-time = "2025-09-23T13:33:45.842Z" }, ] +[[package]] +name = "wrapt" +version = "1.17.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/95/8f/aeb76c5b46e273670962298c23e7ddde79916cb74db802131d49a85e4b7d/wrapt-1.17.3.tar.gz", hash = "sha256:f66eb08feaa410fe4eebd17f2a2c8e2e46d3476e9f8c783daa8e09e0faa666d0", size = 55547, upload-time = "2025-08-12T05:53:21.714Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3f/23/bb82321b86411eb51e5a5db3fb8f8032fd30bd7c2d74bfe936136b2fa1d6/wrapt-1.17.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:88bbae4d40d5a46142e70d58bf664a89b6b4befaea7b2ecc14e03cedb8e06c04", size = 53482, upload-time = "2025-08-12T05:51:44.467Z" }, + { url = "https://files.pythonhosted.org/packages/45/69/f3c47642b79485a30a59c63f6d739ed779fb4cc8323205d047d741d55220/wrapt-1.17.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e6b13af258d6a9ad602d57d889f83b9d5543acd471eee12eb51f5b01f8eb1bc2", size = 38676, upload-time = "2025-08-12T05:51:32.636Z" }, + { url = "https://files.pythonhosted.org/packages/d1/71/e7e7f5670c1eafd9e990438e69d8fb46fa91a50785332e06b560c869454f/wrapt-1.17.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd341868a4b6714a5962c1af0bd44f7c404ef78720c7de4892901e540417111c", size = 38957, upload-time = "2025-08-12T05:51:54.655Z" }, + { url = "https://files.pythonhosted.org/packages/de/17/9f8f86755c191d6779d7ddead1a53c7a8aa18bccb7cea8e7e72dfa6a8a09/wrapt-1.17.3-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:f9b2601381be482f70e5d1051a5965c25fb3625455a2bf520b5a077b22afb775", size = 81975, upload-time = "2025-08-12T05:52:30.109Z" }, + { url = "https://files.pythonhosted.org/packages/f2/15/dd576273491f9f43dd09fce517f6c2ce6eb4fe21681726068db0d0467096/wrapt-1.17.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:343e44b2a8e60e06a7e0d29c1671a0d9951f59174f3709962b5143f60a2a98bd", size = 83149, upload-time = "2025-08-12T05:52:09.316Z" }, + { url = "https://files.pythonhosted.org/packages/0c/c4/5eb4ce0d4814521fee7aa806264bf7a114e748ad05110441cd5b8a5c744b/wrapt-1.17.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:33486899acd2d7d3066156b03465b949da3fd41a5da6e394ec49d271baefcf05", size = 82209, upload-time = "2025-08-12T05:52:10.331Z" }, + { url = "https://files.pythonhosted.org/packages/31/4b/819e9e0eb5c8dc86f60dfc42aa4e2c0d6c3db8732bce93cc752e604bb5f5/wrapt-1.17.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = 
"sha256:e6f40a8aa5a92f150bdb3e1c44b7e98fb7113955b2e5394122fa5532fec4b418", size = 81551, upload-time = "2025-08-12T05:52:31.137Z" }, + { url = "https://files.pythonhosted.org/packages/f8/83/ed6baf89ba3a56694700139698cf703aac9f0f9eb03dab92f57551bd5385/wrapt-1.17.3-cp310-cp310-win32.whl", hash = "sha256:a36692b8491d30a8c75f1dfee65bef119d6f39ea84ee04d9f9311f83c5ad9390", size = 36464, upload-time = "2025-08-12T05:53:01.204Z" }, + { url = "https://files.pythonhosted.org/packages/2f/90/ee61d36862340ad7e9d15a02529df6b948676b9a5829fd5e16640156627d/wrapt-1.17.3-cp310-cp310-win_amd64.whl", hash = "sha256:afd964fd43b10c12213574db492cb8f73b2f0826c8df07a68288f8f19af2ebe6", size = 38748, upload-time = "2025-08-12T05:53:00.209Z" }, + { url = "https://files.pythonhosted.org/packages/bd/c3/cefe0bd330d389c9983ced15d326f45373f4073c9f4a8c2f99b50bfea329/wrapt-1.17.3-cp310-cp310-win_arm64.whl", hash = "sha256:af338aa93554be859173c39c85243970dc6a289fa907402289eeae7543e1ae18", size = 36810, upload-time = "2025-08-12T05:52:51.906Z" }, + { url = "https://files.pythonhosted.org/packages/52/db/00e2a219213856074a213503fdac0511203dceefff26e1daa15250cc01a0/wrapt-1.17.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:273a736c4645e63ac582c60a56b0acb529ef07f78e08dc6bfadf6a46b19c0da7", size = 53482, upload-time = "2025-08-12T05:51:45.79Z" }, + { url = "https://files.pythonhosted.org/packages/5e/30/ca3c4a5eba478408572096fe9ce36e6e915994dd26a4e9e98b4f729c06d9/wrapt-1.17.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5531d911795e3f935a9c23eb1c8c03c211661a5060aab167065896bbf62a5f85", size = 38674, upload-time = "2025-08-12T05:51:34.629Z" }, + { url = "https://files.pythonhosted.org/packages/31/25/3e8cc2c46b5329c5957cec959cb76a10718e1a513309c31399a4dad07eb3/wrapt-1.17.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0610b46293c59a3adbae3dee552b648b984176f8562ee0dba099a56cfbe4df1f", size = 38959, upload-time = "2025-08-12T05:51:56.074Z" }, + { url = "https://files.pythonhosted.org/packages/5d/8f/a32a99fc03e4b37e31b57cb9cefc65050ea08147a8ce12f288616b05ef54/wrapt-1.17.3-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:b32888aad8b6e68f83a8fdccbf3165f5469702a7544472bdf41f582970ed3311", size = 82376, upload-time = "2025-08-12T05:52:32.134Z" }, + { url = "https://files.pythonhosted.org/packages/31/57/4930cb8d9d70d59c27ee1332a318c20291749b4fba31f113c2f8ac49a72e/wrapt-1.17.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8cccf4f81371f257440c88faed6b74f1053eef90807b77e31ca057b2db74edb1", size = 83604, upload-time = "2025-08-12T05:52:11.663Z" }, + { url = "https://files.pythonhosted.org/packages/a8/f3/1afd48de81d63dd66e01b263a6fbb86e1b5053b419b9b33d13e1f6d0f7d0/wrapt-1.17.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8a210b158a34164de8bb68b0e7780041a903d7b00c87e906fb69928bf7890d5", size = 82782, upload-time = "2025-08-12T05:52:12.626Z" }, + { url = "https://files.pythonhosted.org/packages/1e/d7/4ad5327612173b144998232f98a85bb24b60c352afb73bc48e3e0d2bdc4e/wrapt-1.17.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:79573c24a46ce11aab457b472efd8d125e5a51da2d1d24387666cd85f54c05b2", size = 82076, upload-time = "2025-08-12T05:52:33.168Z" }, + { url = "https://files.pythonhosted.org/packages/bb/59/e0adfc831674a65694f18ea6dc821f9fcb9ec82c2ce7e3d73a88ba2e8718/wrapt-1.17.3-cp311-cp311-win32.whl", hash = "sha256:c31eebe420a9a5d2887b13000b043ff6ca27c452a9a22fa71f35f118e8d4bf89", size = 36457, upload-time = 
"2025-08-12T05:53:03.936Z" }, + { url = "https://files.pythonhosted.org/packages/83/88/16b7231ba49861b6f75fc309b11012ede4d6b0a9c90969d9e0db8d991aeb/wrapt-1.17.3-cp311-cp311-win_amd64.whl", hash = "sha256:0b1831115c97f0663cb77aa27d381237e73ad4f721391a9bfb2fe8bc25fa6e77", size = 38745, upload-time = "2025-08-12T05:53:02.885Z" }, + { url = "https://files.pythonhosted.org/packages/9a/1e/c4d4f3398ec073012c51d1c8d87f715f56765444e1a4b11e5180577b7e6e/wrapt-1.17.3-cp311-cp311-win_arm64.whl", hash = "sha256:5a7b3c1ee8265eb4c8f1b7d29943f195c00673f5ab60c192eba2d4a7eae5f46a", size = 36806, upload-time = "2025-08-12T05:52:53.368Z" }, + { url = "https://files.pythonhosted.org/packages/9f/41/cad1aba93e752f1f9268c77270da3c469883d56e2798e7df6240dcb2287b/wrapt-1.17.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:ab232e7fdb44cdfbf55fc3afa31bcdb0d8980b9b95c38b6405df2acb672af0e0", size = 53998, upload-time = "2025-08-12T05:51:47.138Z" }, + { url = "https://files.pythonhosted.org/packages/60/f8/096a7cc13097a1869fe44efe68dace40d2a16ecb853141394047f0780b96/wrapt-1.17.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:9baa544e6acc91130e926e8c802a17f3b16fbea0fd441b5a60f5cf2cc5c3deba", size = 39020, upload-time = "2025-08-12T05:51:35.906Z" }, + { url = "https://files.pythonhosted.org/packages/33/df/bdf864b8997aab4febb96a9ae5c124f700a5abd9b5e13d2a3214ec4be705/wrapt-1.17.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6b538e31eca1a7ea4605e44f81a48aa24c4632a277431a6ed3f328835901f4fd", size = 39098, upload-time = "2025-08-12T05:51:57.474Z" }, + { url = "https://files.pythonhosted.org/packages/9f/81/5d931d78d0eb732b95dc3ddaeeb71c8bb572fb01356e9133916cd729ecdd/wrapt-1.17.3-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:042ec3bb8f319c147b1301f2393bc19dba6e176b7da446853406d041c36c7828", size = 88036, upload-time = "2025-08-12T05:52:34.784Z" }, + { url = "https://files.pythonhosted.org/packages/ca/38/2e1785df03b3d72d34fc6252d91d9d12dc27a5c89caef3335a1bbb8908ca/wrapt-1.17.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3af60380ba0b7b5aeb329bc4e402acd25bd877e98b3727b0135cb5c2efdaefe9", size = 88156, upload-time = "2025-08-12T05:52:13.599Z" }, + { url = "https://files.pythonhosted.org/packages/b3/8b/48cdb60fe0603e34e05cffda0b2a4adab81fd43718e11111a4b0100fd7c1/wrapt-1.17.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0b02e424deef65c9f7326d8c19220a2c9040c51dc165cddb732f16198c168396", size = 87102, upload-time = "2025-08-12T05:52:14.56Z" }, + { url = "https://files.pythonhosted.org/packages/3c/51/d81abca783b58f40a154f1b2c56db1d2d9e0d04fa2d4224e357529f57a57/wrapt-1.17.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:74afa28374a3c3a11b3b5e5fca0ae03bef8450d6aa3ab3a1e2c30e3a75d023dc", size = 87732, upload-time = "2025-08-12T05:52:36.165Z" }, + { url = "https://files.pythonhosted.org/packages/9e/b1/43b286ca1392a006d5336412d41663eeef1ad57485f3e52c767376ba7e5a/wrapt-1.17.3-cp312-cp312-win32.whl", hash = "sha256:4da9f45279fff3543c371d5ababc57a0384f70be244de7759c85a7f989cb4ebe", size = 36705, upload-time = "2025-08-12T05:53:07.123Z" }, + { url = "https://files.pythonhosted.org/packages/28/de/49493f962bd3c586ab4b88066e967aa2e0703d6ef2c43aa28cb83bf7b507/wrapt-1.17.3-cp312-cp312-win_amd64.whl", hash = "sha256:e71d5c6ebac14875668a1e90baf2ea0ef5b7ac7918355850c0908ae82bcb297c", size = 38877, upload-time = "2025-08-12T05:53:05.436Z" }, + { url = 
"https://files.pythonhosted.org/packages/f1/48/0f7102fe9cb1e8a5a77f80d4f0956d62d97034bbe88d33e94699f99d181d/wrapt-1.17.3-cp312-cp312-win_arm64.whl", hash = "sha256:604d076c55e2fdd4c1c03d06dc1a31b95130010517b5019db15365ec4a405fc6", size = 36885, upload-time = "2025-08-12T05:52:54.367Z" }, + { url = "https://files.pythonhosted.org/packages/fc/f6/759ece88472157acb55fc195e5b116e06730f1b651b5b314c66291729193/wrapt-1.17.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:a47681378a0439215912ef542c45a783484d4dd82bac412b71e59cf9c0e1cea0", size = 54003, upload-time = "2025-08-12T05:51:48.627Z" }, + { url = "https://files.pythonhosted.org/packages/4f/a9/49940b9dc6d47027dc850c116d79b4155f15c08547d04db0f07121499347/wrapt-1.17.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:54a30837587c6ee3cd1a4d1c2ec5d24e77984d44e2f34547e2323ddb4e22eb77", size = 39025, upload-time = "2025-08-12T05:51:37.156Z" }, + { url = "https://files.pythonhosted.org/packages/45/35/6a08de0f2c96dcdd7fe464d7420ddb9a7655a6561150e5fc4da9356aeaab/wrapt-1.17.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:16ecf15d6af39246fe33e507105d67e4b81d8f8d2c6598ff7e3ca1b8a37213f7", size = 39108, upload-time = "2025-08-12T05:51:58.425Z" }, + { url = "https://files.pythonhosted.org/packages/0c/37/6faf15cfa41bf1f3dba80cd3f5ccc6622dfccb660ab26ed79f0178c7497f/wrapt-1.17.3-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:6fd1ad24dc235e4ab88cda009e19bf347aabb975e44fd5c2fb22a3f6e4141277", size = 88072, upload-time = "2025-08-12T05:52:37.53Z" }, + { url = "https://files.pythonhosted.org/packages/78/f2/efe19ada4a38e4e15b6dff39c3e3f3f73f5decf901f66e6f72fe79623a06/wrapt-1.17.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0ed61b7c2d49cee3c027372df5809a59d60cf1b6c2f81ee980a091f3afed6a2d", size = 88214, upload-time = "2025-08-12T05:52:15.886Z" }, + { url = "https://files.pythonhosted.org/packages/40/90/ca86701e9de1622b16e09689fc24b76f69b06bb0150990f6f4e8b0eeb576/wrapt-1.17.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:423ed5420ad5f5529db9ce89eac09c8a2f97da18eb1c870237e84c5a5c2d60aa", size = 87105, upload-time = "2025-08-12T05:52:17.914Z" }, + { url = "https://files.pythonhosted.org/packages/fd/e0/d10bd257c9a3e15cbf5523025252cc14d77468e8ed644aafb2d6f54cb95d/wrapt-1.17.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e01375f275f010fcbf7f643b4279896d04e571889b8a5b3f848423d91bf07050", size = 87766, upload-time = "2025-08-12T05:52:39.243Z" }, + { url = "https://files.pythonhosted.org/packages/e8/cf/7d848740203c7b4b27eb55dbfede11aca974a51c3d894f6cc4b865f42f58/wrapt-1.17.3-cp313-cp313-win32.whl", hash = "sha256:53e5e39ff71b3fc484df8a522c933ea2b7cdd0d5d15ae82e5b23fde87d44cbd8", size = 36711, upload-time = "2025-08-12T05:53:10.074Z" }, + { url = "https://files.pythonhosted.org/packages/57/54/35a84d0a4d23ea675994104e667ceff49227ce473ba6a59ba2c84f250b74/wrapt-1.17.3-cp313-cp313-win_amd64.whl", hash = "sha256:1f0b2f40cf341ee8cc1a97d51ff50dddb9fcc73241b9143ec74b30fc4f44f6cb", size = 38885, upload-time = "2025-08-12T05:53:08.695Z" }, + { url = "https://files.pythonhosted.org/packages/01/77/66e54407c59d7b02a3c4e0af3783168fff8e5d61def52cda8728439d86bc/wrapt-1.17.3-cp313-cp313-win_arm64.whl", hash = "sha256:7425ac3c54430f5fc5e7b6f41d41e704db073309acfc09305816bc6a0b26bb16", size = 36896, upload-time = "2025-08-12T05:52:55.34Z" }, + { url = 
"https://files.pythonhosted.org/packages/02/a2/cd864b2a14f20d14f4c496fab97802001560f9f41554eef6df201cd7f76c/wrapt-1.17.3-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:cf30f6e3c077c8e6a9a7809c94551203c8843e74ba0c960f4a98cd80d4665d39", size = 54132, upload-time = "2025-08-12T05:51:49.864Z" }, + { url = "https://files.pythonhosted.org/packages/d5/46/d011725b0c89e853dc44cceb738a307cde5d240d023d6d40a82d1b4e1182/wrapt-1.17.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:e228514a06843cae89621384cfe3a80418f3c04aadf8a3b14e46a7be704e4235", size = 39091, upload-time = "2025-08-12T05:51:38.935Z" }, + { url = "https://files.pythonhosted.org/packages/2e/9e/3ad852d77c35aae7ddebdbc3b6d35ec8013af7d7dddad0ad911f3d891dae/wrapt-1.17.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:5ea5eb3c0c071862997d6f3e02af1d055f381b1d25b286b9d6644b79db77657c", size = 39172, upload-time = "2025-08-12T05:51:59.365Z" }, + { url = "https://files.pythonhosted.org/packages/c3/f7/c983d2762bcce2326c317c26a6a1e7016f7eb039c27cdf5c4e30f4160f31/wrapt-1.17.3-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:281262213373b6d5e4bb4353bc36d1ba4084e6d6b5d242863721ef2bf2c2930b", size = 87163, upload-time = "2025-08-12T05:52:40.965Z" }, + { url = "https://files.pythonhosted.org/packages/e4/0f/f673f75d489c7f22d17fe0193e84b41540d962f75fce579cf6873167c29b/wrapt-1.17.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:dc4a8d2b25efb6681ecacad42fca8859f88092d8732b170de6a5dddd80a1c8fa", size = 87963, upload-time = "2025-08-12T05:52:20.326Z" }, + { url = "https://files.pythonhosted.org/packages/df/61/515ad6caca68995da2fac7a6af97faab8f78ebe3bf4f761e1b77efbc47b5/wrapt-1.17.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:373342dd05b1d07d752cecbec0c41817231f29f3a89aa8b8843f7b95992ed0c7", size = 86945, upload-time = "2025-08-12T05:52:21.581Z" }, + { url = "https://files.pythonhosted.org/packages/d3/bd/4e70162ce398462a467bc09e768bee112f1412e563620adc353de9055d33/wrapt-1.17.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:d40770d7c0fd5cbed9d84b2c3f2e156431a12c9a37dc6284060fb4bec0b7ffd4", size = 86857, upload-time = "2025-08-12T05:52:43.043Z" }, + { url = "https://files.pythonhosted.org/packages/2b/b8/da8560695e9284810b8d3df8a19396a6e40e7518059584a1a394a2b35e0a/wrapt-1.17.3-cp314-cp314-win32.whl", hash = "sha256:fbd3c8319de8e1dc79d346929cd71d523622da527cca14e0c1d257e31c2b8b10", size = 37178, upload-time = "2025-08-12T05:53:12.605Z" }, + { url = "https://files.pythonhosted.org/packages/db/c8/b71eeb192c440d67a5a0449aaee2310a1a1e8eca41676046f99ed2487e9f/wrapt-1.17.3-cp314-cp314-win_amd64.whl", hash = "sha256:e1a4120ae5705f673727d3253de3ed0e016f7cd78dc463db1b31e2463e1f3cf6", size = 39310, upload-time = "2025-08-12T05:53:11.106Z" }, + { url = "https://files.pythonhosted.org/packages/45/20/2cda20fd4865fa40f86f6c46ed37a2a8356a7a2fde0773269311f2af56c7/wrapt-1.17.3-cp314-cp314-win_arm64.whl", hash = "sha256:507553480670cab08a800b9463bdb881b2edeed77dc677b0a5915e6106e91a58", size = 37266, upload-time = "2025-08-12T05:52:56.531Z" }, + { url = "https://files.pythonhosted.org/packages/77/ed/dd5cf21aec36c80443c6f900449260b80e2a65cf963668eaef3b9accce36/wrapt-1.17.3-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:ed7c635ae45cfbc1a7371f708727bf74690daedc49b4dba310590ca0bd28aa8a", size = 56544, upload-time = "2025-08-12T05:51:51.109Z" }, + { url = 
"https://files.pythonhosted.org/packages/8d/96/450c651cc753877ad100c7949ab4d2e2ecc4d97157e00fa8f45df682456a/wrapt-1.17.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:249f88ed15503f6492a71f01442abddd73856a0032ae860de6d75ca62eed8067", size = 40283, upload-time = "2025-08-12T05:51:39.912Z" }, + { url = "https://files.pythonhosted.org/packages/d1/86/2fcad95994d9b572db57632acb6f900695a648c3e063f2cd344b3f5c5a37/wrapt-1.17.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:5a03a38adec8066d5a37bea22f2ba6bbf39fcdefbe2d91419ab864c3fb515454", size = 40366, upload-time = "2025-08-12T05:52:00.693Z" }, + { url = "https://files.pythonhosted.org/packages/64/0e/f4472f2fdde2d4617975144311f8800ef73677a159be7fe61fa50997d6c0/wrapt-1.17.3-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:5d4478d72eb61c36e5b446e375bbc49ed002430d17cdec3cecb36993398e1a9e", size = 108571, upload-time = "2025-08-12T05:52:44.521Z" }, + { url = "https://files.pythonhosted.org/packages/cc/01/9b85a99996b0a97c8a17484684f206cbb6ba73c1ce6890ac668bcf3838fb/wrapt-1.17.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:223db574bb38637e8230eb14b185565023ab624474df94d2af18f1cdb625216f", size = 113094, upload-time = "2025-08-12T05:52:22.618Z" }, + { url = "https://files.pythonhosted.org/packages/25/02/78926c1efddcc7b3aa0bc3d6b33a822f7d898059f7cd9ace8c8318e559ef/wrapt-1.17.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e405adefb53a435f01efa7ccdec012c016b5a1d3f35459990afc39b6be4d5056", size = 110659, upload-time = "2025-08-12T05:52:24.057Z" }, + { url = "https://files.pythonhosted.org/packages/dc/ee/c414501ad518ac3e6fe184753632fe5e5ecacdcf0effc23f31c1e4f7bfcf/wrapt-1.17.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:88547535b787a6c9ce4086917b6e1d291aa8ed914fdd3a838b3539dc95c12804", size = 106946, upload-time = "2025-08-12T05:52:45.976Z" }, + { url = "https://files.pythonhosted.org/packages/be/44/a1bd64b723d13bb151d6cc91b986146a1952385e0392a78567e12149c7b4/wrapt-1.17.3-cp314-cp314t-win32.whl", hash = "sha256:41b1d2bc74c2cac6f9074df52b2efbef2b30bdfe5f40cb78f8ca22963bc62977", size = 38717, upload-time = "2025-08-12T05:53:15.214Z" }, + { url = "https://files.pythonhosted.org/packages/79/d9/7cfd5a312760ac4dd8bf0184a6ee9e43c33e47f3dadc303032ce012b8fa3/wrapt-1.17.3-cp314-cp314t-win_amd64.whl", hash = "sha256:73d496de46cd2cdbdbcce4ae4bcdb4afb6a11234a1df9c085249d55166b95116", size = 41334, upload-time = "2025-08-12T05:53:14.178Z" }, + { url = "https://files.pythonhosted.org/packages/46/78/10ad9781128ed2f99dbc474f43283b13fea8ba58723e98844367531c18e9/wrapt-1.17.3-cp314-cp314t-win_arm64.whl", hash = "sha256:f38e60678850c42461d4202739f9bf1e3a737c7ad283638251e79cc49effb6b6", size = 38471, upload-time = "2025-08-12T05:52:57.784Z" }, + { url = "https://files.pythonhosted.org/packages/1f/f6/a933bd70f98e9cf3e08167fc5cd7aaaca49147e48411c0bd5ae701bb2194/wrapt-1.17.3-py3-none-any.whl", hash = "sha256:7171ae35d2c33d326ac19dd8facb1e82e5fd04ef8c6c0e394d7af55a55051c22", size = 23591, upload-time = "2025-08-12T05:53:20.674Z" }, +] + [[package]] name = "yarl" version = "1.22.0"