diff --git a/CLAUDE.md b/CLAUDE.md new file mode 100644 index 000000000..92ab22596 --- /dev/null +++ b/CLAUDE.md @@ -0,0 +1,108 @@ +# CLAUDE.md — Temporal Python SDK + +## CI Pipeline + +CI is defined in `.github/workflows/ci.yml`. The main jobs are: + +### `build-lint-test` (matrix: Python 3.10/3.14 x multiple OS) +1. `poe build-develop` — builds the Rust bridge via maturin +2. `poe lint` — runs ALL of the following (defined in `pyproject.toml [tool.poe.tasks]`): + - `uv run ruff check --select I` — import sorting + - `uv run ruff format --check` — code formatting + - `uv run pyright` — type checking (whole repo) + - `uv run mypy --namespace-packages --check-untyped-defs .` — type checking (whole repo) + - `uv run basedpyright` — stricter type checking (whole repo, catches more than pyright) + - `uv run pydocstyle --ignore-decorators=overload` — docstring style +3. `poe test` — runs `uv run pytest` +4. Time-skipping tests (non-ARM only) + +### `test-latest-deps` (ubuntu, Python 3.13, upgraded deps) +Same as above but with `uv lock --upgrade` first. + +### `features-tests` +Runs the `temporalio/features` repo tests against this branch. + +## Before Pushing + +Always run the full lint suite locally before pushing: +``` +uv run ruff check --select I +uv run ruff format --check +uv run pyright +uv run mypy --namespace-packages --check-untyped-defs . +uv run basedpyright +uv run pydocstyle --ignore-decorators=overload +``` + +Or equivalently: `poe lint` (requires `poe build-develop` first). + +To auto-fix formatting: `poe format` (runs `ruff check --select I --fix` + `ruff format`). + +## Dev Commands + +All commands use `uv run` prefix. Key poe tasks: +- `poe build-develop` — build Rust bridge (required before lint/test) +- `poe format` — auto-fix formatting +- `poe lint` — run all linters +- `poe test` — run pytest + +## Team Workflow + +This repo uses **agent teams** (not subagents with worktrees). Delegate coding tasks to the `coder` teammate via `SendMessage`. 
+ +**All agents (team-lead and teammates) must load the `temporal-developer` skill** at the start of any task. This provides Temporal-specific guidance for workflows, activities, signals, queries, updates, Nexus, and SDK patterns. + +### What coder CAN do +- Read/explore code (Glob, Grep, Read) +- Edit and write files (Edit, Write) +- Spawn sub-agents for exploration + +### What coder CANNOT do — team-lead must handle +- **Run tests** — `uv run pytest` has no `--prefix` equivalent, and `cd` doesn't persist across Bash calls. +- **Run lints** — same reason (`uv run ruff`, `uv run pyright`, etc.). +- **Git operations** — commits, pushes, branch management. + +### Writing teammate prompts +Be thorough and explicit upfront — don't rely on correcting teammates after launch. Every prompt to coder should include: +- **What to do** — the specific task, relevant file paths, and expected outcome. +- **What NOT to do** — explicitly state that coder cannot run tests or lints. Don't let them try and fail. +- **Operational constraints** — remind them: no compound Bash commands, no `git` commands, no `uv run`. Use `Edit`/`Write`/`Read`/`Glob`/`Grep` only. +- **Load the `temporal-developer` skill** — remind teammates to invoke it at the start of their task. +- **Dev environment context** — whether the Rust bridge is built, which branch they're on, any known lint pitfalls (e.g., basedpyright strictness). +- **Reference material** — point to existing patterns in the codebase (file paths and line numbers) rather than describing from memory. + +### Workflow +1. **Team-lead** sends task to coder with a thorough prompt (see above). +2. **Coder** explores, writes code, reports back. +3. **Team-lead** runs all lints and tests, reports failures back to coder for fixes. +4. **Team-lead** commits and pushes after user approval. + +### Context management +- Delegate aggressively to preserve your context window. 
+- Do not duplicate work your teammate is doing (don't read the same files they're exploring). +- When coder reports back, trust their findings — don't re-verify unless something seems off. + +## CI Lint Details + +`basedpyright` is the strictest linter and the most common source of CI failures. It catches things the others miss: +- `reportDeprecated` — flags use of deprecated APIs +- `reportUnusedParameter` — unused function parameters +- `reportMissingSuperCall` — missing `super().__init__()` calls +- `reportUninitializedInstanceVariable` — instance vars not set in `__init__` + +Always run `uv run basedpyright` locally before pushing. If it passes, the other type checkers will almost certainly pass too. + +## Time-Skipping Tests + +CI runs tests twice: `poe test` (normal mode) and `poe test --workflow-environment time-skipping` (non-ARM only). The time-skipping test server has a **known limitation: it does not persist headers**. This means any test that depends on header propagation (e.g., tracing context) will fail in time-skipping mode. The established pattern for handling this is: + +```python +if env.supports_time_skipping: + pytest.skip("Time skipping server doesn't persist headers.") +``` + +See `tests/worker/test_workflow.py:8249` for the existing precedent. + +## Branch Naming + +Temporal convention: prepend `maplexu/` to branch names. diff --git a/pyproject.toml b/pyproject.toml index 7a2df7ea8..fafba59f1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -64,6 +64,7 @@ dev = [ "openinference-instrumentation-google-adk>=0.1.8", "googleapis-common-protos==1.70.0", "pytest-rerunfailures>=16.1", + "langsmith>=0.7.17", ] [tool.poe.tasks] diff --git a/temporalio/contrib/langsmith/__init__.py b/temporalio/contrib/langsmith/__init__.py new file mode 100644 index 000000000..465e36c19 --- /dev/null +++ b/temporalio/contrib/langsmith/__init__.py @@ -0,0 +1,14 @@ +"""LangSmith integration for Temporal SDK. 
+ +This package provides LangSmith tracing integration for Temporal workflows, +activities, and other operations. It includes automatic run creation and +context propagation for distributed tracing in LangSmith. +""" + +from temporalio.contrib.langsmith._interceptor import LangSmithInterceptor +from temporalio.contrib.langsmith._plugin import LangSmithPlugin + +__all__ = [ + "LangSmithInterceptor", + "LangSmithPlugin", +] diff --git a/temporalio/contrib/langsmith/_interceptor.py b/temporalio/contrib/langsmith/_interceptor.py new file mode 100644 index 000000000..789825251 --- /dev/null +++ b/temporalio/contrib/langsmith/_interceptor.py @@ -0,0 +1,784 @@ +"""LangSmith interceptor implementation for Temporal SDK.""" + +from __future__ import annotations + +import json +import random +import uuid +from collections.abc import Iterator, Mapping +from contextlib import contextmanager +from typing import Any, ClassVar, NoReturn + +from langsmith import tracing_context +from langsmith.run_helpers import get_current_run_tree +from langsmith.run_trees import RunTree + +import temporalio.activity +import temporalio.client +import temporalio.converter +import temporalio.worker +import temporalio.workflow +from temporalio.api.common.v1 import Payload +from temporalio.exceptions import ApplicationError, ApplicationErrorCategory + +# --------------------------------------------------------------------------- +# Constants +# --------------------------------------------------------------------------- + +HEADER_KEY = "_temporal-langsmith-context" + +# --------------------------------------------------------------------------- +# Context helpers +# --------------------------------------------------------------------------- + +_payload_converter = temporalio.converter.PayloadConverter.default + + +def _inject_context( + headers: Mapping[str, Payload], + run_tree: Any, +) -> dict[str, Payload]: + """Inject LangSmith context into Temporal payload headers. 
+ + Serializes the run's trace context (trace ID, parent run ID, dotted order) + into a Temporal header under ``_temporal-langsmith-context``, enabling parent-child + trace nesting across process boundaries (client → worker, workflow → activity). + """ + ls_headers = run_tree.to_headers() + return { + **headers, + HEADER_KEY: _payload_converter.to_payloads([ls_headers])[0], + } + + +def _get_current_run_safe() -> RunTree | None: + """Get the current ambient LangSmith run tree.""" + return get_current_run_tree() + + +def _inject_current_context( + headers: Mapping[str, Payload], +) -> Mapping[str, Payload]: + """Inject the current ambient LangSmith context into Temporal payload headers. + + Reads ``get_current_run_tree()`` and injects if present. Returns headers + unchanged if no context is active. Called unconditionally so that context + propagation is independent of the ``add_temporal_runs`` toggle. + """ + current = get_current_run_tree() + if current is not None: + return _inject_context(headers, current) + return headers + + +def _extract_context( + headers: Mapping[str, Payload], +) -> Any | None: + """Extract LangSmith context from Temporal payload headers. + + Reconstructs a :class:`RunTree` from the ``_temporal-langsmith-context`` header on + the receiving side, wrapped in a :class:`ReplaySafeRunTree` so inbound + interceptors can establish a parent-child relationship with the sender's + run. Returns ``None`` if no header is present. 
+ """ + header = headers.get(HEADER_KEY) + if not header: + return None + ls_headers = _payload_converter.from_payloads([header])[0] + run = RunTree.from_headers(ls_headers) + return ReplaySafeRunTree(run) if run else None + + +def _inject_nexus_context( + headers: dict[str, str], + run_tree: Any, +) -> dict[str, str]: + """Inject LangSmith context into Nexus string headers.""" + ls_headers = run_tree.to_headers() + return { + **headers, + HEADER_KEY: json.dumps(ls_headers), + } + + +def _extract_nexus_context( + headers: dict[str, str], +) -> Any | None: + """Extract LangSmith context from Nexus string headers.""" + raw = headers.get(HEADER_KEY) + if not raw: + return None + ls_headers = json.loads(raw) + run = RunTree.from_headers(ls_headers) + return ReplaySafeRunTree(run) if run else None + + +# --------------------------------------------------------------------------- +# Sandbox safety: patch @traceable's aio_to_thread +# --------------------------------------------------------------------------- + +_aio_to_thread_patched = False + + +def _patch_aio_to_thread() -> None: + """Patch langsmith's ``aio_to_thread`` to run synchronously in workflows. + + The ``@traceable`` decorator uses ``aio_to_thread()`` → + ``loop.run_in_executor()`` for run setup/teardown. The Temporal workflow + sandbox blocks ``run_in_executor``. This patch runs those functions + synchronously (they are CPU-bound, no I/O) when inside a workflow. 
+ """ + global _aio_to_thread_patched # noqa: PLW0603 + if _aio_to_thread_patched: + return + + import langsmith._internal._aiter as _aiter + + _original = _aiter.aio_to_thread + + import contextvars + + async def _safe_aio_to_thread( + func: Any, + /, + *args: Any, + __ctx: contextvars.Context | None = None, + **kwargs: Any, + ) -> Any: + if not temporalio.workflow.in_workflow(): + return await _original(func, *args, __ctx=__ctx, **kwargs) + with temporalio.workflow.unsafe.sandbox_unrestricted(): + # During replay, disable tracing so @traceable calls don't + # produce duplicate traces for code that already ran. + # Run func directly in the current context (no ctx.run) so + # that context var changes (e.g. _PARENT_RUN_TREE set by + # @traceable's _setup_run) propagate to the caller. + # This is safe because workflows are single-threaded. + if _is_replaying(): + with tracing_context(enabled=False): + return func(*args, **kwargs) + return func(*args, **kwargs) + + _aiter.aio_to_thread = _safe_aio_to_thread # type: ignore[assignment] + _aio_to_thread_patched = True + + +# --------------------------------------------------------------------------- +# Replay safety +# --------------------------------------------------------------------------- + + +def _is_replaying() -> bool: + """Check if we're currently replaying workflow history.""" + return ( + temporalio.workflow.in_workflow() + and temporalio.workflow.unsafe.is_replaying_history_events() + ) + + +def _get_workflow_random() -> random.Random | None: + """Get a deterministic random generator for the current workflow. + + Follows the OTel pattern: creates a workflow-safe random generator once + via ``workflow.new_random()`` and stores it on the workflow instance so + subsequent calls return the same generator. The generator is seeded from + the workflow's deterministic seed, so it produces identical UUIDs across + replays and eviction/restart cycles. 
+ + Returns ``None`` outside a workflow, in read-only (query) contexts, or + when workflow APIs are mocked (unit tests). + """ + try: + if not temporalio.workflow.in_workflow(): + return None + if temporalio.workflow.unsafe.is_read_only(): + return None + inst = temporalio.workflow.instance() + rng = getattr(inst, "__temporal_langsmith_random", None) + if rng is None: + rng = temporalio.workflow.new_random() + setattr(inst, "__temporal_langsmith_random", rng) + return rng + except Exception: + return None + + +def _uuid_from_random(rng: random.Random) -> uuid.UUID: + """Generate a deterministic UUID4 from a workflow-bound random generator.""" + return uuid.UUID(int=rng.getrandbits(128), version=4) + + +# --------------------------------------------------------------------------- +# ReplaySafeRunTree wrapper +# --------------------------------------------------------------------------- + + +class ReplaySafeRunTree(RunTree): + """Wrapper around a :class:`RunTree` with replay-safe ``post``, ``end``, and ``patch``. + + Inherits from :class:`RunTree` so ``isinstance`` checks pass, but does + **not** call ``super().__init__()``—the wrapped ``_run`` is the real + RunTree. Attribute access is delegated via ``__getattr__``/``__setattr__``. + + During replay, ``post()``, ``end()``, and ``patch()`` become no-ops. + Inside a workflow sandbox, these methods are wrapped in + ``sandbox_unrestricted()``. 
+ """ + + def __init__(self, run_tree: RunTree) -> None: # pyright: ignore[reportMissingSuperCall] + """Wrap an existing RunTree with replay-safe overrides.""" + object.__setattr__(self, "_run", run_tree) + + def __getattr__(self, name: str) -> Any: + """Delegate attribute access to the wrapped RunTree.""" + return getattr(self._run, name) + + def __setattr__(self, name: str, value: Any) -> None: + """Delegate attribute setting to the wrapped RunTree.""" + setattr(self._run, name, value) + + def to_headers(self) -> dict[str, Any]: + """Delegate to the wrapped RunTree's to_headers.""" + return self._run.to_headers() + + def post(self, exclude_child_runs: bool = True) -> None: + """Post the run to LangSmith, skipping during replay.""" + if _is_replaying(): + return + if temporalio.workflow.in_workflow(): + with temporalio.workflow.unsafe.sandbox_unrestricted(): + self._run.post(exclude_child_runs=exclude_child_runs) + else: + self._run.post(exclude_child_runs=exclude_child_runs) + + def end(self, **kwargs: Any) -> None: + """End the run, skipping during replay.""" + if _is_replaying(): + return + if temporalio.workflow.in_workflow(): + with temporalio.workflow.unsafe.sandbox_unrestricted(): + self._run.end(**kwargs) + else: + self._run.end(**kwargs) + + def patch(self, *, exclude_inputs: bool = False) -> None: + """Patch the run to LangSmith, skipping during replay.""" + if _is_replaying(): + return + if temporalio.workflow.in_workflow(): + with temporalio.workflow.unsafe.sandbox_unrestricted(): + self._run.patch(exclude_inputs=exclude_inputs) + else: + self._run.patch(exclude_inputs=exclude_inputs) + + +# --------------------------------------------------------------------------- +# _maybe_run context manager +# --------------------------------------------------------------------------- + + +def _is_benign_error(exc: Exception) -> bool: + """Check if an exception is a benign ApplicationError.""" + return ( + isinstance(exc, ApplicationError) + and getattr(exc, 
"category", None) == ApplicationErrorCategory.BENIGN + ) + + +@contextmanager +def _maybe_run( + client: Any, + name: str, + *, + add_temporal_runs: bool, + run_type: str = "chain", + inputs: dict[str, Any] | None = None, + metadata: dict[str, Any] | None = None, + tags: list[str] | None = None, + parent: Any | None = None, + project_name: str | None = None, +) -> Iterator[Any | None]: + """Create a LangSmith run, handling errors. + + - If add_temporal_runs is False, yields None (no run created). + Context propagation is handled unconditionally by callers. + - When a run IS created, uses :class:`ReplaySafeRunTree` for + replay and sandbox safety, then sets it as ambient context via + ``tracing_context(parent=run_tree)`` so ``get_current_run_tree()`` + returns it and ``_inject_current_context()`` can inject it. + - On exception: marks run as errored (unless benign ApplicationError), re-raises. + + Args: + client: LangSmith client instance. + name: Display name for the run. + add_temporal_runs: Whether to create Temporal-level trace runs. + run_type: LangSmith run type (default ``"chain"``). + inputs: Input data to record on the run. + metadata: Extra metadata to attach to the run. + tags: Tags to attach to the run. + parent: Parent run for nesting. + project_name: LangSmith project name override. + """ + if not add_temporal_runs: + yield None + return + + # If no explicit parent, inherit from ambient @traceable context + if parent is None: + parent = _get_current_run_safe() + + kwargs: dict[str, Any] = dict( + name=name, + run_type=run_type, + inputs=inputs or {}, + ls_client=client, + ) + # Deterministic IDs and start times in workflow context so that runs + # survive eviction/replay with max_cached_workflows=0. Uses a + # workflow-bound random generator (following the OTel pattern) to + # produce identical UUIDs across replays and worker restarts. 
+ rng = _get_workflow_random() + if rng is not None: + kwargs["id"] = _uuid_from_random(rng) + kwargs["start_time"] = temporalio.workflow.now() + elif temporalio.workflow.in_workflow(): + # Read-only context (e.g. query handler) — use workflow.uuid4() + try: + kwargs["id"] = temporalio.workflow.uuid4() + kwargs["start_time"] = temporalio.workflow.now() + except Exception: + pass # Not in a real workflow context (e.g., unit test mock) + if project_name is not None: + kwargs["project_name"] = project_name + if parent is not None: + # Unwrap ReplaySafeRunTree so RunTree gets the real parent + kwargs["parent_run"] = ( + parent._run if isinstance(parent, ReplaySafeRunTree) else parent + ) + if metadata: + kwargs["extra"] = {"metadata": metadata} + if tags: + kwargs["tags"] = tags + run_tree = ReplaySafeRunTree(RunTree(**kwargs)) + run_tree.post() + try: + with tracing_context(parent=run_tree, client=client): + yield run_tree + except Exception as exc: + if not _is_benign_error(exc): + run_tree.end(error=f"{type(exc).__name__}: {exc}") + run_tree.patch() + raise + else: + run_tree.end(outputs={"status": "ok"}) + run_tree.patch() + + +# --------------------------------------------------------------------------- +# LangSmithInterceptor +# --------------------------------------------------------------------------- + + +class LangSmithInterceptor( + temporalio.client.Interceptor, temporalio.worker.Interceptor +): + """Interceptor that supports client and worker LangSmith run creation + and context propagation. 
+ """ + + def __init__( + self, + *, + client: Any | None = None, + project_name: str | None = None, + add_temporal_runs: bool = False, + default_metadata: dict[str, Any] | None = None, + default_tags: list[str] | None = None, + ) -> None: + """Initialize the LangSmith interceptor with tracing configuration.""" + super().__init__() + # Import langsmith.Client lazily to avoid hard dependency at import time + if client is None: + import langsmith + + client = langsmith.Client() + self._client = client + self._project_name = project_name + self._add_temporal_runs = add_temporal_runs + self._default_metadata = default_metadata or {} + self._default_tags = default_tags or [] + + @contextmanager + def maybe_run( + self, + name: str, + *, + run_type: str = "chain", + parent: Any | None = None, + extra_metadata: dict[str, Any] | None = None, + ) -> Iterator[Any | None]: + """Create a LangSmith run with this interceptor's config already applied.""" + metadata = {**self._default_metadata, **(extra_metadata or {})} + with _maybe_run( + self._client, + name, + add_temporal_runs=self._add_temporal_runs, + run_type=run_type, + metadata=metadata, + tags=list(self._default_tags), + parent=parent, + project_name=self._project_name, + ) as run: + yield run + + def intercept_client( + self, next: temporalio.client.OutboundInterceptor + ) -> temporalio.client.OutboundInterceptor: + """Create a client outbound interceptor for LangSmith tracing.""" + return _LangSmithClientOutboundInterceptor(next, self) + + def intercept_activity( + self, next: temporalio.worker.ActivityInboundInterceptor + ) -> temporalio.worker.ActivityInboundInterceptor: + """Create an activity inbound interceptor for LangSmith tracing.""" + return _LangSmithActivityInboundInterceptor(next, self) + + def workflow_interceptor_class( + self, input: temporalio.worker.WorkflowInterceptorClassInput + ) -> type[_LangSmithWorkflowInboundInterceptor]: + """Return the workflow interceptor class with config bound.""" + 
_patch_aio_to_thread() + config = self + + class InterceptorWithConfig(_LangSmithWorkflowInboundInterceptor): + _config = config + + return InterceptorWithConfig + + def intercept_nexus_operation( + self, next: temporalio.worker.NexusOperationInboundInterceptor + ) -> temporalio.worker.NexusOperationInboundInterceptor: + """Create a Nexus operation inbound interceptor for LangSmith tracing.""" + return _LangSmithNexusOperationInboundInterceptor(next, self) + + +# --------------------------------------------------------------------------- +# Client Outbound Interceptor +# --------------------------------------------------------------------------- + + +class _LangSmithClientOutboundInterceptor(temporalio.client.OutboundInterceptor): + """Instruments all client-side calls with LangSmith runs.""" + + def __init__( + self, + next: temporalio.client.OutboundInterceptor, + config: LangSmithInterceptor, + ) -> None: + super().__init__(next) + self._config = config + + @contextmanager + def _traced_call(self, name: str, input: Any) -> Iterator[None]: + """Wrap a client call with a LangSmith run and inject context into headers.""" + with self._config.maybe_run(name): + input.headers = _inject_current_context(input.headers) + yield + + async def start_workflow(self, input: Any) -> Any: + prefix = "SignalWithStartWorkflow" if input.start_signal else "StartWorkflow" + with self._traced_call(f"{prefix}:{input.workflow}", input): + return await super().start_workflow(input) + + async def query_workflow(self, input: Any) -> Any: + with self._traced_call(f"QueryWorkflow:{input.query}", input): + return await super().query_workflow(input) + + async def signal_workflow(self, input: Any) -> None: + with self._traced_call(f"SignalWorkflow:{input.signal}", input): + return await super().signal_workflow(input) + + async def start_workflow_update(self, input: Any) -> Any: + with self._traced_call(f"StartWorkflowUpdate:{input.update}", input): + return await 
super().start_workflow_update(input) + + async def start_update_with_start_workflow(self, input: Any) -> Any: + with self._config.maybe_run( + f"StartUpdateWithStartWorkflow:{input.start_workflow_input.workflow}", + ): + input.start_workflow_input.headers = _inject_current_context( + input.start_workflow_input.headers + ) + input.update_workflow_input.headers = _inject_current_context( + input.update_workflow_input.headers + ) + return await super().start_update_with_start_workflow(input) + + +# --------------------------------------------------------------------------- +# Activity Inbound Interceptor +# --------------------------------------------------------------------------- + + +class _LangSmithActivityInboundInterceptor( + temporalio.worker.ActivityInboundInterceptor +): + """Instruments activity execution with LangSmith runs.""" + + def __init__( + self, + next: temporalio.worker.ActivityInboundInterceptor, + config: LangSmithInterceptor, + ) -> None: + super().__init__(next) + self._config = config + + async def execute_activity(self, input: Any) -> Any: + parent = _extract_context(input.headers) + info = temporalio.activity.info() + extra_metadata = { + "temporalWorkflowID": info.workflow_id or "", + "temporalRunID": info.workflow_run_id or "", + "temporalActivityID": info.activity_id or "", + } + # Unconditionally set tracing context so @traceable functions inside + # activities can use the plugin's LangSmith client and inherit parent. + # When add_temporal_runs=True: maybe_run overrides with the RunActivity run. + # When add_temporal_runs=False: parent (if any) remains active for @traceable, + # and the client is available even without a parent. + # Override the parent's ls_client so @traceable children (via create_child) + # use the plugin's client rather than lazily creating a real one. 
+ if parent is not None and hasattr(parent, "ls_client"): + parent.ls_client = self._config._client + ctx_kwargs: dict[str, Any] = { + "client": self._config._client, + "enabled": True, + } + if parent: + ctx_kwargs["parent"] = parent + with tracing_context(**ctx_kwargs): + with self._config.maybe_run( + f"RunActivity:{info.activity_type}", + run_type="tool", + parent=parent, + extra_metadata=extra_metadata, + ): + return await super().execute_activity(input) + + +# --------------------------------------------------------------------------- +# Workflow Inbound Interceptor +# --------------------------------------------------------------------------- + + +class _LangSmithWorkflowInboundInterceptor( + temporalio.worker.WorkflowInboundInterceptor +): + """Instruments workflow execution with LangSmith runs.""" + + _config: ClassVar[LangSmithInterceptor] + _current_run: Any | None = None + + def init(self, outbound: temporalio.worker.WorkflowOutboundInterceptor) -> None: + super().init( + _LangSmithWorkflowOutboundInterceptor(outbound, self._config, self) + ) + + @contextmanager + def _workflow_maybe_run( + self, + name: str, + headers: Mapping[str, Payload] | None = None, + *, + is_handler: bool = False, + ) -> Iterator[Any | None]: + """Workflow-specific run creation with metadata. + + Extracts parent from headers (if provided) and stores the run (or parent + fallback) as ``_current_run`` so the outbound interceptor can propagate + context even when ``add_temporal_runs=False``. + + Always sets up ``tracing_context`` so ``@traceable`` functions called + from workflow code can discover the parent and LangSmith client, + independent of the ``add_temporal_runs`` toggle. + + When ``is_handler`` is True and no LangSmith context is found in + headers, skips trace creation if a workflow run is already active + (``_current_run`` is set). This suppresses orphan traces from + uninstrumented client operations (e.g. 
query polling) while still + allowing handler traces when invoked with propagated context. + """ + parent = _extract_context(headers) if headers else None + if parent is not None: + parent.ls_client = self._config._client + # Handler from an uninstrumented client during workflow execution: + # no LangSmith headers but _current_run is set. Skip trace creation + # to avoid orphan/duplicate handler traces (e.g. query polling). + if is_handler and parent is None and self._current_run is not None: + yield None + return + info = temporalio.workflow.info() + extra_metadata = { + "temporalWorkflowID": info.workflow_id, + "temporalRunID": info.run_id, + } + # Set up tracing context for @traceable functions inside the workflow. + # When add_temporal_runs=True, _maybe_run overrides with the + # RunWorkflow run as parent. When False, this outer context ensures + # @traceable still sees the propagated parent from headers. + ctx_kwargs: dict[str, Any] = { + "client": self._config._client, + "enabled": True, + } + if parent: + ctx_kwargs["parent"] = parent + with tracing_context(**ctx_kwargs): + with self._config.maybe_run( + name, + parent=parent, + extra_metadata=extra_metadata, + ) as run: + prev_run = self._current_run + self._current_run = run or parent + try: + yield run + finally: + self._current_run = prev_run + + async def execute_workflow(self, input: Any) -> Any: + wf_type = temporalio.workflow.info().workflow_type + with self._workflow_maybe_run( + f"RunWorkflow:{wf_type}", + input.headers, + ): + return await super().execute_workflow(input) + + async def handle_signal(self, input: Any) -> None: + with self._workflow_maybe_run( + f"HandleSignal:{input.signal}", input.headers, is_handler=True + ): + return await super().handle_signal(input) + + async def handle_query(self, input: Any) -> Any: + with self._workflow_maybe_run( + f"HandleQuery:{input.query}", input.headers, is_handler=True + ): + return await super().handle_query(input) + + def 
handle_update_validator(self, input: Any) -> None: + with self._workflow_maybe_run( + f"ValidateUpdate:{input.update}", input.headers, is_handler=True + ): + return super().handle_update_validator(input) + + async def handle_update_handler(self, input: Any) -> Any: + with self._workflow_maybe_run( + f"HandleUpdate:{input.update}", input.headers, is_handler=True + ): + return await super().handle_update_handler(input) + + +# --------------------------------------------------------------------------- +# Workflow Outbound Interceptor +# --------------------------------------------------------------------------- + + +class _LangSmithWorkflowOutboundInterceptor( + temporalio.worker.WorkflowOutboundInterceptor +): + """Instruments all outbound calls from workflow code.""" + + def __init__( + self, + next: temporalio.worker.WorkflowOutboundInterceptor, + config: LangSmithInterceptor, + inbound: _LangSmithWorkflowInboundInterceptor, + ) -> None: + super().__init__(next) + self._config = config + self._inbound = inbound + + @contextmanager + def _traced_outbound(self, name: str, input: Any) -> Iterator[Any | None]: + """Outbound workflow run creation with context injection into input.headers.""" + with self._config.maybe_run(name, parent=self._inbound._current_run) as run: + context_source = run or self._inbound._current_run + if context_source: + input.headers = _inject_context(input.headers, context_source) + yield run + + def start_activity(self, input: Any) -> Any: + with self._traced_outbound(f"StartActivity:{input.activity}", input): + return super().start_activity(input) + + def start_local_activity(self, input: Any) -> Any: + with self._traced_outbound(f"StartActivity:{input.activity}", input): + return super().start_local_activity(input) + + async def start_child_workflow(self, input: Any) -> Any: + with self._traced_outbound(f"StartChildWorkflow:{input.workflow}", input): + return await super().start_child_workflow(input) + + async def signal_child_workflow(self, 
input: Any) -> None: + with self._traced_outbound(f"SignalChildWorkflow:{input.signal}", input): + return await super().signal_child_workflow(input) + + async def signal_external_workflow(self, input: Any) -> None: + with self._traced_outbound(f"SignalExternalWorkflow:{input.signal}", input): + return await super().signal_external_workflow(input) + + def continue_as_new(self, input: Any) -> NoReturn: + # No trace created, but inject context from inbound's current run + current_run = getattr(self._inbound, "_current_run", None) + if current_run: + input.headers = _inject_context(input.headers, current_run) + super().continue_as_new(input) + + async def start_nexus_operation(self, input: Any) -> Any: + with self._config.maybe_run( + f"StartNexusOperation:{input.service}/{input.operation_name}", + parent=self._inbound._current_run, + ) as run: + context_source = run or self._inbound._current_run + if context_source: + input.headers = _inject_nexus_context( + input.headers or {}, context_source + ) + return await super().start_nexus_operation(input) + + +# --------------------------------------------------------------------------- +# Nexus Operation Inbound Interceptor +# --------------------------------------------------------------------------- + + +class _LangSmithNexusOperationInboundInterceptor( + temporalio.worker.NexusOperationInboundInterceptor +): + """Instruments Nexus operations with LangSmith runs.""" + + def __init__( + self, + next: temporalio.worker.NexusOperationInboundInterceptor, + config: LangSmithInterceptor, + ) -> None: + super().__init__(next) + self._config = config + + async def execute_nexus_operation_start(self, input: Any) -> Any: + parent = _extract_nexus_context(input.ctx.headers) + with self._config.maybe_run( + f"RunStartNexusOperationHandler:{input.ctx.service}/{input.ctx.operation}", + run_type="tool", + parent=parent, + ): + return await self.next.execute_nexus_operation_start(input) + + async def execute_nexus_operation_cancel(self, 
input: Any) -> Any: + parent = _extract_nexus_context(input.ctx.headers) + with self._config.maybe_run( + f"RunCancelNexusOperationHandler:{input.ctx.service}/{input.ctx.operation}", + run_type="tool", + parent=parent, + ): + return await self.next.execute_nexus_operation_cancel(input) diff --git a/temporalio/contrib/langsmith/_plugin.py b/temporalio/contrib/langsmith/_plugin.py new file mode 100644 index 000000000..4fac4d782 --- /dev/null +++ b/temporalio/contrib/langsmith/_plugin.py @@ -0,0 +1,77 @@ +"""LangSmith plugin for Temporal SDK.""" + +from __future__ import annotations + +import dataclasses +from typing import Any + +from temporalio.contrib.langsmith._interceptor import LangSmithInterceptor +from temporalio.plugin import SimplePlugin +from temporalio.worker import WorkflowRunner +from temporalio.worker.workflow_sandbox import SandboxedWorkflowRunner + + +class LangSmithPlugin(SimplePlugin): + """LangSmith tracing plugin for Temporal SDK. + + Provides automatic LangSmith run creation for workflows, activities, + and other Temporal operations with context propagation. + """ + + def __init__( + self, + *, + client: Any | None = None, + project_name: str | None = None, + add_temporal_runs: bool = False, + metadata: dict[str, Any] | None = None, + tags: list[str] | None = None, + ) -> None: + """Initialize the LangSmith plugin. + + Args: + client: A langsmith.Client instance. If None, one will be created + lazily (using LANGSMITH_API_KEY env var). + project_name: LangSmith project name for traces. + add_temporal_runs: Whether to create LangSmith runs for Temporal + operations. Defaults to False. + metadata: Default metadata to attach to all runs. + tags: Default tags to attach to all runs. 
+ """ + interceptor = LangSmithInterceptor( + client=client, + project_name=project_name, + add_temporal_runs=add_temporal_runs, + default_metadata=metadata, + default_tags=tags, + ) + interceptors = [interceptor] + + def workflow_runner(runner: WorkflowRunner | None) -> WorkflowRunner: + if not runner: + raise ValueError("No WorkflowRunner provided to the LangSmith plugin.") + if isinstance(runner, SandboxedWorkflowRunner): + return dataclasses.replace( + runner, + restrictions=runner.restrictions.with_passthrough_modules( + "langsmith" + ), + ) + return runner + + super().__init__( + "langchain.LangSmithPlugin", + interceptors=interceptors, + workflow_runner=workflow_runner, + ) + + async def shutdown(self) -> None: + """Flush the LangSmith client to drain pending runs.""" + if not self.interceptors: + return + interceptor = self.interceptors[0] + if ( + isinstance(interceptor, LangSmithInterceptor) + and interceptor._client is not None + ): + interceptor._client.flush() diff --git a/tests/contrib/langsmith/__init__.py b/tests/contrib/langsmith/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/contrib/langsmith/conftest.py b/tests/contrib/langsmith/conftest.py new file mode 100644 index 000000000..92c6a3db5 --- /dev/null +++ b/tests/contrib/langsmith/conftest.py @@ -0,0 +1,99 @@ +"""Shared test helpers for LangSmith plugin tests.""" + +from __future__ import annotations + +from dataclasses import dataclass +from typing import Any +from unittest.mock import MagicMock + + +@dataclass +class _RunRecord: + """A single recorded run.""" + + id: str + parent_run_id: str | None + name: str + run_type: str + inputs: dict[str, Any] + outputs: dict[str, Any] | None = None + error: str | None = None + + +class InMemoryRunCollector: + """Collects runs from a mock LangSmith client. + + Each call to create_run / update_run appends or updates an entry. 
+ """ + + def __init__(self) -> None: + self.runs: list[_RunRecord] = [] + self._by_id: dict[str, _RunRecord] = {} + + def record_create(self, **kwargs: Any) -> None: + rec = _RunRecord( + id=str(kwargs.get("id", kwargs.get("run_id", ""))), + parent_run_id=( + str(kwargs["parent_run_id"]) if kwargs.get("parent_run_id") else None + ), + name=kwargs.get("name", ""), + run_type=kwargs.get("run_type", "chain"), + inputs=kwargs.get("inputs", {}), + ) + self.runs.append(rec) + self._by_id[rec.id] = rec + + def record_update(self, run_id: str, **kwargs: Any) -> None: + run_id_str = str(run_id) + rec = self._by_id.get(run_id_str) + if rec is None: + return + if "outputs" in kwargs: + rec.outputs = kwargs["outputs"] + if "error" in kwargs: + rec.error = kwargs["error"] + + def clear(self) -> None: + self.runs.clear() + self._by_id.clear() + + +def dump_runs(collector: InMemoryRunCollector) -> list[str]: + """Reconstruct parent-child hierarchy from collected runs. + + Returns a list of indented strings, e.g.: + ["StartWorkflow:MyWf", " RunWorkflow:MyWf", " StartActivity:do_thing"] + """ + runs = collector.runs + children: dict[str | None, list[_RunRecord]] = {} + for r in runs: + children.setdefault(r.parent_run_id, []).append(r) + + result: list[str] = [] + + def _walk(parent_id: str | None, depth: int) -> None: + for child in children.get(parent_id, []): + result.append(" " * depth + child.name) + _walk(child.id, depth + 1) + + # Roots: runs whose parent_run_id is None or not in our set + known_ids = {r.id for r in runs} + root_parents = { + r.parent_run_id + for r in runs + if r.parent_run_id is None or r.parent_run_id not in known_ids + } + for rp in sorted(root_parents, key=lambda x: (x is not None, x)): + _walk(rp, 0) + + return result + + +def make_mock_ls_client(collector: InMemoryRunCollector) -> MagicMock: + """Create a mock langsmith.Client wired to a collector.""" + client = MagicMock() + client.create_run.side_effect = collector.record_create + 
def make_mock_ls_client(collector: InMemoryRunCollector) -> MagicMock:
    """Create a mock langsmith.Client wired to a collector."""
    mock_client = MagicMock()
    # Route run creation/updates into the in-memory collector so tests
    # can inspect exactly what the interceptor reported.
    mock_client.create_run.side_effect = collector.record_create
    mock_client.update_run.side_effect = collector.record_update
    # Stand-ins for attributes a real langsmith.Client exposes —
    # NOTE(review): presumably accessed by the code under test; confirm.
    mock_client.session = MagicMock()
    mock_client.tracing_queue = MagicMock()
    return mock_client
+@activity.defn +async def nested_traceable_activity() -> str: + """Activity with two levels of @traceable nesting.""" + result = await _outer_chain("hello") + return result + + +# --------------------------------------------------------------------------- +# Shared workflows +# --------------------------------------------------------------------------- + + +@workflow.defn +class TraceableActivityWorkflow: + @workflow.run + async def run(self) -> str: + return await workflow.execute_activity( + traceable_activity, + start_to_close_timeout=timedelta(seconds=10), + ) + + +@workflow.defn +class SimpleNexusWorkflow: + @workflow.run + async def run(self, _input: str) -> str: + return await workflow.execute_activity( + traceable_activity, + start_to_close_timeout=timedelta(seconds=10), + ) + + +@nexusrpc.handler.service_handler +class NexusService: + @nexus.workflow_run_operation + async def run_operation( + self, ctx: nexus.WorkflowRunOperationContext, input: str + ) -> nexus.WorkflowHandle[str]: + return await ctx.start_workflow( + SimpleNexusWorkflow.run, + input, + id=f"nexus-wf-{ctx.request_id}", + ) + + +# --------------------------------------------------------------------------- +# Simple/basic workflows and activities +# --------------------------------------------------------------------------- + + +@activity.defn +async def simple_activity() -> str: + return "activity-done" + + +@workflow.defn +class SimpleWorkflow: + @workflow.run + async def run(self) -> str: + result = await workflow.execute_activity( + simple_activity, + start_to_close_timeout=timedelta(seconds=10), + ) + return result + + +# --------------------------------------------------------------------------- +# Signal/query/update workflows +# --------------------------------------------------------------------------- + + +@workflow.defn +class ComprehensiveWorkflow: + def __init__(self) -> None: + self._signal_received = False + self._waiting_for_signal = False + self._complete = False + + 
@workflow.run + async def run(self) -> str: + # Regular activity + await workflow.execute_activity( + nested_traceable_activity, + start_to_close_timeout=timedelta(seconds=10), + ) + # Local activity + await workflow.execute_local_activity( + nested_traceable_activity, + start_to_close_timeout=timedelta(seconds=10), + ) + # Direct @traceable call + await _outer_chain("from-workflow") + # Child workflow + await workflow.execute_child_workflow( + TraceableActivityWorkflow.run, + id=f"child-{workflow.info().workflow_id}", + ) + # Nexus operation + nexus_client = workflow.create_nexus_client( + endpoint=make_nexus_endpoint_name(workflow.info().task_queue), + service=NexusService, + ) + nexus_handle = await nexus_client.start_operation( + operation=NexusService.run_operation, + input="test-input", + ) + await nexus_handle + # Wait for signal + self._waiting_for_signal = True + await workflow.wait_condition(lambda: self._signal_received) + # Post-signal activity (verifies context survives signal wait) + await workflow.execute_activity( + nested_traceable_activity, + start_to_close_timeout=timedelta(seconds=10), + ) + # Wait for update to complete + await workflow.wait_condition(lambda: self._complete) + return "comprehensive-done" + + @workflow.signal + def my_signal(self, _value: str) -> None: + self._signal_received = True + + @workflow.query + def my_query(self) -> bool: + return self._signal_received + + @workflow.query + def is_waiting_for_signal(self) -> bool: + return self._waiting_for_signal + + @workflow.update + def my_update(self, value: str) -> str: + self._complete = True + return f"updated-{value}" + + @my_update.validator + def validate_my_update(self, value: str) -> None: + if not value: + raise ValueError("empty") + + +# --------------------------------------------------------------------------- +# Error workflows and activities +# --------------------------------------------------------------------------- + + +@activity.defn +async def 
failing_activity() -> str: + raise ApplicationError("activity-failed", non_retryable=True) + + +@activity.defn +async def benign_failing_activity() -> str: + from temporalio.exceptions import ApplicationErrorCategory + + raise ApplicationError( + "benign-fail", + non_retryable=True, + category=ApplicationErrorCategory.BENIGN, + ) + + +@workflow.defn +class FailingWorkflow: + @workflow.run + async def run(self) -> str: + raise ApplicationError("workflow-failed", non_retryable=True) + + +@workflow.defn +class ActivityFailureWorkflow: + @workflow.run + async def run(self) -> str: + return await workflow.execute_activity( + failing_activity, + start_to_close_timeout=timedelta(seconds=10), + retry_policy=common.RetryPolicy(maximum_attempts=1), + ) + + +@workflow.defn +class BenignErrorWorkflow: + @workflow.run + async def run(self) -> str: + return await workflow.execute_activity( + benign_failing_activity, + start_to_close_timeout=timedelta(seconds=10), + retry_policy=common.RetryPolicy(maximum_attempts=1), + ) + + +# --------------------------------------------------------------------------- +# Helpers +# --------------------------------------------------------------------------- + + +def _make_plugin_and_collector( + **kwargs: Any, +) -> tuple[LangSmithPlugin, InMemoryRunCollector, MagicMock]: + """Create a LangSmithPlugin wired to an InMemoryRunCollector via mock client.""" + collector = InMemoryRunCollector() + mock_ls_client = make_mock_ls_client(collector) + plugin = LangSmithPlugin(client=mock_ls_client, **kwargs) + return plugin, collector, mock_ls_client + + +def _make_client_and_collector( + client: Client, **kwargs: Any +) -> tuple[Client, InMemoryRunCollector, MagicMock]: + """Create a Temporal Client with LangSmith plugin and an InMemoryRunCollector.""" + plugin, collector, mock_ls_client = _make_plugin_and_collector(**kwargs) + config = client.config() + config["plugins"] = [plugin] + return Client(**config), collector, mock_ls_client + + +def 
_make_temporal_client( + client: Client, mock_ls_client: MagicMock, **kwargs: Any +) -> Client: + """Create a Temporal Client with a fresh LangSmith plugin.""" + plugin = LangSmithPlugin(client=mock_ls_client, **kwargs) + config = client.config() + config["plugins"] = [plugin] + return Client(**config) + + +async def _poll_query( + handle: Any, + query: Any, + *, + expected: Any = True, + timeout_secs: float = 10.0, + interval_secs: float = 0.2, +) -> bool: + """Poll a workflow query until it returns the expected value or times out.""" + deadline = asyncio.get_event_loop().time() + timeout_secs + while asyncio.get_event_loop().time() < deadline: + try: + result = await handle.query(query) + if result == expected: + return True + except (WorkflowQueryFailedError, RPCError): + pass # Query not yet available (workflow hasn't started) + await asyncio.sleep(interval_secs) + return False + + +# --------------------------------------------------------------------------- +# TestBasicTracing +# --------------------------------------------------------------------------- + + +class TestBasicTracing: + async def test_workflow_activity_trace_hierarchy( + self, + client: Client, + env: WorkflowEnvironment, # type:ignore[reportUnusedParameter] + ) -> None: + """StartWorkflow → RunWorkflow → StartActivity → RunActivity hierarchy.""" + temporal_client, collector, _ = _make_client_and_collector( + client, add_temporal_runs=True + ) + + async with new_worker( + temporal_client, + SimpleWorkflow, + activities=[simple_activity], + max_cached_workflows=0, + ) as worker: + result = await temporal_client.start_workflow( + SimpleWorkflow.run, + id=f"basic-trace-{uuid.uuid4()}", + task_queue=worker.task_queue, + ) + assert await result.result() == "activity-done" + + hierarchy = dump_runs(collector) + expected = [ + "StartWorkflow:SimpleWorkflow", + " RunWorkflow:SimpleWorkflow", + " StartActivity:simple_activity", + " RunActivity:simple_activity", + ] + assert ( + hierarchy == expected + 
), f"Hierarchy mismatch.\nExpected:\n{expected}\nActual:\n{hierarchy}" + + # Verify run_type: RunActivity is "tool", others are "chain" + for run in collector.runs: + if run.name == "RunActivity:simple_activity": + assert ( + run.run_type == "tool" + ), f"Expected RunActivity run_type='tool', got '{run.run_type}'" + else: + assert ( + run.run_type == "chain" + ), f"Expected {run.name} run_type='chain', got '{run.run_type}'" + + # Verify successful runs have outputs == {"status": "ok"} + for run in collector.runs: + assert run.outputs == { + "status": "ok" + }, f"Expected {run.name} outputs={{'status': 'ok'}}, got {run.outputs}" + + +# --------------------------------------------------------------------------- +# TestReplay +# --------------------------------------------------------------------------- + + +class TestReplay: + async def test_no_duplicate_traces_on_replay( + self, + client: Client, + env: WorkflowEnvironment, # type:ignore[reportUnusedParameter] + ) -> None: + """With max_cached_workflows=0 (forcing replay), no duplicate runs appear.""" + temporal_client, collector, _ = _make_client_and_collector( + client, add_temporal_runs=True + ) + + async with new_worker( + temporal_client, + TraceableActivityWorkflow, + activities=[traceable_activity], + max_cached_workflows=0, + ) as worker: + handle = await temporal_client.start_workflow( + TraceableActivityWorkflow.run, + id=f"replay-test-{uuid.uuid4()}", + task_queue=worker.task_queue, + ) + await handle.result() + + # Workflow→activity→@traceable flow should produce exactly these runs + # with no duplicates from replay: + hierarchy = dump_runs(collector) + expected = [ + "StartWorkflow:TraceableActivityWorkflow", + " RunWorkflow:TraceableActivityWorkflow", + " StartActivity:traceable_activity", + " RunActivity:traceable_activity", + " inner_llm_call", + ] + assert hierarchy == expected, ( + f"Hierarchy mismatch (possible replay duplicates).\n" + f"Expected:\n{expected}\nActual:\n{hierarchy}" + ) + + +# 
--------------------------------------------------------------------------- +# TestErrorTracing +# --------------------------------------------------------------------------- + + +class TestErrorTracing: + async def test_activity_failure_marked( + self, + client: Client, + env: WorkflowEnvironment, # type:ignore[reportUnusedParameter] + ) -> None: + """A failing activity run is marked with an error.""" + temporal_client, collector, _ = _make_client_and_collector( + client, add_temporal_runs=True + ) + + async with new_worker( + temporal_client, + ActivityFailureWorkflow, + activities=[failing_activity], + workflow_failure_exception_types=[ApplicationError], + max_cached_workflows=0, + ) as worker: + handle = await temporal_client.start_workflow( + ActivityFailureWorkflow.run, + id=f"act-fail-{uuid.uuid4()}", + task_queue=worker.task_queue, + ) + with pytest.raises(WorkflowFailureError): + await handle.result() + + hierarchy = dump_runs(collector) + expected = [ + "StartWorkflow:ActivityFailureWorkflow", + " RunWorkflow:ActivityFailureWorkflow", + " StartActivity:failing_activity", + " RunActivity:failing_activity", + ] + assert ( + hierarchy == expected + ), f"Hierarchy mismatch.\nExpected:\n{expected}\nActual:\n{hierarchy}" + # Verify the RunActivity run has an error + activity_runs = [ + r for r in collector.runs if r.name == "RunActivity:failing_activity" + ] + assert len(activity_runs) == 1 + assert activity_runs[0].error == "ApplicationError: activity-failed" + + async def test_workflow_failure_marked( + self, + client: Client, + env: WorkflowEnvironment, # type:ignore[reportUnusedParameter] + ) -> None: + """A failing workflow run is marked with an error.""" + temporal_client, collector, _ = _make_client_and_collector( + client, add_temporal_runs=True + ) + + async with new_worker( + temporal_client, + FailingWorkflow, + workflow_failure_exception_types=[ApplicationError], + max_cached_workflows=0, + ) as worker: + handle = await 
temporal_client.start_workflow( + FailingWorkflow.run, + id=f"wf-fail-{uuid.uuid4()}", + task_queue=worker.task_queue, + ) + with pytest.raises(WorkflowFailureError): + await handle.result() + + hierarchy = dump_runs(collector) + expected = [ + "StartWorkflow:FailingWorkflow", + " RunWorkflow:FailingWorkflow", + ] + assert ( + hierarchy == expected + ), f"Hierarchy mismatch.\nExpected:\n{expected}\nActual:\n{hierarchy}" + # Verify the RunWorkflow run has an error + wf_runs = [r for r in collector.runs if r.name == "RunWorkflow:FailingWorkflow"] + assert len(wf_runs) == 1 + assert wf_runs[0].error == "ApplicationError: workflow-failed" + + async def test_benign_error_not_marked( + self, + client: Client, + env: WorkflowEnvironment, # type:ignore[reportUnusedParameter] + ) -> None: + """A benign ApplicationError does NOT mark the run as errored.""" + temporal_client, collector, _ = _make_client_and_collector( + client, add_temporal_runs=True + ) + + async with new_worker( + temporal_client, + BenignErrorWorkflow, + activities=[benign_failing_activity], + workflow_failure_exception_types=[ApplicationError], + max_cached_workflows=0, + ) as worker: + handle = await temporal_client.start_workflow( + BenignErrorWorkflow.run, + id=f"benign-{uuid.uuid4()}", + task_queue=worker.task_queue, + ) + with pytest.raises(WorkflowFailureError): + await handle.result() + + hierarchy = dump_runs(collector) + expected = [ + "StartWorkflow:BenignErrorWorkflow", + " RunWorkflow:BenignErrorWorkflow", + " StartActivity:benign_failing_activity", + " RunActivity:benign_failing_activity", + ] + assert ( + hierarchy == expected + ), f"Hierarchy mismatch.\nExpected:\n{expected}\nActual:\n{hierarchy}" + # The RunActivity run for benign error should NOT have error set + activity_runs = [ + r for r in collector.runs if r.name == "RunActivity:benign_failing_activity" + ] + assert len(activity_runs) == 1 + assert activity_runs[0].error is None + + +# 
--------------------------------------------------------------------------- +# TestComprehensiveTracing +# --------------------------------------------------------------------------- + + +class TestComprehensiveTracing: + async def test_comprehensive_with_temporal_runs( + self, client: Client, env: WorkflowEnvironment + ) -> None: + """Full trace hierarchy with worker restart mid-workflow. + + Starts workflow on first worker, kills it at signal wait point, + then starts fresh worker+plugin to signal and complete the workflow. + Verifies combined hierarchy from both worker lifetimes in one assertion. + """ + if env.supports_time_skipping: + pytest.skip("Time-skipping server doesn't persist headers.") + + task_queue = f"comprehensive-{uuid.uuid4()}" + workflow_id = f"comprehensive-{uuid.uuid4()}" + collector = InMemoryRunCollector() + mock_ls = make_mock_ls_client(collector) + + @traceable(name="user_pipeline") + async def user_pipeline() -> str: + # Phase 1: Start workflow, run until signal wait + temporal_client_1 = _make_temporal_client( + client, mock_ls, add_temporal_runs=True + ) + async with new_worker( + temporal_client_1, + ComprehensiveWorkflow, + TraceableActivityWorkflow, + SimpleNexusWorkflow, + activities=[nested_traceable_activity, traceable_activity], + nexus_service_handlers=[NexusService()], + task_queue=task_queue, + max_cached_workflows=0, + ) as worker: + await env.create_nexus_endpoint( + make_nexus_endpoint_name(worker.task_queue), + worker.task_queue, + ) + handle = await temporal_client_1.start_workflow( + ComprehensiveWorkflow.run, + id=workflow_id, + task_queue=worker.task_queue, + ) + # Poll via raw client to avoid creating trace runs + raw_handle = client.get_workflow_handle(workflow_id) + assert await _poll_query( + raw_handle, + ComprehensiveWorkflow.is_waiting_for_signal, + expected=True, + ), "Workflow never reached signal wait point" + + # Phase 2: Fresh worker+plugin, signal to resume, complete + temporal_client_2 = 
_make_temporal_client( + client, mock_ls, add_temporal_runs=True + ) + async with new_worker( + temporal_client_2, + ComprehensiveWorkflow, + TraceableActivityWorkflow, + SimpleNexusWorkflow, + activities=[nested_traceable_activity, traceable_activity], + nexus_service_handlers=[NexusService()], + task_queue=task_queue, + max_cached_workflows=0, + ): + handle = temporal_client_2.get_workflow_handle(workflow_id) + await handle.query(ComprehensiveWorkflow.my_query) + await handle.signal(ComprehensiveWorkflow.my_signal, "hello") + await handle.execute_update(ComprehensiveWorkflow.my_update, "finish") + return await handle.result() + + with tracing_context(client=mock_ls, enabled=True): + result = await user_pipeline() + + assert result == "comprehensive-done" + + hierarchy = dump_runs(collector) + expected = [ + "user_pipeline", + " StartWorkflow:ComprehensiveWorkflow", + " RunWorkflow:ComprehensiveWorkflow", + " StartActivity:nested_traceable_activity", + " RunActivity:nested_traceable_activity", + " outer_chain", + " inner_llm_call", + " StartActivity:nested_traceable_activity", + " RunActivity:nested_traceable_activity", + " outer_chain", + " inner_llm_call", + " outer_chain", + " inner_llm_call", + " StartChildWorkflow:TraceableActivityWorkflow", + " RunWorkflow:TraceableActivityWorkflow", + " StartActivity:traceable_activity", + " RunActivity:traceable_activity", + " inner_llm_call", + " StartNexusOperation:NexusService/run_operation", + " RunStartNexusOperationHandler:NexusService/run_operation", + " StartWorkflow:SimpleNexusWorkflow", + " RunWorkflow:SimpleNexusWorkflow", + " StartActivity:traceable_activity", + " RunActivity:traceable_activity", + " inner_llm_call", + " StartActivity:nested_traceable_activity", + " RunActivity:nested_traceable_activity", + " outer_chain", + " inner_llm_call", + " QueryWorkflow:my_query", + " HandleQuery:my_query", + " SignalWorkflow:my_signal", + " HandleSignal:my_signal", + " StartWorkflowUpdate:my_update", + " 
ValidateUpdate:my_update", + " HandleUpdate:my_update", + ] + assert ( + hierarchy == expected + ), f"Hierarchy mismatch.\nExpected:\n{expected}\nActual:\n{hierarchy}" + + async def test_comprehensive_without_temporal_runs( + self, client: Client, env: WorkflowEnvironment + ) -> None: + """Same comprehensive workflow with add_temporal_runs=False and worker restart. + + Only @traceable runs appear. Context propagation via headers still works. + """ + if env.supports_time_skipping: + pytest.skip("Time-skipping server doesn't persist headers.") + + task_queue = f"comprehensive-no-runs-{uuid.uuid4()}" + workflow_id = f"comprehensive-no-runs-{uuid.uuid4()}" + collector = InMemoryRunCollector() + mock_ls = make_mock_ls_client(collector) + + @traceable(name="user_pipeline") + async def user_pipeline() -> str: + # Phase 1: Start workflow, run until signal wait + temporal_client_1 = _make_temporal_client( + client, mock_ls, add_temporal_runs=False + ) + async with new_worker( + temporal_client_1, + ComprehensiveWorkflow, + TraceableActivityWorkflow, + SimpleNexusWorkflow, + activities=[nested_traceable_activity, traceable_activity], + nexus_service_handlers=[NexusService()], + task_queue=task_queue, + max_cached_workflows=0, + ) as worker: + await env.create_nexus_endpoint( + make_nexus_endpoint_name(worker.task_queue), + worker.task_queue, + ) + handle = await temporal_client_1.start_workflow( + ComprehensiveWorkflow.run, + id=workflow_id, + task_queue=worker.task_queue, + ) + # Poll via raw client to avoid creating trace runs + raw_handle = client.get_workflow_handle(workflow_id) + assert await _poll_query( + raw_handle, + ComprehensiveWorkflow.is_waiting_for_signal, + expected=True, + ), "Workflow never reached signal wait point" + + # Phase 2: Fresh worker+plugin, signal to resume, complete + temporal_client_2 = _make_temporal_client( + client, mock_ls, add_temporal_runs=False + ) + async with new_worker( + temporal_client_2, + ComprehensiveWorkflow, + 
TraceableActivityWorkflow, + SimpleNexusWorkflow, + activities=[nested_traceable_activity, traceable_activity], + nexus_service_handlers=[NexusService()], + task_queue=task_queue, + max_cached_workflows=0, + ): + handle = temporal_client_2.get_workflow_handle(workflow_id) + await handle.signal(ComprehensiveWorkflow.my_signal, "hello") + await handle.execute_update(ComprehensiveWorkflow.my_update, "finish") + return await handle.result() + + with tracing_context(client=mock_ls, enabled=True): + result = await user_pipeline() + + assert result == "comprehensive-done" + + hierarchy = dump_runs(collector) + expected = [ + "user_pipeline", + " outer_chain", + " inner_llm_call", + " outer_chain", + " inner_llm_call", + " outer_chain", + " inner_llm_call", + " inner_llm_call", + " inner_llm_call", + " outer_chain", + " inner_llm_call", + ] + assert ( + hierarchy == expected + ), f"Hierarchy mismatch.\nExpected:\n{expected}\nActual:\n{hierarchy}" diff --git a/tests/contrib/langsmith/test_interceptor.py b/tests/contrib/langsmith/test_interceptor.py new file mode 100644 index 000000000..17050bfe5 --- /dev/null +++ b/tests/contrib/langsmith/test_interceptor.py @@ -0,0 +1,1146 @@ +"""Tests for LangSmith interceptor points and helper functions.""" + +from __future__ import annotations + +import asyncio +from typing import Any +from unittest.mock import AsyncMock, MagicMock, patch + +import pytest + +from temporalio.api.common.v1 import Payload +from temporalio.contrib.langsmith import LangSmithInterceptor +from temporalio.contrib.langsmith._interceptor import ( + HEADER_KEY, + ReplaySafeRunTree, + _extract_context, + _inject_context, + _maybe_run, +) + +# --------------------------------------------------------------------------- +# Helpers +# --------------------------------------------------------------------------- + +# Common patch targets (interceptor module) +_MOD = "temporalio.contrib.langsmith._interceptor" +_PATCH_RUNTREE = f"{_MOD}.RunTree" +_PATCH_IN_WORKFLOW = 
f"{_MOD}.temporalio.workflow.in_workflow" +_PATCH_IS_REPLAYING = f"{_MOD}.temporalio.workflow.unsafe.is_replaying_history_events" +_PATCH_WF_INFO = f"{_MOD}.temporalio.workflow.info" +_PATCH_SANDBOX = f"{_MOD}.temporalio.workflow.unsafe.sandbox_unrestricted" +_PATCH_TRACING_CTX = f"{_MOD}.tracing_context" +_PATCH_EXTRACT_NEXUS = f"{_MOD}._extract_nexus_context" +_PATCH_INJECT_NEXUS = f"{_MOD}._inject_nexus_context" +_PATCH_GET_CURRENT_RUN = f"{_MOD}.get_current_run_tree" + + +def _make_mock_run() -> MagicMock: + """Create a mock RunTree with working to_headers() for _inject_context.""" + mock_run = MagicMock() + mock_run.to_headers.return_value = {"langsmith-trace": "test-trace-id"} + return mock_run + + +def _mock_workflow_info(**overrides: Any) -> MagicMock: + """Create a mock workflow Info object.""" + info = MagicMock() + info.workflow_id = overrides.get("workflow_id", "test-wf-id") + info.run_id = overrides.get("run_id", "test-run-id") + info.workflow_type = overrides.get("workflow_type", "TestWorkflow") + return info + + +def _mock_activity_info(**overrides: Any) -> MagicMock: + """Create a mock activity Info object.""" + info = MagicMock() + info.workflow_id = overrides.get("workflow_id", "test-wf-id") + info.workflow_run_id = overrides.get("workflow_run_id", "test-run-id") + info.activity_id = overrides.get("activity_id", "test-activity-id") + info.activity_type = overrides.get("activity_type", "test_activity") + return info + + +def _get_runtree_name(MockRunTree: MagicMock) -> str: + """Extract the 'name' kwarg from RunTree constructor call.""" + MockRunTree.assert_called_once() + return MockRunTree.call_args.kwargs["name"] + + +def _get_runtree_metadata(MockRunTree: MagicMock) -> dict[str, Any]: + """Extract metadata from RunTree constructor kwargs. + + The design stores metadata in the 'extra' kwarg as {"metadata": {...}}. 
+ """ + MockRunTree.assert_called_once() + kwargs = MockRunTree.call_args.kwargs + extra = kwargs.get("extra", {}) + if extra and "metadata" in extra: + return extra["metadata"] + # Alternatively, metadata might be passed directly + return kwargs.get("metadata", {}) + + +# =================================================================== +# TestContextPropagation +# =================================================================== + + +class TestContextPropagation: + """Tests for _inject_context / _extract_context roundtrip.""" + + @patch(_PATCH_RUNTREE) + def test_inject_extract_roundtrip(self, MockRunTree: Any) -> None: + """Inject a mock run tree's headers, then extract. Verify roundtrip.""" + mock_run = MagicMock() + mock_run.to_headers.return_value = { + "langsmith-trace": "test-trace-id", + "parent": "abc-123", + } + + headers: dict[str, Payload] = {} + result = _inject_context(headers, mock_run) + + assert HEADER_KEY in result + + # Mock from_headers for extraction (real one needs valid LangSmith header format) + mock_extracted = MagicMock() + MockRunTree.from_headers.return_value = mock_extracted + + extracted = _extract_context(result) + # extracted should be a ReplaySafeRunTree wrapping the reconstructed run + assert isinstance(extracted, ReplaySafeRunTree) + assert extracted._run is mock_extracted + MockRunTree.from_headers.assert_called_once() + + def test_extract_missing_header(self) -> None: + """When the _temporal-langsmith-context header is absent, returns None.""" + headers: dict[str, Payload] = {} + result = _extract_context(headers) + assert result is None + + def test_inject_preserves_existing_headers(self) -> None: + """Injecting LangSmith context does not overwrite other existing headers.""" + mock_run = MagicMock() + mock_run.to_headers.return_value = {"langsmith-trace": "val"} + + existing_payload = Payload(data=b"existing") + headers: dict[str, Payload] = {"my-header": existing_payload} + result = _inject_context(headers, mock_run) + + 
assert "my-header" in result + assert result["my-header"] is existing_payload + assert HEADER_KEY in result + + +# =================================================================== +# TestReplaySafety +# =================================================================== + + +class TestReplaySafety: + """Tests for replay-safe tracing behavior.""" + + @patch(_PATCH_RUNTREE) + @patch(_PATCH_IS_REPLAYING, return_value=True) + @patch(_PATCH_IN_WORKFLOW, return_value=True) + def test_replay_noop_post_end_patch( + self, _mock_in_wf: Any, _mock_replaying: Any, MockRunTree: Any + ) -> None: + """During replay, RunTree is created but post/end/patch are no-ops.""" + mock_run = _make_mock_run() + MockRunTree.return_value = mock_run + mock_client = MagicMock() + with _maybe_run( + mock_client, + "TestRun", + add_temporal_runs=True, + ) as run: + assert isinstance(run, ReplaySafeRunTree) + assert run._run is mock_run + # RunTree IS created (wrapped in ReplaySafeRunTree) + MockRunTree.assert_called_once() + # But post/end/patch are no-ops during replay + mock_run.post.assert_not_called() + mock_run.end.assert_not_called() + mock_run.patch.assert_not_called() + + @patch(_PATCH_RUNTREE) + @patch(_PATCH_IS_REPLAYING, return_value=False) + @patch(_PATCH_IN_WORKFLOW, return_value=True) + def test_create_trace_when_not_replaying( + self, _mock_in_wf: Any, _mock_replaying: Any, MockRunTree: Any + ) -> None: + """When not replaying (but in workflow), _maybe_run creates a ReplaySafeRunTree.""" + mock_run = _make_mock_run() + MockRunTree.return_value = mock_run + mock_client = MagicMock() + with _maybe_run( + mock_client, + "TestRun", + add_temporal_runs=True, + ) as run: + assert isinstance(run, ReplaySafeRunTree) + assert run._run is mock_run + MockRunTree.assert_called_once() + assert MockRunTree.call_args.kwargs["name"] == "TestRun" + + @patch(_PATCH_RUNTREE) + @patch(_PATCH_IN_WORKFLOW, return_value=False) + def test_create_trace_outside_workflow( + self, _mock_in_wf: Any, 
MockRunTree: Any + ) -> None: + """Outside workflow (client/activity), RunTree IS created.""" + mock_run = _make_mock_run() + MockRunTree.return_value = mock_run + mock_client = MagicMock() + with _maybe_run( + mock_client, + "TestRun", + add_temporal_runs=True, + ) as run: + assert isinstance(run, ReplaySafeRunTree) + assert run._run is mock_run + MockRunTree.assert_called_once() + + +# =================================================================== +# TestErrorHandling +# =================================================================== + + +class TestErrorHandling: + """Tests for _maybe_run error handling.""" + + @patch(_PATCH_RUNTREE) + @patch(_PATCH_IN_WORKFLOW, return_value=False) + def test_exception_marks_run_errored( + self, _mock_in_wf: Any, MockRunTree: Any + ) -> None: + """RuntimeError marks the run as errored and re-raises.""" + mock_run = _make_mock_run() + MockRunTree.return_value = mock_run + mock_client = MagicMock() + with pytest.raises(RuntimeError, match="boom"): + with _maybe_run( + mock_client, + "TestRun", + add_temporal_runs=True, + ) as run: + assert run is not None + assert run._run is mock_run + raise RuntimeError("boom") + # run.end should have been called with error containing "boom" + mock_run.end.assert_called() + end_kwargs = mock_run.end.call_args.kwargs + assert end_kwargs["error"] == "RuntimeError: boom" + mock_run.patch.assert_called() + + @patch(_PATCH_RUNTREE) + @patch(_PATCH_IN_WORKFLOW, return_value=False) + def test_benign_application_error_not_marked( + self, _mock_in_wf: Any, MockRunTree: Any + ) -> None: + """Benign ApplicationError does not mark the run as errored.""" + from temporalio.exceptions import ApplicationError, ApplicationErrorCategory + + mock_run = _make_mock_run() + MockRunTree.return_value = mock_run + mock_client = MagicMock() + with pytest.raises(ApplicationError): + with _maybe_run( + mock_client, + "TestRun", + add_temporal_runs=True, + ) as run: + assert run is not None + assert run._run is 
mock_run + raise ApplicationError( + "benign", + category=ApplicationErrorCategory.BENIGN, + ) + # run.end should NOT have been called with error= + end_calls = mock_run.end.call_args_list + for c in end_calls: + assert "error" not in (c.kwargs or {}) + + @patch(_PATCH_RUNTREE) + @patch(_PATCH_IN_WORKFLOW, return_value=False) + def test_non_benign_application_error_marked( + self, _mock_in_wf: Any, MockRunTree: Any + ) -> None: + """Non-benign ApplicationError marks the run as errored.""" + from temporalio.exceptions import ApplicationError + + mock_run = _make_mock_run() + MockRunTree.return_value = mock_run + mock_client = MagicMock() + with pytest.raises(ApplicationError): + with _maybe_run( + mock_client, + "TestRun", + add_temporal_runs=True, + ) as run: + assert run is not None + assert run._run is mock_run + raise ApplicationError("bad", non_retryable=True) + mock_run.end.assert_called() + end_kwargs = mock_run.end.call_args.kwargs + assert end_kwargs["error"] == "ApplicationError: bad" + mock_run.patch.assert_called() + + @patch(_PATCH_RUNTREE) + @patch(_PATCH_IN_WORKFLOW, return_value=False) + def test_success_completes_normally( + self, _mock_in_wf: Any, MockRunTree: Any + ) -> None: + """On success, run.end(outputs={"status": "ok"}) and run.patch() are called.""" + mock_run = _make_mock_run() + MockRunTree.return_value = mock_run + mock_client = MagicMock() + with _maybe_run( + mock_client, + "TestRun", + add_temporal_runs=True, + ) as run: + assert run is not None + assert run._run is mock_run + mock_run.end.assert_called_once() + end_kwargs = mock_run.end.call_args.kwargs + assert end_kwargs.get("outputs") == {"status": "ok"} + mock_run.patch.assert_called() + + @patch(_PATCH_RUNTREE) + @patch(_PATCH_IN_WORKFLOW, return_value=False) + def test_cancelled_error_propagates_without_marking_run( + self, _mock_in_wf: Any, MockRunTree: Any + ) -> None: + """CancelledError (BaseException) propagates without marking run as errored. 
+ + _maybe_run catches Exception only, so CancelledError bypasses error marking. + """ + mock_run = _make_mock_run() + MockRunTree.return_value = mock_run + mock_client = MagicMock() + with pytest.raises(asyncio.CancelledError): + with _maybe_run( + mock_client, + "TestRun", + add_temporal_runs=True, + ) as run: + assert run is not None + assert run._run is mock_run + raise asyncio.CancelledError() + # run.end should NOT have been called with error= + end_calls = mock_run.end.call_args_list + for c in end_calls: + assert "error" not in (c.kwargs or {}) + + +# =================================================================== +# TestClientOutboundInterceptor +# =================================================================== + + +class TestClientOutboundInterceptor: + """Tests for _LangSmithClientOutboundInterceptor.""" + + def _make_client_interceptor( + self, *, add_temporal_runs: bool = True + ) -> tuple[Any, MagicMock]: + """Create a client outbound interceptor with a mock next.""" + config = LangSmithInterceptor( + client=MagicMock(), add_temporal_runs=add_temporal_runs + ) + mock_next = MagicMock() + mock_next.start_workflow = AsyncMock() + mock_next.query_workflow = AsyncMock() + mock_next.signal_workflow = AsyncMock() + mock_next.start_workflow_update = AsyncMock() + mock_next.start_update_with_start_workflow = AsyncMock() + interceptor = config.intercept_client(mock_next) + return interceptor, mock_next + + @pytest.mark.parametrize( + "method,input_attrs,expected_name", + [ + ( + "start_workflow", + {"workflow": "MyWorkflow", "start_signal": None}, + "StartWorkflow:MyWorkflow", + ), + ( + "start_workflow", + {"workflow": "MyWorkflow", "start_signal": "my_signal"}, + "SignalWithStartWorkflow:MyWorkflow", + ), + ("query_workflow", {"query": "get_status"}, "QueryWorkflow:get_status"), + ("signal_workflow", {"signal": "my_signal"}, "SignalWorkflow:my_signal"), + ( + "start_workflow_update", + {"update": "my_update"}, + "StartWorkflowUpdate:my_update", + ), 
+ ], + ids=["start_workflow", "signal_with_start", "query", "signal", "update"], + ) + @pytest.mark.asyncio + @patch(_PATCH_RUNTREE) + async def test_creates_trace_and_injects_headers( + self, + MockRunTree: Any, + method: str, + input_attrs: dict[str, Any], + expected_name: str, + ) -> None: + """Each client method creates the correct trace and injects headers.""" + mock_run = _make_mock_run() + MockRunTree.return_value = mock_run + interceptor, mock_next = self._make_client_interceptor() + mock_input = MagicMock() + for k, v in input_attrs.items(): + setattr(mock_input, k, v) + mock_input.headers = {} + + with patch(_PATCH_GET_CURRENT_RUN, return_value=mock_run): + await getattr(interceptor, method)(mock_input) + + assert _get_runtree_name(MockRunTree) == expected_name + assert HEADER_KEY in mock_input.headers + getattr(mock_next, method).assert_called_once() + + @pytest.mark.asyncio + @patch(_PATCH_RUNTREE) + async def test_start_update_with_start_workflow(self, MockRunTree: Any) -> None: + """start_update_with_start_workflow injects headers into BOTH start and update inputs.""" + mock_run = _make_mock_run() + MockRunTree.return_value = mock_run + interceptor, mock_next = self._make_client_interceptor() + mock_input = MagicMock() + mock_input.start_workflow_input = MagicMock() + mock_input.start_workflow_input.workflow = "MyWorkflow" + mock_input.start_workflow_input.headers = {} + mock_input.update_workflow_input = MagicMock() + mock_input.update_workflow_input.headers = {} + + with patch(_PATCH_GET_CURRENT_RUN, return_value=mock_run): + await interceptor.start_update_with_start_workflow(mock_input) + + assert ( + _get_runtree_name(MockRunTree) == "StartUpdateWithStartWorkflow:MyWorkflow" + ) + assert HEADER_KEY in mock_input.start_workflow_input.headers + assert HEADER_KEY in mock_input.update_workflow_input.headers + mock_next.start_update_with_start_workflow.assert_called_once() + + @pytest.mark.asyncio + @patch(_PATCH_GET_CURRENT_RUN, return_value=None) + 
@patch(_PATCH_RUNTREE) + async def test_add_temporal_runs_false_skips_trace( + self, MockRunTree: Any, mock_get_current: Any + ) -> None: + """With add_temporal_runs=False and no ambient context, no run is created + and no headers are injected. + + _inject_current_context() is called unconditionally, but + get_current_run_tree() returns None so headers are unchanged. + """ + interceptor, mock_next = self._make_client_interceptor(add_temporal_runs=False) + mock_input = MagicMock() + mock_input.workflow = "MyWorkflow" + mock_input.start_signal = None + mock_input.headers = {} + + await interceptor.start_workflow(mock_input) + + # RunTree should NOT be created + MockRunTree.assert_not_called() + # _inject_current_context was called but found no ambient context + mock_get_current.assert_called_once() + # Headers should NOT have been modified (no ambient context) + assert HEADER_KEY not in mock_input.headers + # super() should still be called + mock_next.start_workflow.assert_called_once() + + @pytest.mark.asyncio + @patch(_PATCH_RUNTREE) + async def test_add_temporal_runs_false_with_ambient_context( + self, MockRunTree: Any + ) -> None: + """With add_temporal_runs=False but user-provided ambient context, + no run is created but the ambient context IS injected into headers. + + This verifies that context propagation works even without plugin-created + runs — if the user wraps the call in langsmith.trace(), that context + gets propagated through Temporal headers. 
+ """ + mock_ambient_run = _make_mock_run() + interceptor, mock_next = self._make_client_interceptor(add_temporal_runs=False) + mock_input = MagicMock() + mock_input.workflow = "MyWorkflow" + mock_input.start_signal = None + mock_input.headers = {} + + with patch(_PATCH_GET_CURRENT_RUN, return_value=mock_ambient_run): + await interceptor.start_workflow(mock_input) + + # RunTree should NOT be created (no Temporal run) + MockRunTree.assert_not_called() + # But headers SHOULD be injected from the ambient context + assert HEADER_KEY in mock_input.headers + mock_next.start_workflow.assert_called_once() + + +# =================================================================== +# TestActivityInboundInterceptor +# =================================================================== + + +class TestActivityInboundInterceptor: + """Tests for _LangSmithActivityInboundInterceptor.""" + + def _make_activity_interceptor( + self, *, add_temporal_runs: bool = True + ) -> tuple[Any, MagicMock]: + config = LangSmithInterceptor( + client=MagicMock(), add_temporal_runs=add_temporal_runs + ) + mock_next = MagicMock() + mock_next.execute_activity = AsyncMock(return_value="activity_result") + interceptor = config.intercept_activity(mock_next) + return interceptor, mock_next + + @pytest.mark.asyncio + @patch(_PATCH_TRACING_CTX) + @patch(_PATCH_RUNTREE) + @patch("temporalio.activity.info") + async def test_execute_activity_creates_run_with_context_and_metadata( + self, mock_info_fn: Any, MockRunTree: Any, mock_tracing_ctx: Any + ) -> None: + """Activity execution creates a correctly named run with metadata and parent context.""" + mock_info_fn.return_value = _mock_activity_info( + activity_type="do_thing", + workflow_id="wf-123", + workflow_run_id="run-456", + activity_id="act-789", + ) + mock_run = _make_mock_run() + MockRunTree.return_value = mock_run + interceptor, mock_next = self._make_activity_interceptor() + + mock_input = MagicMock() + mock_input.headers = {} + + result = await 
interceptor.execute_activity(mock_input) + + # Verify trace name and run_type + assert _get_runtree_name(MockRunTree) == "RunActivity:do_thing" + assert MockRunTree.call_args.kwargs.get("run_type") == "tool" + # Verify metadata + metadata = _get_runtree_metadata(MockRunTree) + assert metadata["temporalWorkflowID"] == "wf-123" + assert metadata["temporalRunID"] == "run-456" + assert metadata["temporalActivityID"] == "act-789" + # Verify tracing_context sets parent (wrapped in ReplaySafeRunTree) + mock_tracing_ctx.assert_called() + ctx_kwargs = mock_tracing_ctx.call_args.kwargs + parent = ctx_kwargs.get("parent") + assert isinstance(parent, ReplaySafeRunTree) + assert parent._run is mock_run + # Verify super() called and result passed through + mock_next.execute_activity.assert_called_once() + assert result == "activity_result" + + @pytest.mark.asyncio + @patch(_PATCH_RUNTREE) + @patch("temporalio.activity.info") + async def test_execute_activity_no_header( + self, mock_info_fn: Any, MockRunTree: Any + ) -> None: + """When no LangSmith header is present, activity still executes (no parent, no crash).""" + mock_info_fn.return_value = _mock_activity_info() + mock_run = _make_mock_run() + MockRunTree.return_value = mock_run + interceptor, _mock_next = self._make_activity_interceptor() + + mock_input = MagicMock() + mock_input.headers = {} # No LangSmith header + + result = await interceptor.execute_activity(mock_input) + + # Should still create a run (just without a parent) + MockRunTree.assert_called_once() + assert MockRunTree.call_args.kwargs.get("parent_run") is None + assert result == "activity_result" + + +# =================================================================== +# TestWorkflowInboundInterceptor +# =================================================================== + + +class TestWorkflowInboundInterceptor: + """Tests for _LangSmithWorkflowInboundInterceptor.""" + + def _make_workflow_interceptors( + self, *, add_temporal_runs: bool = True + ) -> 
tuple[Any, MagicMock]: + """Create workflow inbound interceptor and a mock next.""" + config = LangSmithInterceptor( + client=MagicMock(), add_temporal_runs=add_temporal_runs + ) + mock_next = MagicMock() + mock_next.execute_workflow = AsyncMock(return_value="wf_result") + mock_next.handle_signal = AsyncMock() + mock_next.handle_query = AsyncMock(return_value="query_result") + mock_next.handle_update_validator = MagicMock() + mock_next.handle_update_handler = AsyncMock(return_value="update_result") + + # Get the workflow interceptor class + wf_class_input = MagicMock() + wf_interceptor_cls = config.workflow_interceptor_class(wf_class_input) + assert wf_interceptor_cls is not None + + # Instantiate with mock next + wf_interceptor = wf_interceptor_cls(mock_next) + + # Initialize with mock outbound + mock_outbound = MagicMock() + wf_interceptor.init(mock_outbound) + + return wf_interceptor, mock_next + + @pytest.mark.asyncio + @patch(_PATCH_SANDBOX) + @patch(_PATCH_RUNTREE) + @patch(_PATCH_IS_REPLAYING, return_value=False) + @patch(_PATCH_IN_WORKFLOW, return_value=True) + @patch(_PATCH_WF_INFO) + async def test_execute_workflow( + self, + mock_wf_info: Any, + _mock_in_wf: Any, + _mock_replaying: Any, + MockRunTree: Any, + mock_sandbox: Any, + ) -> None: + """execute_workflow creates a run named RunWorkflow:{workflow_type}.""" + mock_wf_info.return_value = _mock_workflow_info(workflow_type="MyWorkflow") + mock_run = _make_mock_run() + MockRunTree.return_value = mock_run + interceptor, mock_next = self._make_workflow_interceptors() + + mock_input = MagicMock() + mock_input.headers = {} + + result = await interceptor.execute_workflow(mock_input) + + # Verify trace name + assert _get_runtree_name(MockRunTree) == "RunWorkflow:MyWorkflow" + # Verify metadata includes workflow ID and run ID + metadata = _get_runtree_metadata(MockRunTree) + assert metadata == { + "temporalWorkflowID": "test-wf-id", + "temporalRunID": "test-run-id", + } + # Verify sandbox_unrestricted was 
called (for post/patch inside workflow) + mock_sandbox.assert_called() + # Verify super() called and result passed through + mock_next.execute_workflow.assert_called_once() + assert result == "wf_result" + + @pytest.mark.parametrize( + "method,input_attr,input_val,expected_name", + [ + ("handle_signal", "signal", "my_signal", "HandleSignal:my_signal"), + ("handle_query", "query", "get_status", "HandleQuery:get_status"), + ( + "handle_update_validator", + "update", + "my_update", + "ValidateUpdate:my_update", + ), + ("handle_update_handler", "update", "my_update", "HandleUpdate:my_update"), + ], + ids=["signal", "query", "validator", "update_handler"], + ) + @pytest.mark.asyncio + @patch(_PATCH_SANDBOX) + @patch(_PATCH_RUNTREE) + @patch(_PATCH_IS_REPLAYING, return_value=False) + @patch(_PATCH_IN_WORKFLOW, return_value=True) + @patch(_PATCH_WF_INFO) + async def test_handler_creates_trace( + self, + mock_wf_info: Any, + _mock_in_wf: Any, + _mock_replaying: Any, + MockRunTree: Any, + mock_sandbox: Any, + method: str, + input_attr: str, + input_val: str, + expected_name: str, + ) -> None: + """Each workflow handler creates the correct trace name.""" + mock_wf_info.return_value = _mock_workflow_info() + mock_run = _make_mock_run() + MockRunTree.return_value = mock_run + interceptor, mock_next = self._make_workflow_interceptors() + + mock_input = MagicMock() + setattr(mock_input, input_attr, input_val) + mock_input.headers = {} + + result = getattr(interceptor, method)(mock_input) + if asyncio.iscoroutine(result): + await result + + assert _get_runtree_name(MockRunTree) == expected_name + mock_sandbox.assert_called() + getattr(mock_next, method).assert_called_once() + + +# =================================================================== +# TestWorkflowOutboundInterceptor +# =================================================================== + + +class TestWorkflowOutboundInterceptor: + """Tests for _LangSmithWorkflowOutboundInterceptor.""" + + def 
_make_outbound_interceptor( + self, *, add_temporal_runs: bool = True + ) -> tuple[Any, MagicMock, Any]: + """Create outbound interceptor with mock next and inbound reference. + + Returns (outbound_interceptor, mock_next, inbound_interceptor). + """ + config = LangSmithInterceptor( + client=MagicMock(), add_temporal_runs=add_temporal_runs + ) + + # Create mock next for inbound + mock_inbound_next = MagicMock() + mock_inbound_next.execute_workflow = AsyncMock() + mock_inbound_next.handle_signal = AsyncMock() + mock_inbound_next.handle_query = AsyncMock() + mock_inbound_next.handle_update_validator = MagicMock() + mock_inbound_next.handle_update_handler = AsyncMock() + + # Create inbound interceptor + wf_class_input = MagicMock() + wf_interceptor_cls = config.workflow_interceptor_class(wf_class_input) + inbound = wf_interceptor_cls(mock_inbound_next) + + # Create mock outbound next + mock_outbound_next = MagicMock() + mock_outbound_next.start_activity = MagicMock() + mock_outbound_next.start_local_activity = MagicMock() + mock_outbound_next.start_child_workflow = AsyncMock() + mock_outbound_next.signal_child_workflow = AsyncMock() + mock_outbound_next.signal_external_workflow = AsyncMock() + mock_outbound_next.continue_as_new = MagicMock() + mock_outbound_next.start_nexus_operation = AsyncMock() + + # Initialize inbound (which should create the outbound) + inbound.init(mock_outbound_next) + + # Create the outbound directly for unit testing + from temporalio.contrib.langsmith._interceptor import ( + _LangSmithWorkflowOutboundInterceptor, + ) + + outbound = _LangSmithWorkflowOutboundInterceptor( + mock_outbound_next, config, inbound + ) + + # Set a current run on the inbound to simulate active workflow execution + mock_current_run = _make_mock_run() + inbound._current_run = mock_current_run + + return outbound, mock_outbound_next, inbound + + @pytest.mark.parametrize( + "method,input_attr,input_val,expected_name", + [ + ("start_activity", "activity", "do_thing", 
"StartActivity:do_thing"), + ( + "start_local_activity", + "activity", + "local_thing", + "StartActivity:local_thing", + ), + ( + "start_child_workflow", + "workflow", + "ChildWorkflow", + "StartChildWorkflow:ChildWorkflow", + ), + ( + "signal_child_workflow", + "signal", + "child_signal", + "SignalChildWorkflow:child_signal", + ), + ( + "signal_external_workflow", + "signal", + "ext_signal", + "SignalExternalWorkflow:ext_signal", + ), + ], + ids=[ + "activity", + "local_activity", + "child_workflow", + "signal_child", + "signal_external", + ], + ) + @pytest.mark.asyncio + @patch(_PATCH_SANDBOX) + @patch(_PATCH_RUNTREE) + @patch(_PATCH_IS_REPLAYING, return_value=False) + @patch(_PATCH_IN_WORKFLOW, return_value=True) + async def test_creates_trace_and_injects_headers( + self, + _mock_in_wf: Any, + _mock_replaying: Any, + MockRunTree: Any, + mock_sandbox: Any, + method: str, + input_attr: str, + input_val: str, + expected_name: str, + ) -> None: + """Each outbound method creates the correct trace and injects headers.""" + mock_run = _make_mock_run() + MockRunTree.return_value = mock_run + outbound, mock_next, _ = self._make_outbound_interceptor() + + mock_input = MagicMock() + setattr(mock_input, input_attr, input_val) + mock_input.headers = {} + + result = getattr(outbound, method)(mock_input) + if asyncio.iscoroutine(result): + await result + + assert _get_runtree_name(MockRunTree) == expected_name + assert HEADER_KEY in mock_input.headers + mock_sandbox.assert_called() + getattr(mock_next, method).assert_called_once() + + @patch(_PATCH_RUNTREE) + @patch(_PATCH_IS_REPLAYING, return_value=False) + @patch(_PATCH_IN_WORKFLOW, return_value=True) + def test_continue_as_new( + self, _mock_in_wf: Any, _mock_replaying: Any, MockRunTree: Any + ) -> None: + """continue_as_new does NOT create a new trace, but injects context from current run.""" + outbound, mock_next, _inbound = self._make_outbound_interceptor() + + mock_input = MagicMock() + mock_input.headers = {} + + 
outbound.continue_as_new(mock_input) + + # No new RunTree should be created for continue_as_new + MockRunTree.assert_not_called() + # But headers SHOULD be modified (context from inbound's _current_run) + assert HEADER_KEY in mock_input.headers + mock_next.continue_as_new.assert_called_once() + + @pytest.mark.asyncio + @patch(_PATCH_SANDBOX) + @patch(_PATCH_RUNTREE) + @patch(_PATCH_IS_REPLAYING, return_value=False) + @patch(_PATCH_IN_WORKFLOW, return_value=True) + async def test_start_nexus_operation( + self, + _mock_in_wf: Any, + _mock_replaying: Any, + MockRunTree: Any, + mock_sandbox: Any, + ) -> None: + """start_nexus_operation creates a trace named StartNexusOperation:{service}/{operation}.""" + mock_run = _make_mock_run() + MockRunTree.return_value = mock_run + outbound, mock_next, _ = self._make_outbound_interceptor() + + mock_input = MagicMock() + mock_input.service = "MyService" + mock_input.operation_name = "do_op" + mock_input.headers = {} + + await outbound.start_nexus_operation(mock_input) + + assert _get_runtree_name(MockRunTree) == "StartNexusOperation:MyService/do_op" + # Nexus uses string headers, so context injection uses _inject_nexus_context + # The headers dict should be modified + mock_sandbox.assert_called() + mock_next.start_nexus_operation.assert_called_once() + + +# =================================================================== +# TestNexusInboundInterceptor +# =================================================================== + + +class TestNexusInboundInterceptor: + """Tests for _LangSmithNexusOperationInboundInterceptor.""" + + def _make_nexus_interceptor( + self, *, add_temporal_runs: bool = True + ) -> tuple[Any, MagicMock]: + config = LangSmithInterceptor( + client=MagicMock(), add_temporal_runs=add_temporal_runs + ) + mock_next = MagicMock() + mock_next.execute_nexus_operation_start = AsyncMock() + mock_next.execute_nexus_operation_cancel = AsyncMock() + interceptor = config.intercept_nexus_operation(mock_next) + return 
interceptor, mock_next + + @pytest.mark.asyncio + @patch(_PATCH_RUNTREE) + @patch(_PATCH_EXTRACT_NEXUS) + async def test_execute_nexus_operation_start( + self, mock_extract_nexus: Any, MockRunTree: Any + ) -> None: + """Creates a run named RunStartNexusOperationHandler:{service}/{operation}. + + Uses _extract_nexus_context (not _extract_context) for Nexus string headers. + """ + mock_extract_nexus.return_value = None # no parent + mock_run = _make_mock_run() + MockRunTree.return_value = mock_run + interceptor, mock_next = self._make_nexus_interceptor() + + mock_input = MagicMock() + mock_input.ctx = MagicMock() + mock_input.ctx.service = "MyService" + mock_input.ctx.operation = "start_op" + mock_input.ctx.headers = {} + + await interceptor.execute_nexus_operation_start(mock_input) + + # Verify _extract_nexus_context was called (not _extract_context) + mock_extract_nexus.assert_called_once_with(mock_input.ctx.headers) + # Verify trace name + assert ( + _get_runtree_name(MockRunTree) + == "RunStartNexusOperationHandler:MyService/start_op" + ) + # Verify run_type is "tool" for Nexus operations + assert MockRunTree.call_args.kwargs.get("run_type") == "tool" + mock_next.execute_nexus_operation_start.assert_called_once() + + @pytest.mark.asyncio + @patch(_PATCH_RUNTREE) + @patch(_PATCH_EXTRACT_NEXUS) + async def test_execute_nexus_operation_cancel( + self, mock_extract_nexus: Any, MockRunTree: Any + ) -> None: + """Creates a run named RunCancelNexusOperationHandler:{service}/{operation}. + + Uses _extract_nexus_context for context extraction. 
+ """ + mock_extract_nexus.return_value = None + mock_run = _make_mock_run() + MockRunTree.return_value = mock_run + interceptor, mock_next = self._make_nexus_interceptor() + + mock_input = MagicMock() + mock_input.ctx = MagicMock() + mock_input.ctx.service = "MyService" + mock_input.ctx.operation = "cancel_op" + mock_input.ctx.headers = {} + + await interceptor.execute_nexus_operation_cancel(mock_input) + + mock_extract_nexus.assert_called_once_with(mock_input.ctx.headers) + assert ( + _get_runtree_name(MockRunTree) + == "RunCancelNexusOperationHandler:MyService/cancel_op" + ) + assert MockRunTree.call_args.kwargs.get("run_type") == "tool" + mock_next.execute_nexus_operation_cancel.assert_called_once() + + +# =================================================================== +# TestLazyClientPrevention +# =================================================================== + + +class TestLazyClientPrevention: + """Tests that RunTree always receives ls_client= to prevent lazy Client creation.""" + + @patch(_PATCH_IN_WORKFLOW, return_value=False) + @patch(_PATCH_RUNTREE) + def test_runtree_always_receives_ls_client( + self, MockRunTree: Any, _mock_in_wf: Any + ) -> None: + """Every RunTree() created by _maybe_run receives ls_client= (pre-created client).""" + mock_client = MagicMock() + mock_run = _make_mock_run() + MockRunTree.return_value = mock_run + + with _maybe_run( + mock_client, + "TestRun", + add_temporal_runs=True, + ): + pass + + MockRunTree.assert_called_once() + call_kwargs = MockRunTree.call_args.kwargs + assert "ls_client" in call_kwargs + assert call_kwargs["ls_client"] is mock_client + + +# =================================================================== +# TestAddTemporalRunsToggle +# =================================================================== + + +class TestAddTemporalRunsToggle: + """Tests for the add_temporal_runs toggle.""" + + @patch(_PATCH_RUNTREE) + @patch(_PATCH_IN_WORKFLOW, return_value=False) + def test_false_skips_traces(self, 
_mock_in_wf: Any, MockRunTree: Any) -> None: + """With add_temporal_runs=False, _maybe_run yields None (no run created). + + Callers are responsible for propagating context even when the run is None. + See test_false_still_propagates_context for the full behavior. + """ + mock_client = MagicMock() + with _maybe_run( + mock_client, + "TestRun", + add_temporal_runs=False, + ) as run: + assert run is None + MockRunTree.assert_not_called() + + @pytest.mark.asyncio + @patch(_PATCH_TRACING_CTX) + @patch(_PATCH_SANDBOX) + @patch(_PATCH_RUNTREE) + @patch(_PATCH_IS_REPLAYING, return_value=False) + @patch(_PATCH_IN_WORKFLOW, return_value=True) + @patch(_PATCH_WF_INFO) + @patch(f"{_MOD}.temporalio.activity.info") + async def test_false_still_propagates_context( + self, + mock_act_info: Any, + mock_wf_info: Any, + _mock_in_wf: Any, + _mock_replaying: Any, + MockRunTree: Any, + _mock_sandbox: Any, + mock_tracing_ctx: Any, + ) -> None: + """With add_temporal_runs=False, no runs are created but context still propagates. + + 1. Workflow outbound: injects the inbound's _current_run (parent fallback) + into headers even though no StartActivity run is created. + 2. Activity inbound: sets tracing_context(parent=extracted_parent) + unconditionally (before _maybe_run), so @traceable code nests correctly + even without a RunActivity run. 
+ """ + from temporalio.contrib.langsmith._interceptor import ( + _LangSmithWorkflowOutboundInterceptor, + ) + + mock_wf_info.return_value = _mock_workflow_info() + mock_act_info.return_value = _mock_activity_info() + + # --- Workflow outbound: context propagation without run creation --- + config = LangSmithInterceptor(client=MagicMock(), add_temporal_runs=False) + + # Create inbound interceptor + wf_class_input = MagicMock() + wf_interceptor_cls = config.workflow_interceptor_class(wf_class_input) + mock_inbound_next = MagicMock() + mock_inbound_next.execute_workflow = AsyncMock() + inbound = wf_interceptor_cls(mock_inbound_next) + + # Create outbound interceptor + mock_outbound_next = MagicMock() + mock_outbound_next.start_activity = MagicMock() + inbound.init(mock_outbound_next) + outbound = _LangSmithWorkflowOutboundInterceptor( + mock_outbound_next, config, inbound + ) + + # Simulate an inbound parent context (as if extracted from headers) + mock_parent = _make_mock_run() + inbound._current_run = mock_parent + + mock_input = MagicMock() + mock_input.activity = "do_thing" + mock_input.headers = {} + + outbound.start_activity(mock_input) + + # No RunTree should be created (add_temporal_runs=False) + MockRunTree.assert_not_called() + # But headers SHOULD be injected from the inbound's parent context + assert HEADER_KEY in mock_input.headers + mock_outbound_next.start_activity.assert_called_once() + + # --- Activity inbound: tracing_context with extracted parent --- + MockRunTree.reset_mock() + mock_tracing_ctx.reset_mock() + + mock_act_next = MagicMock() + mock_act_next.execute_activity = AsyncMock(return_value="result") + act_interceptor = config.intercept_activity(mock_act_next) + + mock_act_input = MagicMock() + mock_extracted_parent = _make_mock_run() + + with patch(f"{_MOD}._extract_context", return_value=mock_extracted_parent): + await act_interceptor.execute_activity(mock_act_input) + + # No RunTree should be created (add_temporal_runs=False) + 
MockRunTree.assert_not_called() + # tracing_context SHOULD be called with the client and extracted parent + # (unconditionally, before _maybe_run) + mock_tracing_ctx.assert_called_once_with( + client=config._client, enabled=True, parent=mock_extracted_parent + ) + mock_act_next.execute_activity.assert_called_once() + + @pytest.mark.asyncio + @patch(_PATCH_TRACING_CTX) + @patch(_PATCH_RUNTREE) + @patch(f"{_MOD}.temporalio.activity.info") + async def test_false_activity_no_parent_no_context( + self, + mock_act_info: Any, + MockRunTree: Any, + mock_tracing_ctx: Any, + ) -> None: + """With add_temporal_runs=False and no parent in headers, tracing_context + is still called with the client (so @traceable can use it), but no parent. + """ + mock_act_info.return_value = _mock_activity_info() + config = LangSmithInterceptor(client=MagicMock(), add_temporal_runs=False) + + mock_act_next = MagicMock() + mock_act_next.execute_activity = AsyncMock(return_value="result") + act_interceptor = config.intercept_activity(mock_act_next) + + mock_act_input = MagicMock() + + with patch(f"{_MOD}._extract_context", return_value=None): + await act_interceptor.execute_activity(mock_act_input) + + MockRunTree.assert_not_called() + # tracing_context called with client and enabled (no parent) + mock_tracing_ctx.assert_called_once_with(client=config._client, enabled=True) + mock_act_next.execute_activity.assert_called_once() diff --git a/tests/contrib/langsmith/test_plugin.py b/tests/contrib/langsmith/test_plugin.py new file mode 100644 index 000000000..f902d20ef --- /dev/null +++ b/tests/contrib/langsmith/test_plugin.py @@ -0,0 +1,146 @@ +"""Tests for LangSmithPlugin construction, configuration, and end-to-end usage.""" + +from __future__ import annotations + +import uuid +from unittest.mock import MagicMock + +import pytest +from langsmith import traceable, tracing_context + +from temporalio.client import Client +from temporalio.contrib.langsmith import LangSmithInterceptor, LangSmithPlugin 
+from temporalio.testing import WorkflowEnvironment +from tests.contrib.langsmith.conftest import dump_runs +from tests.contrib.langsmith.test_integration import ( + ComprehensiveWorkflow, + NexusService, + SimpleNexusWorkflow, + TraceableActivityWorkflow, + _make_client_and_collector, + _poll_query, + nested_traceable_activity, + traceable_activity, +) +from tests.helpers import new_worker +from tests.helpers.nexus import make_nexus_endpoint_name + + +class TestPluginConstruction: + """Tests for LangSmithPlugin construction.""" + + def test_construction_stores_all_config(self) -> None: + """All constructor kwargs are stored on the interceptor.""" + mock_client = MagicMock() + plugin = LangSmithPlugin( + client=mock_client, + project_name="my-project", + add_temporal_runs=False, + metadata={"env": "prod"}, + tags=["v1"], + ) + assert plugin.interceptors is not None + assert len(plugin.interceptors) > 0 + interceptor = plugin.interceptors[0] + assert isinstance(interceptor, LangSmithInterceptor) + assert interceptor._client is mock_client + assert interceptor._project_name == "my-project" + assert interceptor._add_temporal_runs is False + assert interceptor._default_metadata == {"env": "prod"} + assert interceptor._default_tags == ["v1"] + + +class TestPluginIntegration: + """End-to-end test using LangSmithPlugin as a Temporal client plugin.""" + + async def test_comprehensive_plugin_trace_hierarchy( + self, client: Client, env: WorkflowEnvironment + ) -> None: + """Plugin wired to a real Temporal worker produces the full trace hierarchy.""" + if env.supports_time_skipping: + pytest.skip("Time-skipping server doesn't persist headers.") + + temporal_client, collector, mock_ls_client = _make_client_and_collector( + client, add_temporal_runs=True + ) + + @traceable(name="user_pipeline") + async def user_pipeline() -> str: + async with new_worker( + temporal_client, + ComprehensiveWorkflow, + TraceableActivityWorkflow, + SimpleNexusWorkflow, + 
activities=[nested_traceable_activity, traceable_activity], + nexus_service_handlers=[NexusService()], + max_cached_workflows=0, + ) as worker: + await env.create_nexus_endpoint( + make_nexus_endpoint_name(worker.task_queue), + worker.task_queue, + ) + workflow_id = f"plugin-comprehensive-{uuid.uuid4()}" + handle = await temporal_client.start_workflow( + ComprehensiveWorkflow.run, + id=workflow_id, + task_queue=worker.task_queue, + ) + # Poll via raw client to avoid creating trace runs + raw_handle = client.get_workflow_handle(workflow_id) + assert await _poll_query( + raw_handle, + ComprehensiveWorkflow.is_waiting_for_signal, + expected=True, + ), "Workflow never reached signal wait point" + await handle.query(ComprehensiveWorkflow.my_query) + await handle.signal(ComprehensiveWorkflow.my_signal, "hello") + await handle.execute_update(ComprehensiveWorkflow.my_update, "finish") + return await handle.result() + + with tracing_context(client=mock_ls_client, enabled=True): + result = await user_pipeline() + + assert result == "comprehensive-done" + + hierarchy = dump_runs(collector) + expected = [ + "user_pipeline", + " StartWorkflow:ComprehensiveWorkflow", + " RunWorkflow:ComprehensiveWorkflow", + " StartActivity:nested_traceable_activity", + " RunActivity:nested_traceable_activity", + " outer_chain", + " inner_llm_call", + " StartActivity:nested_traceable_activity", + " RunActivity:nested_traceable_activity", + " outer_chain", + " inner_llm_call", + " outer_chain", + " inner_llm_call", + " StartChildWorkflow:TraceableActivityWorkflow", + " RunWorkflow:TraceableActivityWorkflow", + " StartActivity:traceable_activity", + " RunActivity:traceable_activity", + " inner_llm_call", + " StartNexusOperation:NexusService/run_operation", + " RunStartNexusOperationHandler:NexusService/run_operation", + " StartWorkflow:SimpleNexusWorkflow", + " RunWorkflow:SimpleNexusWorkflow", + " StartActivity:traceable_activity", + " RunActivity:traceable_activity", + " inner_llm_call", + " 
StartActivity:nested_traceable_activity", + " RunActivity:nested_traceable_activity", + " outer_chain", + " inner_llm_call", + " QueryWorkflow:my_query", + " HandleQuery:my_query", + " SignalWorkflow:my_signal", + " HandleSignal:my_signal", + " StartWorkflowUpdate:my_update", + " ValidateUpdate:my_update", + " HandleUpdate:my_update", + ] + assert ( + hierarchy == expected + ), f"Hierarchy mismatch.\nExpected:\n{expected}\nActual:\n{hierarchy}" diff --git a/uv.lock b/uv.lock index 9921726a0..2e1355303 100644 --- a/uv.lock +++ b/uv.lock @@ -2105,6 +2105,26 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/d3/32/da7f44bcb1105d3e88a0b74ebdca50c59121d2ddf71c9e34ba47df7f3a56/keyring-25.6.0-py3-none-any.whl", hash = "sha256:552a3f7af126ece7ed5c89753650eec89c7eaae8617d0aa4d9ad2b75111266bd", size = 39085, upload-time = "2024-12-25T15:26:44.377Z" }, ] +[[package]] +name = "langsmith" +version = "0.7.17" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "httpx" }, + { name = "orjson", marker = "platform_python_implementation != 'PyPy'" }, + { name = "packaging" }, + { name = "pydantic" }, + { name = "requests" }, + { name = "requests-toolbelt" }, + { name = "uuid-utils" }, + { name = "xxhash" }, + { name = "zstandard" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/71/79/81041dde07a974e728db7def23c1c7255950b8874102925cc77093bc847d/langsmith-0.7.17.tar.gz", hash = "sha256:6c1b0c2863cdd6636d2a58b8d5b1b80060703d98cac2593f4233e09ac25b5a9d", size = 1132228, upload-time = "2026-03-12T20:41:10.808Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/34/31/62689d57f4d25792bd6a3c05c868771899481be2f3e31f9e71d31e1ac4ab/langsmith-0.7.17-py3-none-any.whl", hash = "sha256:cbec10460cb6c6ecc94c18c807be88a9984838144ae6c4693c9f859f378d7d02", size = 359147, upload-time = "2026-03-12T20:41:08.758Z" }, +] + [[package]] name = "litellm" version = "1.78.0" @@ -3022,6 +3042,87 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/07/90/68152b7465f50285d3ce2481b3aec2f82822e3f52e5152eeeaf516bab841/opentelemetry_semantic_conventions-0.58b0-py3-none-any.whl", hash = "sha256:5564905ab1458b96684db1340232729fce3b5375a06e140e8904c78e4f815b28", size = 207954, upload-time = "2025-09-11T10:28:59.218Z" }, ] +[[package]] +name = "orjson" +version = "3.11.7" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/53/45/b268004f745ede84e5798b48ee12b05129d19235d0e15267aa57dcdb400b/orjson-3.11.7.tar.gz", hash = "sha256:9b1a67243945819ce55d24a30b59d6a168e86220452d2c96f4d1f093e71c0c49", size = 6144992, upload-time = "2026-02-02T15:38:49.29Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/de/1a/a373746fa6d0e116dd9e54371a7b54622c44d12296d5d0f3ad5e3ff33490/orjson-3.11.7-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:a02c833f38f36546ba65a452127633afce4cf0dd7296b753d3bb54e55e5c0174", size = 229140, upload-time = "2026-02-02T15:37:06.082Z" }, + { url = "https://files.pythonhosted.org/packages/52/a2/fa129e749d500f9b183e8a3446a193818a25f60261e9ce143ad61e975208/orjson-3.11.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b63c6e6738d7c3470ad01601e23376aa511e50e1f3931395b9f9c722406d1a67", size = 128670, upload-time = "2026-02-02T15:37:08.002Z" }, + { url = "https://files.pythonhosted.org/packages/08/93/1e82011cd1e0bd051ef9d35bed1aa7fb4ea1f0a055dc2c841b46b43a9ebd/orjson-3.11.7-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:043d3006b7d32c7e233b8cfb1f01c651013ea079e08dcef7189a29abd8befe11", size = 123832, upload-time = "2026-02-02T15:37:09.191Z" }, + { url = "https://files.pythonhosted.org/packages/fe/d8/a26b431ef962c7d55736674dddade876822f3e33223c1f47a36879350d04/orjson-3.11.7-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:57036b27ac8a25d81112eb0cc9835cd4833c5b16e1467816adc0015f59e870dc", size = 129171, upload-time = "2026-02-02T15:37:11.112Z" }, + { url = "https://files.pythonhosted.org/packages/a7/19/f47819b84a580f490da260c3ee9ade214cf4cf78ac9ce8c1c758f80fdfc9/orjson-3.11.7-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:733ae23ada68b804b222c44affed76b39e30806d38660bf1eb200520d259cc16", size = 141967, upload-time = "2026-02-02T15:37:12.282Z" }, + { url = "https://files.pythonhosted.org/packages/5b/cd/37ece39a0777ba077fdcdbe4cccae3be8ed00290c14bf8afdc548befc260/orjson-3.11.7-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5fdfad2093bdd08245f2e204d977facd5f871c88c4a71230d5bcbd0e43bf6222", size = 130991, upload-time = "2026-02-02T15:37:13.465Z" }, + { url = "https://files.pythonhosted.org/packages/8f/ed/f2b5d66aa9b6b5c02ff5f120efc7b38c7c4962b21e6be0f00fd99a5c348e/orjson-3.11.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cededd6738e1c153530793998e31c05086582b08315db48ab66649768f326baa", size = 133674, upload-time = "2026-02-02T15:37:14.694Z" }, + { url = "https://files.pythonhosted.org/packages/c4/6e/baa83e68d1aa09fa8c3e5b2c087d01d0a0bd45256de719ed7bc22c07052d/orjson-3.11.7-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:14f440c7268c8f8633d1b3d443a434bd70cb15686117ea6beff8fdc8f5917a1e", size = 138722, upload-time = "2026-02-02T15:37:16.501Z" }, + { url = "https://files.pythonhosted.org/packages/0c/47/7f8ef4963b772cd56999b535e553f7eb5cd27e9dd6c049baee6f18bfa05d/orjson-3.11.7-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:3a2479753bbb95b0ebcf7969f562cdb9668e6d12416a35b0dda79febf89cdea2", size = 409056, upload-time = "2026-02-02T15:37:17.895Z" }, + { url = "https://files.pythonhosted.org/packages/38/eb/2df104dd2244b3618f25325a656f85cc3277f74bbd91224752410a78f3c7/orjson-3.11.7-cp310-cp310-musllinux_1_2_i686.whl", hash = 
"sha256:71924496986275a737f38e3f22b4e0878882b3f7a310d2ff4dc96e812789120c", size = 144196, upload-time = "2026-02-02T15:37:19.349Z" }, + { url = "https://files.pythonhosted.org/packages/b6/2a/ee41de0aa3a6686598661eae2b4ebdff1340c65bfb17fcff8b87138aab21/orjson-3.11.7-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b4a9eefdc70bf8bf9857f0290f973dec534ac84c35cd6a7f4083be43e7170a8f", size = 134979, upload-time = "2026-02-02T15:37:20.906Z" }, + { url = "https://files.pythonhosted.org/packages/4c/fa/92fc5d3d402b87a8b28277a9ed35386218a6a5287c7fe5ee9b9f02c53fb2/orjson-3.11.7-cp310-cp310-win32.whl", hash = "sha256:ae9e0b37a834cef7ce8f99de6498f8fad4a2c0bf6bfc3d02abd8ed56aa15b2de", size = 127968, upload-time = "2026-02-02T15:37:23.178Z" }, + { url = "https://files.pythonhosted.org/packages/07/29/a576bf36d73d60df06904d3844a9df08e25d59eba64363aaf8ec2f9bff41/orjson-3.11.7-cp310-cp310-win_amd64.whl", hash = "sha256:d772afdb22555f0c58cfc741bdae44180122b3616faa1ecadb595cd526e4c993", size = 125128, upload-time = "2026-02-02T15:37:24.329Z" }, + { url = "https://files.pythonhosted.org/packages/37/02/da6cb01fc6087048d7f61522c327edf4250f1683a58a839fdcc435746dd5/orjson-3.11.7-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:9487abc2c2086e7c8eb9a211d2ce8855bae0e92586279d0d27b341d5ad76c85c", size = 228664, upload-time = "2026-02-02T15:37:25.542Z" }, + { url = "https://files.pythonhosted.org/packages/c1/c2/5885e7a5881dba9a9af51bc564e8967225a642b3e03d089289a35054e749/orjson-3.11.7-cp311-cp311-macosx_15_0_arm64.whl", hash = "sha256:79cacb0b52f6004caf92405a7e1f11e6e2de8bdf9019e4f76b44ba045125cd6b", size = 125344, upload-time = "2026-02-02T15:37:26.92Z" }, + { url = "https://files.pythonhosted.org/packages/a4/1d/4e7688de0a92d1caf600dfd5fb70b4c5bfff51dfa61ac555072ef2d0d32a/orjson-3.11.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c2e85fe4698b6a56d5e2ebf7ae87544d668eb6bde1ad1226c13f44663f20ec9e", size = 128404, 
upload-time = "2026-02-02T15:37:28.108Z" }, + { url = "https://files.pythonhosted.org/packages/2f/b2/ec04b74ae03a125db7bd69cffd014b227b7f341e3261bf75b5eb88a1aa92/orjson-3.11.7-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b8d14b71c0b12963fe8a62aac87119f1afdf4cb88a400f61ca5ae581449efcb5", size = 123677, upload-time = "2026-02-02T15:37:30.287Z" }, + { url = "https://files.pythonhosted.org/packages/4c/69/f95bdf960605f08f827f6e3291fe243d8aa9c5c9ff017a8d7232209184c3/orjson-3.11.7-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:91c81ef070c8f3220054115e1ef468b1c9ce8497b4e526cb9f68ab4dc0a7ac62", size = 128950, upload-time = "2026-02-02T15:37:31.595Z" }, + { url = "https://files.pythonhosted.org/packages/a4/1b/de59c57bae1d148ef298852abd31909ac3089cff370dfd4cd84cc99cbc42/orjson-3.11.7-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:411ebaf34d735e25e358a6d9e7978954a9c9d58cfb47bc6683cdc3964cd2f910", size = 141756, upload-time = "2026-02-02T15:37:32.985Z" }, + { url = "https://files.pythonhosted.org/packages/ee/9e/9decc59f4499f695f65c650f6cfa6cd4c37a3fbe8fa235a0a3614cb54386/orjson-3.11.7-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a16bcd08ab0bcdfc7e8801d9c4a9cc17e58418e4d48ddc6ded4e9e4b1a94062b", size = 130812, upload-time = "2026-02-02T15:37:34.204Z" }, + { url = "https://files.pythonhosted.org/packages/28/e6/59f932bcabd1eac44e334fe8e3281a92eacfcb450586e1f4bde0423728d8/orjson-3.11.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c0b51672e466fd7e56230ffbae7f1639e18d0ce023351fb75da21b71bc2c960", size = 133444, upload-time = "2026-02-02T15:37:35.446Z" }, + { url = "https://files.pythonhosted.org/packages/f1/36/b0f05c0eaa7ca30bc965e37e6a2956b0d67adb87a9872942d3568da846ae/orjson-3.11.7-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:136dcd6a2e796dfd9ffca9fc027d778567b0b7c9968d092842d3c323cef88aa8", size = 138609, upload-time = 
"2026-02-02T15:37:36.657Z" }, + { url = "https://files.pythonhosted.org/packages/b8/03/58ec7d302b8d86944c60c7b4b82975d5161fcce4c9bc8c6cb1d6741b6115/orjson-3.11.7-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:7ba61079379b0ae29e117db13bda5f28d939766e410d321ec1624afc6a0b0504", size = 408918, upload-time = "2026-02-02T15:37:38.076Z" }, + { url = "https://files.pythonhosted.org/packages/06/3a/868d65ef9a8b99be723bd510de491349618abd9f62c826cf206d962db295/orjson-3.11.7-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:0527a4510c300e3b406591b0ba69b5dc50031895b0a93743526a3fc45f59d26e", size = 143998, upload-time = "2026-02-02T15:37:39.706Z" }, + { url = "https://files.pythonhosted.org/packages/5b/c7/1e18e1c83afe3349f4f6dc9e14910f0ae5f82eac756d1412ea4018938535/orjson-3.11.7-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a709e881723c9b18acddcfb8ba357322491ad553e277cf467e1e7e20e2d90561", size = 134802, upload-time = "2026-02-02T15:37:41.002Z" }, + { url = "https://files.pythonhosted.org/packages/d4/0b/ccb7ee1a65b37e8eeb8b267dc953561d72370e85185e459616d4345bab34/orjson-3.11.7-cp311-cp311-win32.whl", hash = "sha256:c43b8b5bab288b6b90dac410cca7e986a4fa747a2e8f94615aea407da706980d", size = 127828, upload-time = "2026-02-02T15:37:42.241Z" }, + { url = "https://files.pythonhosted.org/packages/af/9e/55c776dffda3f381e0f07d010a4f5f3902bf48eaba1bb7684d301acd4924/orjson-3.11.7-cp311-cp311-win_amd64.whl", hash = "sha256:6543001328aa857187f905308a028935864aefe9968af3848401b6fe80dbb471", size = 124941, upload-time = "2026-02-02T15:37:43.444Z" }, + { url = "https://files.pythonhosted.org/packages/aa/8e/424a620fa7d263b880162505fb107ef5e0afaa765b5b06a88312ac291560/orjson-3.11.7-cp311-cp311-win_arm64.whl", hash = "sha256:1ee5cc7160a821dfe14f130bc8e63e7611051f964b463d9e2a3a573204446a4d", size = 126245, upload-time = "2026-02-02T15:37:45.18Z" }, + { url = 
"https://files.pythonhosted.org/packages/80/bf/76f4f1665f6983385938f0e2a5d7efa12a58171b8456c252f3bae8a4cf75/orjson-3.11.7-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:bd03ea7606833655048dab1a00734a2875e3e86c276e1d772b2a02556f0d895f", size = 228545, upload-time = "2026-02-02T15:37:46.376Z" }, + { url = "https://files.pythonhosted.org/packages/79/53/6c72c002cb13b5a978a068add59b25a8bdf2800ac1c9c8ecdb26d6d97064/orjson-3.11.7-cp312-cp312-macosx_15_0_arm64.whl", hash = "sha256:89e440ebc74ce8ab5c7bc4ce6757b4a6b1041becb127df818f6997b5c71aa60b", size = 125224, upload-time = "2026-02-02T15:37:47.697Z" }, + { url = "https://files.pythonhosted.org/packages/2c/83/10e48852865e5dd151bdfe652c06f7da484578ed02c5fca938e3632cb0b8/orjson-3.11.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ede977b5fe5ac91b1dffc0a517ca4542d2ec8a6a4ff7b2652d94f640796342a", size = 128154, upload-time = "2026-02-02T15:37:48.954Z" }, + { url = "https://files.pythonhosted.org/packages/6e/52/a66e22a2b9abaa374b4a081d410edab6d1e30024707b87eab7c734afe28d/orjson-3.11.7-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b7b1dae39230a393df353827c855a5f176271c23434cfd2db74e0e424e693e10", size = 123548, upload-time = "2026-02-02T15:37:50.187Z" }, + { url = "https://files.pythonhosted.org/packages/de/38/605d371417021359f4910c496f764c48ceb8997605f8c25bf1dfe58c0ebe/orjson-3.11.7-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ed46f17096e28fb28d2975834836a639af7278aa87c84f68ab08fbe5b8bd75fa", size = 129000, upload-time = "2026-02-02T15:37:51.426Z" }, + { url = "https://files.pythonhosted.org/packages/44/98/af32e842b0ffd2335c89714d48ca4e3917b42f5d6ee5537832e069a4b3ac/orjson-3.11.7-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3726be79e36e526e3d9c1aceaadbfb4a04ee80a72ab47b3f3c17fefb9812e7b8", size = 141686, upload-time = "2026-02-02T15:37:52.607Z" }, + { url = 
"https://files.pythonhosted.org/packages/96/0b/fc793858dfa54be6feee940c1463370ece34b3c39c1ca0aa3845f5ba9892/orjson-3.11.7-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0724e265bc548af1dedebd9cb3d24b4e1c1e685a343be43e87ba922a5c5fff2f", size = 130812, upload-time = "2026-02-02T15:37:53.944Z" }, + { url = "https://files.pythonhosted.org/packages/dc/91/98a52415059db3f374757d0b7f0f16e3b5cd5976c90d1c2b56acaea039e6/orjson-3.11.7-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e7745312efa9e11c17fbd3cb3097262d079da26930ae9ae7ba28fb738367cbad", size = 133440, upload-time = "2026-02-02T15:37:55.615Z" }, + { url = "https://files.pythonhosted.org/packages/dc/b6/cb540117bda61791f46381f8c26c8f93e802892830a6055748d3bb1925ab/orjson-3.11.7-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f904c24bdeabd4298f7a977ef14ca2a022ca921ed670b92ecd16ab6f3d01f867", size = 138386, upload-time = "2026-02-02T15:37:56.814Z" }, + { url = "https://files.pythonhosted.org/packages/63/1a/50a3201c334a7f17c231eee5f841342190723794e3b06293f26e7cf87d31/orjson-3.11.7-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:b9fc4d0f81f394689e0814617aadc4f2ea0e8025f38c226cbf22d3b5ddbf025d", size = 408853, upload-time = "2026-02-02T15:37:58.291Z" }, + { url = "https://files.pythonhosted.org/packages/87/cd/8de1c67d0be44fdc22701e5989c0d015a2adf391498ad42c4dc589cd3013/orjson-3.11.7-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:849e38203e5be40b776ed2718e587faf204d184fc9a008ae441f9442320c0cab", size = 144130, upload-time = "2026-02-02T15:38:00.163Z" }, + { url = "https://files.pythonhosted.org/packages/0f/fe/d605d700c35dd55f51710d159fc54516a280923cd1b7e47508982fbb387d/orjson-3.11.7-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:4682d1db3bcebd2b64757e0ddf9e87ae5f00d29d16c5cdf3a62f561d08cc3dd2", size = 134818, upload-time = "2026-02-02T15:38:01.507Z" }, + { url = 
"https://files.pythonhosted.org/packages/e4/e4/15ecc67edb3ddb3e2f46ae04475f2d294e8b60c1825fbe28a428b93b3fbd/orjson-3.11.7-cp312-cp312-win32.whl", hash = "sha256:f4f7c956b5215d949a1f65334cf9d7612dde38f20a95f2315deef167def91a6f", size = 127923, upload-time = "2026-02-02T15:38:02.75Z" }, + { url = "https://files.pythonhosted.org/packages/34/70/2e0855361f76198a3965273048c8e50a9695d88cd75811a5b46444895845/orjson-3.11.7-cp312-cp312-win_amd64.whl", hash = "sha256:bf742e149121dc5648ba0a08ea0871e87b660467ef168a3a5e53bc1fbd64bb74", size = 125007, upload-time = "2026-02-02T15:38:04.032Z" }, + { url = "https://files.pythonhosted.org/packages/68/40/c2051bd19fc467610fed469dc29e43ac65891571138f476834ca192bc290/orjson-3.11.7-cp312-cp312-win_arm64.whl", hash = "sha256:26c3b9132f783b7d7903bf1efb095fed8d4a3a85ec0d334ee8beff3d7a4749d5", size = 126089, upload-time = "2026-02-02T15:38:05.297Z" }, + { url = "https://files.pythonhosted.org/packages/89/25/6e0e52cac5aab51d7b6dcd257e855e1dec1c2060f6b28566c509b4665f62/orjson-3.11.7-cp313-cp313-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:1d98b30cc1313d52d4af17d9c3d307b08389752ec5f2e5febdfada70b0f8c733", size = 228390, upload-time = "2026-02-02T15:38:06.8Z" }, + { url = "https://files.pythonhosted.org/packages/a5/29/a77f48d2fc8a05bbc529e5ff481fb43d914f9e383ea2469d4f3d51df3d00/orjson-3.11.7-cp313-cp313-macosx_15_0_arm64.whl", hash = "sha256:d897e81f8d0cbd2abb82226d1860ad2e1ab3ff16d7b08c96ca00df9d45409ef4", size = 125189, upload-time = "2026-02-02T15:38:08.181Z" }, + { url = "https://files.pythonhosted.org/packages/89/25/0a16e0729a0e6a1504f9d1a13cdd365f030068aab64cec6958396b9969d7/orjson-3.11.7-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:814be4b49b228cfc0b3c565acf642dd7d13538f966e3ccde61f4f55be3e20785", size = 128106, upload-time = "2026-02-02T15:38:09.41Z" }, + { url = 
"https://files.pythonhosted.org/packages/66/da/a2e505469d60666a05ab373f1a6322eb671cb2ba3a0ccfc7d4bc97196787/orjson-3.11.7-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d06e5c5fed5caedd2e540d62e5b1c25e8c82431b9e577c33537e5fa4aa909539", size = 123363, upload-time = "2026-02-02T15:38:10.73Z" }, + { url = "https://files.pythonhosted.org/packages/23/bf/ed73f88396ea35c71b38961734ea4a4746f7ca0768bf28fd551d37e48dd0/orjson-3.11.7-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:31c80ce534ac4ea3739c5ee751270646cbc46e45aea7576a38ffec040b4029a1", size = 129007, upload-time = "2026-02-02T15:38:12.138Z" }, + { url = "https://files.pythonhosted.org/packages/73/3c/b05d80716f0225fc9008fbf8ab22841dcc268a626aa550561743714ce3bf/orjson-3.11.7-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f50979824bde13d32b4320eedd513431c921102796d86be3eee0b58e58a3ecd1", size = 141667, upload-time = "2026-02-02T15:38:13.398Z" }, + { url = "https://files.pythonhosted.org/packages/61/e8/0be9b0addd9bf86abfc938e97441dcd0375d494594b1c8ad10fe57479617/orjson-3.11.7-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9e54f3808e2b6b945078c41aa8d9b5834b28c50843846e97807e5adb75fa9705", size = 130832, upload-time = "2026-02-02T15:38:14.698Z" }, + { url = "https://files.pythonhosted.org/packages/c9/ec/c68e3b9021a31d9ec15a94931db1410136af862955854ed5dd7e7e4f5bff/orjson-3.11.7-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a12b80df61aab7b98b490fe9e4879925ba666fccdfcd175252ce4d9035865ace", size = 133373, upload-time = "2026-02-02T15:38:16.109Z" }, + { url = "https://files.pythonhosted.org/packages/d2/45/f3466739aaafa570cc8e77c6dbb853c48bf56e3b43738020e2661e08b0ac/orjson-3.11.7-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:996b65230271f1a97026fd0e6a753f51fbc0c335d2ad0c6201f711b0da32693b", size = 138307, upload-time = "2026-02-02T15:38:17.453Z" }, + { url = 
"https://files.pythonhosted.org/packages/e1/84/9f7f02288da1ffb31405c1be07657afd1eecbcb4b64ee2817b6fe0f785fa/orjson-3.11.7-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:ab49d4b2a6a1d415ddb9f37a21e02e0d5dbfe10b7870b21bf779fc21e9156157", size = 408695, upload-time = "2026-02-02T15:38:18.831Z" }, + { url = "https://files.pythonhosted.org/packages/18/07/9dd2f0c0104f1a0295ffbe912bc8d63307a539b900dd9e2c48ef7810d971/orjson-3.11.7-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:390a1dce0c055ddf8adb6aa94a73b45a4a7d7177b5c584b8d1c1947f2ba60fb3", size = 144099, upload-time = "2026-02-02T15:38:20.28Z" }, + { url = "https://files.pythonhosted.org/packages/a5/66/857a8e4a3292e1f7b1b202883bcdeb43a91566cf59a93f97c53b44bd6801/orjson-3.11.7-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:1eb80451a9c351a71dfaf5b7ccc13ad065405217726b59fdbeadbcc544f9d223", size = 134806, upload-time = "2026-02-02T15:38:22.186Z" }, + { url = "https://files.pythonhosted.org/packages/0a/5b/6ebcf3defc1aab3a338ca777214966851e92efb1f30dc7fc8285216e6d1b/orjson-3.11.7-cp313-cp313-win32.whl", hash = "sha256:7477aa6a6ec6139c5cb1cc7b214643592169a5494d200397c7fc95d740d5fcf3", size = 127914, upload-time = "2026-02-02T15:38:23.511Z" }, + { url = "https://files.pythonhosted.org/packages/00/04/c6f72daca5092e3117840a1b1e88dfc809cc1470cf0734890d0366b684a1/orjson-3.11.7-cp313-cp313-win_amd64.whl", hash = "sha256:b9f95dcdea9d4f805daa9ddf02617a89e484c6985fa03055459f90e87d7a0757", size = 124986, upload-time = "2026-02-02T15:38:24.836Z" }, + { url = "https://files.pythonhosted.org/packages/03/ba/077a0f6f1085d6b806937246860fafbd5b17f3919c70ee3f3d8d9c713f38/orjson-3.11.7-cp313-cp313-win_arm64.whl", hash = "sha256:800988273a014a0541483dc81021247d7eacb0c845a9d1a34a422bc718f41539", size = 126045, upload-time = "2026-02-02T15:38:26.216Z" }, + { url = 
"https://files.pythonhosted.org/packages/e9/1e/745565dca749813db9a093c5ebc4bac1a9475c64d54b95654336ac3ed961/orjson-3.11.7-cp314-cp314-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:de0a37f21d0d364954ad5de1970491d7fbd0fb1ef7417d4d56a36dc01ba0c0a0", size = 228391, upload-time = "2026-02-02T15:38:27.757Z" }, + { url = "https://files.pythonhosted.org/packages/46/19/e40f6225da4d3aa0c8dc6e5219c5e87c2063a560fe0d72a88deb59776794/orjson-3.11.7-cp314-cp314-macosx_15_0_arm64.whl", hash = "sha256:c2428d358d85e8da9d37cba18b8c4047c55222007a84f97156a5b22028dfbfc0", size = 125188, upload-time = "2026-02-02T15:38:29.241Z" }, + { url = "https://files.pythonhosted.org/packages/9d/7e/c4de2babef2c0817fd1f048fd176aa48c37bec8aef53d2fa932983032cce/orjson-3.11.7-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c4bc6c6ac52cdaa267552544c73e486fecbd710b7ac09bc024d5a78555a22f6", size = 128097, upload-time = "2026-02-02T15:38:30.618Z" }, + { url = "https://files.pythonhosted.org/packages/eb/74/233d360632bafd2197f217eee7fb9c9d0229eac0c18128aee5b35b0014fe/orjson-3.11.7-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bd0d68edd7dfca1b2eca9361a44ac9f24b078de3481003159929a0573f21a6bf", size = 123364, upload-time = "2026-02-02T15:38:32.363Z" }, + { url = "https://files.pythonhosted.org/packages/79/51/af79504981dd31efe20a9e360eb49c15f06df2b40e7f25a0a52d9ae888e8/orjson-3.11.7-cp314-cp314-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:623ad1b9548ef63886319c16fa317848e465a21513b31a6ad7b57443c3e0dcf5", size = 129076, upload-time = "2026-02-02T15:38:33.68Z" }, + { url = "https://files.pythonhosted.org/packages/67/e2/da898eb68b72304f8de05ca6715870d09d603ee98d30a27e8a9629abc64b/orjson-3.11.7-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6e776b998ac37c0396093d10290e60283f59cfe0fc3fccbd0ccc4bd04dd19892", size = 141705, upload-time = "2026-02-02T15:38:34.989Z" }, + { url = 
"https://files.pythonhosted.org/packages/c5/89/15364d92acb3d903b029e28d834edb8780c2b97404cbf7929aa6b9abdb24/orjson-3.11.7-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:652c6c3af76716f4a9c290371ba2e390ede06f6603edb277b481daf37f6f464e", size = 130855, upload-time = "2026-02-02T15:38:36.379Z" }, + { url = "https://files.pythonhosted.org/packages/c2/8b/ecdad52d0b38d4b8f514be603e69ccd5eacf4e7241f972e37e79792212ec/orjson-3.11.7-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a56df3239294ea5964adf074c54bcc4f0ccd21636049a2cf3ca9cf03b5d03cf1", size = 133386, upload-time = "2026-02-02T15:38:37.704Z" }, + { url = "https://files.pythonhosted.org/packages/b9/0e/45e1dcf10e17d0924b7c9162f87ec7b4ca79e28a0548acf6a71788d3e108/orjson-3.11.7-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:bda117c4148e81f746655d5a3239ae9bd00cb7bc3ca178b5fc5a5997e9744183", size = 138295, upload-time = "2026-02-02T15:38:39.096Z" }, + { url = "https://files.pythonhosted.org/packages/63/d7/4d2e8b03561257af0450f2845b91fbd111d7e526ccdf737267108075e0ba/orjson-3.11.7-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:23d6c20517a97a9daf1d48b580fcdc6f0516c6f4b5038823426033690b4d2650", size = 408720, upload-time = "2026-02-02T15:38:40.634Z" }, + { url = "https://files.pythonhosted.org/packages/78/cf/d45343518282108b29c12a65892445fc51f9319dc3c552ceb51bb5905ed2/orjson-3.11.7-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:8ff206156006da5b847c9304b6308a01e8cdbc8cce824e2779a5ba71c3def141", size = 144152, upload-time = "2026-02-02T15:38:42.262Z" }, + { url = "https://files.pythonhosted.org/packages/a9/3a/d6001f51a7275aacd342e77b735c71fa04125a3f93c36fee4526bc8c654e/orjson-3.11.7-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:962d046ee1765f74a1da723f4b33e3b228fe3a48bd307acce5021dfefe0e29b2", size = 134814, upload-time = "2026-02-02T15:38:43.627Z" }, + { url = 
"https://files.pythonhosted.org/packages/1d/d3/f19b47ce16820cc2c480f7f1723e17f6d411b3a295c60c8ad3aa9ff1c96a/orjson-3.11.7-cp314-cp314-win32.whl", hash = "sha256:89e13dd3f89f1c38a9c9eba5fbf7cdc2d1feca82f5f290864b4b7a6aac704576", size = 127997, upload-time = "2026-02-02T15:38:45.06Z" }, + { url = "https://files.pythonhosted.org/packages/12/df/172771902943af54bf661a8d102bdf2e7f932127968080632bda6054b62c/orjson-3.11.7-cp314-cp314-win_amd64.whl", hash = "sha256:845c3e0d8ded9c9271cd79596b9b552448b885b97110f628fb687aee2eed11c1", size = 124985, upload-time = "2026-02-02T15:38:46.388Z" }, + { url = "https://files.pythonhosted.org/packages/6f/1c/f2a8d8a1b17514660a614ce5f7aac74b934e69f5abc2700cc7ced882a009/orjson-3.11.7-cp314-cp314-win_arm64.whl", hash = "sha256:4a2e9c5be347b937a2e0203866f12bba36082e89b402ddb9e927d5822e43088d", size = 126038, upload-time = "2026-02-02T15:38:47.703Z" }, +] + [[package]] name = "packaging" version = "25.0" @@ -4304,6 +4405,7 @@ dev = [ { name = "googleapis-common-protos" }, { name = "grpcio-tools" }, { name = "httpx" }, + { name = "langsmith" }, { name = "maturin" }, { name = "mypy" }, { name = "mypy-protobuf" }, @@ -4350,6 +4452,7 @@ dev = [ { name = "googleapis-common-protos", specifier = "==1.70.0" }, { name = "grpcio-tools", specifier = ">=1.48.2,<2" }, { name = "httpx", specifier = ">=0.28.1" }, + { name = "langsmith", specifier = ">=0.7.17" }, { name = "maturin", specifier = ">=1.8.2" }, { name = "mypy", specifier = "==1.18.2" }, { name = "mypy-protobuf", specifier = ">=3.3.0,<4" }, @@ -4656,6 +4759,35 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/39/08/aaaad47bc4e9dc8c725e68f9d04865dbcb2052843ff09c97b08904852d84/urllib3-2.6.3-py3-none-any.whl", hash = "sha256:bf272323e553dfb2e87d9bfd225ca7b0f467b919d7bbd355436d3fd37cb0acd4", size = 131584, upload-time = "2026-01-07T16:24:42.685Z" }, ] +[[package]] +name = "uuid-utils" +version = "0.14.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/7b/d1/38a573f0c631c062cf42fa1f5d021d4dd3c31fb23e4376e4b56b0c9fbbed/uuid_utils-0.14.1.tar.gz", hash = "sha256:9bfc95f64af80ccf129c604fb6b8ca66c6f256451e32bc4570f760e4309c9b69", size = 22195, upload-time = "2026-02-20T22:50:38.833Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/43/b7/add4363039a34506a58457d96d4aa2126061df3a143eb4d042aedd6a2e76/uuid_utils-0.14.1-cp39-abi3-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:93a3b5dc798a54a1feb693f2d1cb4cf08258c32ff05ae4929b5f0a2ca624a4f0", size = 604679, upload-time = "2026-02-20T22:50:27.469Z" }, + { url = "https://files.pythonhosted.org/packages/dd/84/d1d0bef50d9e66d31b2019997c741b42274d53dde2e001b7a83e9511c339/uuid_utils-0.14.1-cp39-abi3-macosx_10_12_x86_64.whl", hash = "sha256:ccd65a4b8e83af23eae5e56d88034b2fe7264f465d3e830845f10d1591b81741", size = 309346, upload-time = "2026-02-20T22:50:31.857Z" }, + { url = "https://files.pythonhosted.org/packages/ef/ed/b6d6fd52a6636d7c3eddf97d68da50910bf17cd5ac221992506fb56cf12e/uuid_utils-0.14.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b56b0cacd81583834820588378e432b0696186683b813058b707aedc1e16c4b1", size = 344714, upload-time = "2026-02-20T22:50:42.642Z" }, + { url = "https://files.pythonhosted.org/packages/a8/a7/a19a1719fb626fe0b31882db36056d44fe904dc0cf15b06fdf56b2679cf7/uuid_utils-0.14.1-cp39-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bb3cf14de789097320a3c56bfdfdd51b1225d11d67298afbedee7e84e3837c96", size = 350914, upload-time = "2026-02-20T22:50:36.487Z" }, + { url = "https://files.pythonhosted.org/packages/1d/fc/f6690e667fdc3bb1a73f57951f97497771c56fe23e3d302d7404be394d4f/uuid_utils-0.14.1-cp39-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:60e0854a90d67f4b0cc6e54773deb8be618f4c9bad98d3326f081423b5d14fae", size = 482609, upload-time = "2026-02-20T22:50:37.511Z" }, + { url = 
"https://files.pythonhosted.org/packages/54/6e/dcd3fa031320921a12ec7b4672dea3bd1dd90ddffa363a91831ba834d559/uuid_utils-0.14.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce6743ba194de3910b5feb1a62590cd2587e33a73ab6af8a01b642ceb5055862", size = 345699, upload-time = "2026-02-20T22:50:46.87Z" }, + { url = "https://files.pythonhosted.org/packages/04/28/e5220204b58b44ac0047226a9d016a113fde039280cc8732d9e6da43b39f/uuid_utils-0.14.1-cp39-abi3-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:043fb58fde6cf1620a6c066382f04f87a8e74feb0f95a585e4ed46f5d44af57b", size = 372205, upload-time = "2026-02-20T22:50:28.438Z" }, + { url = "https://files.pythonhosted.org/packages/c7/d9/3d2eb98af94b8dfffc82b6a33b4dfc87b0a5de2c68a28f6dde0db1f8681b/uuid_utils-0.14.1-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:c915d53f22945e55fe0d3d3b0b87fd965a57f5fd15666fd92d6593a73b1dd297", size = 521836, upload-time = "2026-02-20T22:50:23.057Z" }, + { url = "https://files.pythonhosted.org/packages/a8/15/0eb106cc6fe182f7577bc0ab6e2f0a40be247f35c5e297dbf7bbc460bd02/uuid_utils-0.14.1-cp39-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:0972488e3f9b449e83f006ead5a0e0a33ad4a13e4462e865b7c286ab7d7566a3", size = 625260, upload-time = "2026-02-20T22:50:25.949Z" }, + { url = "https://files.pythonhosted.org/packages/3c/17/f539507091334b109e7496830af2f093d9fc8082411eafd3ece58af1f8ba/uuid_utils-0.14.1-cp39-abi3-musllinux_1_2_i686.whl", hash = "sha256:1c238812ae0c8ffe77d8d447a32c6dfd058ea4631246b08b5a71df586ff08531", size = 587824, upload-time = "2026-02-20T22:50:35.225Z" }, + { url = "https://files.pythonhosted.org/packages/2e/c2/d37a7b2e41f153519367d4db01f0526e0d4b06f1a4a87f1c5dfca5d70a8b/uuid_utils-0.14.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:bec8f8ef627af86abf8298e7ec50926627e29b34fa907fcfbedb45aaa72bca43", size = 551407, upload-time = "2026-02-20T22:50:44.915Z" }, + { url = 
"https://files.pythonhosted.org/packages/65/36/2d24b2cbe78547c6532da33fb8613debd3126eccc33a6374ab788f5e46e9/uuid_utils-0.14.1-cp39-abi3-win32.whl", hash = "sha256:b54d6aa6252d96bac1fdbc80d26ba71bad9f220b2724d692ad2f2310c22ef523", size = 183476, upload-time = "2026-02-20T22:50:32.745Z" }, + { url = "https://files.pythonhosted.org/packages/83/92/2d7e90df8b1a69ec4cff33243ce02b7a62f926ef9e2f0eca5a026889cd73/uuid_utils-0.14.1-cp39-abi3-win_amd64.whl", hash = "sha256:fc27638c2ce267a0ce3e06828aff786f91367f093c80625ee21dad0208e0f5ba", size = 187147, upload-time = "2026-02-20T22:50:45.807Z" }, + { url = "https://files.pythonhosted.org/packages/d9/26/529f4beee17e5248e37e0bc17a2761d34c0fa3b1e5729c88adb2065bae6e/uuid_utils-0.14.1-cp39-abi3-win_arm64.whl", hash = "sha256:b04cb49b42afbc4ff8dbc60cf054930afc479d6f4dd7f1ec3bbe5dbfdde06b7a", size = 188132, upload-time = "2026-02-20T22:50:41.718Z" }, + { url = "https://files.pythonhosted.org/packages/91/f9/6c64bdbf71f58ccde7919e00491812556f446a5291573af92c49a5e9aaef/uuid_utils-0.14.1-pp311-pypy311_pp73-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:b197cd5424cf89fb019ca7f53641d05bfe34b1879614bed111c9c313b5574cd8", size = 591617, upload-time = "2026-02-20T22:50:24.532Z" }, + { url = "https://files.pythonhosted.org/packages/d0/f0/758c3b0fb0c4871c7704fef26a5bc861de4f8a68e4831669883bebe07b0f/uuid_utils-0.14.1-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:12c65020ba6cb6abe1d57fcbfc2d0ea0506c67049ee031714057f5caf0f9bc9c", size = 303702, upload-time = "2026-02-20T22:50:40.687Z" }, + { url = "https://files.pythonhosted.org/packages/85/89/d91862b544c695cd58855efe3201f83894ed82fffe34500774238ab8eba7/uuid_utils-0.14.1-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b5d2ad28063d422ccc2c28d46471d47b61a58de885d35113a8f18cb547e25bf", size = 337678, upload-time = "2026-02-20T22:50:39.768Z" }, + { url = 
"https://files.pythonhosted.org/packages/ee/6b/cf342ba8a898f1de024be0243fac67c025cad530c79ea7f89c4ce718891a/uuid_utils-0.14.1-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:da2234387b45fde40b0fedfee64a0ba591caeea9c48c7698ab6e2d85c7991533", size = 343711, upload-time = "2026-02-20T22:50:43.965Z" }, + { url = "https://files.pythonhosted.org/packages/b3/20/049418d094d396dfa6606b30af925cc68a6670c3b9103b23e6990f84b589/uuid_utils-0.14.1-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:50fffc2827348c1e48972eed3d1c698959e63f9d030aa5dd82ba451113158a62", size = 476731, upload-time = "2026-02-20T22:50:30.589Z" }, + { url = "https://files.pythonhosted.org/packages/77/a1/0857f64d53a90321e6a46a3d4cc394f50e1366132dcd2ae147f9326ca98b/uuid_utils-0.14.1-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c1dbe718765f70f5b7f9b7f66b6a937802941b1cc56bcf642ce0274169741e01", size = 338902, upload-time = "2026-02-20T22:50:33.927Z" }, + { url = "https://files.pythonhosted.org/packages/ed/d0/5bf7cbf1ac138c92b9ac21066d18faf4d7e7f651047b700eb192ca4b9fdb/uuid_utils-0.14.1-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:258186964039a8e36db10810c1ece879d229b01331e09e9030bc5dcabe231bd2", size = 364700, upload-time = "2026-02-20T22:50:21.732Z" }, +] + [[package]] name = "uvicorn" version = "0.37.0" @@ -4830,6 +4962,124 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/1f/f6/a933bd70f98e9cf3e08167fc5cd7aaaca49147e48411c0bd5ae701bb2194/wrapt-1.17.3-py3-none-any.whl", hash = "sha256:7171ae35d2c33d326ac19dd8facb1e82e5fd04ef8c6c0e394d7af55a55051c22", size = 23591, upload-time = "2025-08-12T05:53:20.674Z" }, ] +[[package]] +name = "xxhash" +version = "3.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/02/84/30869e01909fb37a6cc7e18688ee8bf1e42d57e7e0777636bd47524c43c7/xxhash-3.6.0.tar.gz", hash = 
"sha256:f0162a78b13a0d7617b2845b90c763339d1f1d82bb04a4b07f4ab535cc5e05d6", size = 85160, upload-time = "2025-10-02T14:37:08.097Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/34/ee/f9f1d656ad168681bb0f6b092372c1e533c4416b8069b1896a175c46e484/xxhash-3.6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:87ff03d7e35c61435976554477a7f4cd1704c3596a89a8300d5ce7fc83874a71", size = 32845, upload-time = "2025-10-02T14:33:51.573Z" }, + { url = "https://files.pythonhosted.org/packages/a3/b1/93508d9460b292c74a09b83d16750c52a0ead89c51eea9951cb97a60d959/xxhash-3.6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f572dfd3d0e2eb1a57511831cf6341242f5a9f8298a45862d085f5b93394a27d", size = 30807, upload-time = "2025-10-02T14:33:52.964Z" }, + { url = "https://files.pythonhosted.org/packages/07/55/28c93a3662f2d200c70704efe74aab9640e824f8ce330d8d3943bf7c9b3c/xxhash-3.6.0-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:89952ea539566b9fed2bbd94e589672794b4286f342254fad28b149f9615fef8", size = 193786, upload-time = "2025-10-02T14:33:54.272Z" }, + { url = "https://files.pythonhosted.org/packages/c1/96/fec0be9bb4b8f5d9c57d76380a366f31a1781fb802f76fc7cda6c84893c7/xxhash-3.6.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:48e6f2ffb07a50b52465a1032c3cf1f4a5683f944acaca8a134a2f23674c2058", size = 212830, upload-time = "2025-10-02T14:33:55.706Z" }, + { url = "https://files.pythonhosted.org/packages/c4/a0/c706845ba77b9611f81fd2e93fad9859346b026e8445e76f8c6fd057cc6d/xxhash-3.6.0-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:b5b848ad6c16d308c3ac7ad4ba6bede80ed5df2ba8ed382f8932df63158dd4b2", size = 211606, upload-time = "2025-10-02T14:33:57.133Z" }, + { url = 
"https://files.pythonhosted.org/packages/67/1e/164126a2999e5045f04a69257eea946c0dc3e86541b400d4385d646b53d7/xxhash-3.6.0-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a034590a727b44dd8ac5914236a7b8504144447a9682586c3327e935f33ec8cc", size = 444872, upload-time = "2025-10-02T14:33:58.446Z" }, + { url = "https://files.pythonhosted.org/packages/2d/4b/55ab404c56cd70a2cf5ecfe484838865d0fea5627365c6c8ca156bd09c8f/xxhash-3.6.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8a8f1972e75ebdd161d7896743122834fe87378160c20e97f8b09166213bf8cc", size = 193217, upload-time = "2025-10-02T14:33:59.724Z" }, + { url = "https://files.pythonhosted.org/packages/45/e6/52abf06bac316db33aa269091ae7311bd53cfc6f4b120ae77bac1b348091/xxhash-3.6.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ee34327b187f002a596d7b167ebc59a1b729e963ce645964bbc050d2f1b73d07", size = 210139, upload-time = "2025-10-02T14:34:02.041Z" }, + { url = "https://files.pythonhosted.org/packages/34/37/db94d490b8691236d356bc249c08819cbcef9273a1a30acf1254ff9ce157/xxhash-3.6.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:339f518c3c7a850dd033ab416ea25a692759dc7478a71131fe8869010d2b75e4", size = 197669, upload-time = "2025-10-02T14:34:03.664Z" }, + { url = "https://files.pythonhosted.org/packages/b7/36/c4f219ef4a17a4f7a64ed3569bc2b5a9c8311abdb22249ac96093625b1a4/xxhash-3.6.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:bf48889c9630542d4709192578aebbd836177c9f7a4a2778a7d6340107c65f06", size = 210018, upload-time = "2025-10-02T14:34:05.325Z" }, + { url = "https://files.pythonhosted.org/packages/fd/06/bfac889a374fc2fc439a69223d1750eed2e18a7db8514737ab630534fa08/xxhash-3.6.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:5576b002a56207f640636056b4160a378fe36a58db73ae5c27a7ec8db35f71d4", size = 413058, upload-time = "2025-10-02T14:34:06.925Z" }, + { url = 
"https://files.pythonhosted.org/packages/c9/d1/555d8447e0dd32ad0930a249a522bb2e289f0d08b6b16204cfa42c1f5a0c/xxhash-3.6.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:af1f3278bd02814d6dedc5dec397993b549d6f16c19379721e5a1d31e132c49b", size = 190628, upload-time = "2025-10-02T14:34:08.669Z" }, + { url = "https://files.pythonhosted.org/packages/d1/15/8751330b5186cedc4ed4b597989882ea05e0408b53fa47bcb46a6125bfc6/xxhash-3.6.0-cp310-cp310-win32.whl", hash = "sha256:aed058764db109dc9052720da65fafe84873b05eb8b07e5e653597951af57c3b", size = 30577, upload-time = "2025-10-02T14:34:10.234Z" }, + { url = "https://files.pythonhosted.org/packages/bb/cc/53f87e8b5871a6eb2ff7e89c48c66093bda2be52315a8161ddc54ea550c4/xxhash-3.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:e82da5670f2d0d98950317f82a0e4a0197150ff19a6df2ba40399c2a3b9ae5fb", size = 31487, upload-time = "2025-10-02T14:34:11.618Z" }, + { url = "https://files.pythonhosted.org/packages/9f/00/60f9ea3bb697667a14314d7269956f58bf56bb73864f8f8d52a3c2535e9a/xxhash-3.6.0-cp310-cp310-win_arm64.whl", hash = "sha256:4a082ffff8c6ac07707fb6b671caf7c6e020c75226c561830b73d862060f281d", size = 27863, upload-time = "2025-10-02T14:34:12.619Z" }, + { url = "https://files.pythonhosted.org/packages/17/d4/cc2f0400e9154df4b9964249da78ebd72f318e35ccc425e9f403c392f22a/xxhash-3.6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b47bbd8cf2d72797f3c2772eaaac0ded3d3af26481a26d7d7d41dc2d3c46b04a", size = 32844, upload-time = "2025-10-02T14:34:14.037Z" }, + { url = "https://files.pythonhosted.org/packages/5e/ec/1cc11cd13e26ea8bc3cb4af4eaadd8d46d5014aebb67be3f71fb0b68802a/xxhash-3.6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2b6821e94346f96db75abaa6e255706fb06ebd530899ed76d32cd99f20dc52fa", size = 30809, upload-time = "2025-10-02T14:34:15.484Z" }, + { url = 
"https://files.pythonhosted.org/packages/04/5f/19fe357ea348d98ca22f456f75a30ac0916b51c753e1f8b2e0e6fb884cce/xxhash-3.6.0-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:d0a9751f71a1a65ce3584e9cae4467651c7e70c9d31017fa57574583a4540248", size = 194665, upload-time = "2025-10-02T14:34:16.541Z" }, + { url = "https://files.pythonhosted.org/packages/90/3b/d1f1a8f5442a5fd8beedae110c5af7604dc37349a8e16519c13c19a9a2de/xxhash-3.6.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8b29ee68625ab37b04c0b40c3fafdf24d2f75ccd778333cfb698f65f6c463f62", size = 213550, upload-time = "2025-10-02T14:34:17.878Z" }, + { url = "https://files.pythonhosted.org/packages/c4/ef/3a9b05eb527457d5db13a135a2ae1a26c80fecd624d20f3e8dcc4cb170f3/xxhash-3.6.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:6812c25fe0d6c36a46ccb002f40f27ac903bf18af9f6dd8f9669cb4d176ab18f", size = 212384, upload-time = "2025-10-02T14:34:19.182Z" }, + { url = "https://files.pythonhosted.org/packages/0f/18/ccc194ee698c6c623acbf0f8c2969811a8a4b6185af5e824cd27b9e4fd3e/xxhash-3.6.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:4ccbff013972390b51a18ef1255ef5ac125c92dc9143b2d1909f59abc765540e", size = 445749, upload-time = "2025-10-02T14:34:20.659Z" }, + { url = "https://files.pythonhosted.org/packages/a5/86/cf2c0321dc3940a7aa73076f4fd677a0fb3e405cb297ead7d864fd90847e/xxhash-3.6.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:297b7fbf86c82c550e12e8fb71968b3f033d27b874276ba3624ea868c11165a8", size = 193880, upload-time = "2025-10-02T14:34:22.431Z" }, + { url = "https://files.pythonhosted.org/packages/82/fb/96213c8560e6f948a1ecc9a7613f8032b19ee45f747f4fca4eb31bb6d6ed/xxhash-3.6.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:dea26ae1eb293db089798d3973a5fc928a18fdd97cc8801226fae705b02b14b0", size = 210912, upload-time = "2025-10-02T14:34:23.937Z" }, + { url = "https://files.pythonhosted.org/packages/40/aa/4395e669b0606a096d6788f40dbdf2b819d6773aa290c19e6e83cbfc312f/xxhash-3.6.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:7a0b169aafb98f4284f73635a8e93f0735f9cbde17bd5ec332480484241aaa77", size = 198654, upload-time = "2025-10-02T14:34:25.644Z" }, + { url = "https://files.pythonhosted.org/packages/67/74/b044fcd6b3d89e9b1b665924d85d3f400636c23590226feb1eb09e1176ce/xxhash-3.6.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:08d45aef063a4531b785cd72de4887766d01dc8f362a515693df349fdb825e0c", size = 210867, upload-time = "2025-10-02T14:34:27.203Z" }, + { url = "https://files.pythonhosted.org/packages/bc/fd/3ce73bf753b08cb19daee1eb14aa0d7fe331f8da9c02dd95316ddfe5275e/xxhash-3.6.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:929142361a48ee07f09121fe9e96a84950e8d4df3bb298ca5d88061969f34d7b", size = 414012, upload-time = "2025-10-02T14:34:28.409Z" }, + { url = "https://files.pythonhosted.org/packages/ba/b3/5a4241309217c5c876f156b10778f3ab3af7ba7e3259e6d5f5c7d0129eb2/xxhash-3.6.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:51312c768403d8540487dbbfb557454cfc55589bbde6424456951f7fcd4facb3", size = 191409, upload-time = "2025-10-02T14:34:29.696Z" }, + { url = "https://files.pythonhosted.org/packages/c0/01/99bfbc15fb9abb9a72b088c1d95219fc4782b7d01fc835bd5744d66dd0b8/xxhash-3.6.0-cp311-cp311-win32.whl", hash = "sha256:d1927a69feddc24c987b337ce81ac15c4720955b667fe9b588e02254b80446fd", size = 30574, upload-time = "2025-10-02T14:34:31.028Z" }, + { url = "https://files.pythonhosted.org/packages/65/79/9d24d7f53819fe301b231044ea362ce64e86c74f6e8c8e51320de248b3e5/xxhash-3.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:26734cdc2d4ffe449b41d186bbeac416f704a482ed835d375a5c0cb02bc63fef", size = 31481, upload-time = "2025-10-02T14:34:32.062Z" }, + { url = 
"https://files.pythonhosted.org/packages/30/4e/15cd0e3e8772071344eab2961ce83f6e485111fed8beb491a3f1ce100270/xxhash-3.6.0-cp311-cp311-win_arm64.whl", hash = "sha256:d72f67ef8bf36e05f5b6c65e8524f265bd61071471cd4cf1d36743ebeeeb06b7", size = 27861, upload-time = "2025-10-02T14:34:33.555Z" }, + { url = "https://files.pythonhosted.org/packages/9a/07/d9412f3d7d462347e4511181dea65e47e0d0e16e26fbee2ea86a2aefb657/xxhash-3.6.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:01362c4331775398e7bb34e3ab403bc9ee9f7c497bc7dee6272114055277dd3c", size = 32744, upload-time = "2025-10-02T14:34:34.622Z" }, + { url = "https://files.pythonhosted.org/packages/79/35/0429ee11d035fc33abe32dca1b2b69e8c18d236547b9a9b72c1929189b9a/xxhash-3.6.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b7b2df81a23f8cb99656378e72501b2cb41b1827c0f5a86f87d6b06b69f9f204", size = 30816, upload-time = "2025-10-02T14:34:36.043Z" }, + { url = "https://files.pythonhosted.org/packages/b7/f2/57eb99aa0f7d98624c0932c5b9a170e1806406cdbcdb510546634a1359e0/xxhash-3.6.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:dc94790144e66b14f67b10ac8ed75b39ca47536bf8800eb7c24b50271ea0c490", size = 194035, upload-time = "2025-10-02T14:34:37.354Z" }, + { url = "https://files.pythonhosted.org/packages/4c/ed/6224ba353690d73af7a3f1c7cdb1fc1b002e38f783cb991ae338e1eb3d79/xxhash-3.6.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:93f107c673bccf0d592cdba077dedaf52fe7f42dcd7676eba1f6d6f0c3efffd2", size = 212914, upload-time = "2025-10-02T14:34:38.6Z" }, + { url = "https://files.pythonhosted.org/packages/38/86/fb6b6130d8dd6b8942cc17ab4d90e223653a89aa32ad2776f8af7064ed13/xxhash-3.6.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2aa5ee3444c25b69813663c9f8067dcfaa2e126dc55e8dddf40f4d1c25d7effa", size = 212163, upload-time = "2025-10-02T14:34:39.872Z" }, + { url = 
"https://files.pythonhosted.org/packages/ee/dc/e84875682b0593e884ad73b2d40767b5790d417bde603cceb6878901d647/xxhash-3.6.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f7f99123f0e1194fa59cc69ad46dbae2e07becec5df50a0509a808f90a0f03f0", size = 445411, upload-time = "2025-10-02T14:34:41.569Z" }, + { url = "https://files.pythonhosted.org/packages/11/4f/426f91b96701ec2f37bb2b8cec664eff4f658a11f3fa9d94f0a887ea6d2b/xxhash-3.6.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:49e03e6fe2cac4a1bc64952dd250cf0dbc5ef4ebb7b8d96bce82e2de163c82a2", size = 193883, upload-time = "2025-10-02T14:34:43.249Z" }, + { url = "https://files.pythonhosted.org/packages/53/5a/ddbb83eee8e28b778eacfc5a85c969673e4023cdeedcfcef61f36731610b/xxhash-3.6.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:bd17fede52a17a4f9a7bc4472a5867cb0b160deeb431795c0e4abe158bc784e9", size = 210392, upload-time = "2025-10-02T14:34:45.042Z" }, + { url = "https://files.pythonhosted.org/packages/1e/c2/ff69efd07c8c074ccdf0a4f36fcdd3d27363665bcdf4ba399abebe643465/xxhash-3.6.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:6fb5f5476bef678f69db04f2bd1efbed3030d2aba305b0fc1773645f187d6a4e", size = 197898, upload-time = "2025-10-02T14:34:46.302Z" }, + { url = "https://files.pythonhosted.org/packages/58/ca/faa05ac19b3b622c7c9317ac3e23954187516298a091eb02c976d0d3dd45/xxhash-3.6.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:843b52f6d88071f87eba1631b684fcb4b2068cd2180a0224122fe4ef011a9374", size = 210655, upload-time = "2025-10-02T14:34:47.571Z" }, + { url = "https://files.pythonhosted.org/packages/d4/7a/06aa7482345480cc0cb597f5c875b11a82c3953f534394f620b0be2f700c/xxhash-3.6.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:7d14a6cfaf03b1b6f5f9790f76880601ccc7896aff7ab9cd8978a939c1eb7e0d", size = 414001, upload-time = "2025-10-02T14:34:49.273Z" }, + { url = 
"https://files.pythonhosted.org/packages/23/07/63ffb386cd47029aa2916b3d2f454e6cc5b9f5c5ada3790377d5430084e7/xxhash-3.6.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:418daf3db71e1413cfe211c2f9a528456936645c17f46b5204705581a45390ae", size = 191431, upload-time = "2025-10-02T14:34:50.798Z" }, + { url = "https://files.pythonhosted.org/packages/0f/93/14fde614cadb4ddf5e7cebf8918b7e8fac5ae7861c1875964f17e678205c/xxhash-3.6.0-cp312-cp312-win32.whl", hash = "sha256:50fc255f39428a27299c20e280d6193d8b63b8ef8028995323bf834a026b4fbb", size = 30617, upload-time = "2025-10-02T14:34:51.954Z" }, + { url = "https://files.pythonhosted.org/packages/13/5d/0d125536cbe7565a83d06e43783389ecae0c0f2ed037b48ede185de477c0/xxhash-3.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:c0f2ab8c715630565ab8991b536ecded9416d615538be8ecddce43ccf26cbc7c", size = 31534, upload-time = "2025-10-02T14:34:53.276Z" }, + { url = "https://files.pythonhosted.org/packages/54/85/6ec269b0952ec7e36ba019125982cf11d91256a778c7c3f98a4c5043d283/xxhash-3.6.0-cp312-cp312-win_arm64.whl", hash = "sha256:eae5c13f3bc455a3bbb68bdc513912dc7356de7e2280363ea235f71f54064829", size = 27876, upload-time = "2025-10-02T14:34:54.371Z" }, + { url = "https://files.pythonhosted.org/packages/33/76/35d05267ac82f53ae9b0e554da7c5e281ee61f3cad44c743f0fcd354f211/xxhash-3.6.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:599e64ba7f67472481ceb6ee80fa3bd828fd61ba59fb11475572cc5ee52b89ec", size = 32738, upload-time = "2025-10-02T14:34:55.839Z" }, + { url = "https://files.pythonhosted.org/packages/31/a8/3fbce1cd96534a95e35d5120637bf29b0d7f5d8fa2f6374e31b4156dd419/xxhash-3.6.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7d8b8aaa30fca4f16f0c84a5c8d7ddee0e25250ec2796c973775373257dde8f1", size = 30821, upload-time = "2025-10-02T14:34:57.219Z" }, + { url = 
"https://files.pythonhosted.org/packages/0c/ea/d387530ca7ecfa183cb358027f1833297c6ac6098223fd14f9782cd0015c/xxhash-3.6.0-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:d597acf8506d6e7101a4a44a5e428977a51c0fadbbfd3c39650cca9253f6e5a6", size = 194127, upload-time = "2025-10-02T14:34:59.21Z" }, + { url = "https://files.pythonhosted.org/packages/ba/0c/71435dcb99874b09a43b8d7c54071e600a7481e42b3e3ce1eb5226a5711a/xxhash-3.6.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:858dc935963a33bc33490128edc1c12b0c14d9c7ebaa4e387a7869ecc4f3e263", size = 212975, upload-time = "2025-10-02T14:35:00.816Z" }, + { url = "https://files.pythonhosted.org/packages/84/7a/c2b3d071e4bb4a90b7057228a99b10d51744878f4a8a6dd643c8bd897620/xxhash-3.6.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ba284920194615cb8edf73bf52236ce2e1664ccd4a38fdb543506413529cc546", size = 212241, upload-time = "2025-10-02T14:35:02.207Z" }, + { url = "https://files.pythonhosted.org/packages/81/5f/640b6eac0128e215f177df99eadcd0f1b7c42c274ab6a394a05059694c5a/xxhash-3.6.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:4b54219177f6c6674d5378bd862c6aedf64725f70dd29c472eaae154df1a2e89", size = 445471, upload-time = "2025-10-02T14:35:03.61Z" }, + { url = "https://files.pythonhosted.org/packages/5e/1e/3c3d3ef071b051cc3abbe3721ffb8365033a172613c04af2da89d5548a87/xxhash-3.6.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:42c36dd7dbad2f5238950c377fcbf6811b1cdb1c444fab447960030cea60504d", size = 193936, upload-time = "2025-10-02T14:35:05.013Z" }, + { url = "https://files.pythonhosted.org/packages/2c/bd/4a5f68381939219abfe1c22a9e3a5854a4f6f6f3c4983a87d255f21f2e5d/xxhash-3.6.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:f22927652cba98c44639ffdc7aaf35828dccf679b10b31c4ad72a5b530a18eb7", size = 210440, upload-time = "2025-10-02T14:35:06.239Z" }, + { url = "https://files.pythonhosted.org/packages/eb/37/b80fe3d5cfb9faff01a02121a0f4d565eb7237e9e5fc66e73017e74dcd36/xxhash-3.6.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b45fad44d9c5c119e9c6fbf2e1c656a46dc68e280275007bbfd3d572b21426db", size = 197990, upload-time = "2025-10-02T14:35:07.735Z" }, + { url = "https://files.pythonhosted.org/packages/d7/fd/2c0a00c97b9e18f72e1f240ad4e8f8a90fd9d408289ba9c7c495ed7dc05c/xxhash-3.6.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:6f2580ffab1a8b68ef2b901cde7e55fa8da5e4be0977c68f78fc80f3c143de42", size = 210689, upload-time = "2025-10-02T14:35:09.438Z" }, + { url = "https://files.pythonhosted.org/packages/93/86/5dd8076a926b9a95db3206aba20d89a7fc14dd5aac16e5c4de4b56033140/xxhash-3.6.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:40c391dd3cd041ebc3ffe6f2c862f402e306eb571422e0aa918d8070ba31da11", size = 414068, upload-time = "2025-10-02T14:35:11.162Z" }, + { url = "https://files.pythonhosted.org/packages/af/3c/0bb129170ee8f3650f08e993baee550a09593462a5cddd8e44d0011102b1/xxhash-3.6.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f205badabde7aafd1a31e8ca2a3e5a763107a71c397c4481d6a804eb5063d8bd", size = 191495, upload-time = "2025-10-02T14:35:12.971Z" }, + { url = "https://files.pythonhosted.org/packages/e9/3a/6797e0114c21d1725e2577508e24006fd7ff1d8c0c502d3b52e45c1771d8/xxhash-3.6.0-cp313-cp313-win32.whl", hash = "sha256:2577b276e060b73b73a53042ea5bd5203d3e6347ce0d09f98500f418a9fcf799", size = 30620, upload-time = "2025-10-02T14:35:14.129Z" }, + { url = "https://files.pythonhosted.org/packages/86/15/9bc32671e9a38b413a76d24722a2bf8784a132c043063a8f5152d390b0f9/xxhash-3.6.0-cp313-cp313-win_amd64.whl", hash = "sha256:757320d45d2fbcce8f30c42a6b2f47862967aea7bf458b9625b4bbe7ee390392", size = 31542, upload-time = "2025-10-02T14:35:15.21Z" }, + { url = 
"https://files.pythonhosted.org/packages/39/c5/cc01e4f6188656e56112d6a8e0dfe298a16934b8c47a247236549a3f7695/xxhash-3.6.0-cp313-cp313-win_arm64.whl", hash = "sha256:457b8f85dec5825eed7b69c11ae86834a018b8e3df5e77783c999663da2f96d6", size = 27880, upload-time = "2025-10-02T14:35:16.315Z" }, + { url = "https://files.pythonhosted.org/packages/f3/30/25e5321c8732759e930c555176d37e24ab84365482d257c3b16362235212/xxhash-3.6.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:a42e633d75cdad6d625434e3468126c73f13f7584545a9cf34e883aa1710e702", size = 32956, upload-time = "2025-10-02T14:35:17.413Z" }, + { url = "https://files.pythonhosted.org/packages/9f/3c/0573299560d7d9f8ab1838f1efc021a280b5ae5ae2e849034ef3dee18810/xxhash-3.6.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:568a6d743219e717b07b4e03b0a828ce593833e498c3b64752e0f5df6bfe84db", size = 31072, upload-time = "2025-10-02T14:35:18.844Z" }, + { url = "https://files.pythonhosted.org/packages/7a/1c/52d83a06e417cd9d4137722693424885cc9878249beb3a7c829e74bf7ce9/xxhash-3.6.0-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:bec91b562d8012dae276af8025a55811b875baace6af510412a5e58e3121bc54", size = 196409, upload-time = "2025-10-02T14:35:20.31Z" }, + { url = "https://files.pythonhosted.org/packages/e3/8e/c6d158d12a79bbd0b878f8355432075fc82759e356ab5a111463422a239b/xxhash-3.6.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:78e7f2f4c521c30ad5e786fdd6bae89d47a32672a80195467b5de0480aa97b1f", size = 215736, upload-time = "2025-10-02T14:35:21.616Z" }, + { url = "https://files.pythonhosted.org/packages/bc/68/c4c80614716345d55071a396cf03d06e34b5f4917a467faf43083c995155/xxhash-3.6.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:3ed0df1b11a79856df5ffcab572cbd6b9627034c1c748c5566fa79df9048a7c5", size = 214833, upload-time = "2025-10-02T14:35:23.32Z" }, + { url = 
"https://files.pythonhosted.org/packages/7e/e9/ae27c8ffec8b953efa84c7c4a6c6802c263d587b9fc0d6e7cea64e08c3af/xxhash-3.6.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0e4edbfc7d420925b0dd5e792478ed393d6e75ff8fc219a6546fb446b6a417b1", size = 448348, upload-time = "2025-10-02T14:35:25.111Z" }, + { url = "https://files.pythonhosted.org/packages/d7/6b/33e21afb1b5b3f46b74b6bd1913639066af218d704cc0941404ca717fc57/xxhash-3.6.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fba27a198363a7ef87f8c0f6b171ec36b674fe9053742c58dd7e3201c1ab30ee", size = 196070, upload-time = "2025-10-02T14:35:26.586Z" }, + { url = "https://files.pythonhosted.org/packages/96/b6/fcabd337bc5fa624e7203aa0fa7d0c49eed22f72e93229431752bddc83d9/xxhash-3.6.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:794fe9145fe60191c6532fa95063765529770edcdd67b3d537793e8004cabbfd", size = 212907, upload-time = "2025-10-02T14:35:28.087Z" }, + { url = "https://files.pythonhosted.org/packages/4b/d3/9ee6160e644d660fcf176c5825e61411c7f62648728f69c79ba237250143/xxhash-3.6.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:6105ef7e62b5ac73a837778efc331a591d8442f8ef5c7e102376506cb4ae2729", size = 200839, upload-time = "2025-10-02T14:35:29.857Z" }, + { url = "https://files.pythonhosted.org/packages/0d/98/e8de5baa5109394baf5118f5e72ab21a86387c4f89b0e77ef3e2f6b0327b/xxhash-3.6.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:f01375c0e55395b814a679b3eea205db7919ac2af213f4a6682e01220e5fe292", size = 213304, upload-time = "2025-10-02T14:35:31.222Z" }, + { url = "https://files.pythonhosted.org/packages/7b/1d/71056535dec5c3177eeb53e38e3d367dd1d16e024e63b1cee208d572a033/xxhash-3.6.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:d706dca2d24d834a4661619dcacf51a75c16d65985718d6a7d73c1eeeb903ddf", size = 416930, upload-time = "2025-10-02T14:35:32.517Z" }, + { url = 
"https://files.pythonhosted.org/packages/dc/6c/5cbde9de2cd967c322e651c65c543700b19e7ae3e0aae8ece3469bf9683d/xxhash-3.6.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:5f059d9faeacd49c0215d66f4056e1326c80503f51a1532ca336a385edadd033", size = 193787, upload-time = "2025-10-02T14:35:33.827Z" }, + { url = "https://files.pythonhosted.org/packages/19/fa/0172e350361d61febcea941b0cc541d6e6c8d65d153e85f850a7b256ff8a/xxhash-3.6.0-cp313-cp313t-win32.whl", hash = "sha256:1244460adc3a9be84731d72b8e80625788e5815b68da3da8b83f78115a40a7ec", size = 30916, upload-time = "2025-10-02T14:35:35.107Z" }, + { url = "https://files.pythonhosted.org/packages/ad/e6/e8cf858a2b19d6d45820f072eff1bea413910592ff17157cabc5f1227a16/xxhash-3.6.0-cp313-cp313t-win_amd64.whl", hash = "sha256:b1e420ef35c503869c4064f4a2f2b08ad6431ab7b229a05cce39d74268bca6b8", size = 31799, upload-time = "2025-10-02T14:35:36.165Z" }, + { url = "https://files.pythonhosted.org/packages/56/15/064b197e855bfb7b343210e82490ae672f8bc7cdf3ddb02e92f64304ee8a/xxhash-3.6.0-cp313-cp313t-win_arm64.whl", hash = "sha256:ec44b73a4220623235f67a996c862049f375df3b1052d9899f40a6382c32d746", size = 28044, upload-time = "2025-10-02T14:35:37.195Z" }, + { url = "https://files.pythonhosted.org/packages/7e/5e/0138bc4484ea9b897864d59fce9be9086030825bc778b76cb5a33a906d37/xxhash-3.6.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:a40a3d35b204b7cc7643cbcf8c9976d818cb47befcfac8bbefec8038ac363f3e", size = 32754, upload-time = "2025-10-02T14:35:38.245Z" }, + { url = "https://files.pythonhosted.org/packages/18/d7/5dac2eb2ec75fd771957a13e5dda560efb2176d5203f39502a5fc571f899/xxhash-3.6.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:a54844be970d3fc22630b32d515e79a90d0a3ddb2644d8d7402e3c4c8da61405", size = 30846, upload-time = "2025-10-02T14:35:39.6Z" }, + { url = 
"https://files.pythonhosted.org/packages/fe/71/8bc5be2bb00deb5682e92e8da955ebe5fa982da13a69da5a40a4c8db12fb/xxhash-3.6.0-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:016e9190af8f0a4e3741343777710e3d5717427f175adfdc3e72508f59e2a7f3", size = 194343, upload-time = "2025-10-02T14:35:40.69Z" }, + { url = "https://files.pythonhosted.org/packages/e7/3b/52badfb2aecec2c377ddf1ae75f55db3ba2d321c5e164f14461c90837ef3/xxhash-3.6.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4f6f72232f849eb9d0141e2ebe2677ece15adfd0fa599bc058aad83c714bb2c6", size = 213074, upload-time = "2025-10-02T14:35:42.29Z" }, + { url = "https://files.pythonhosted.org/packages/a2/2b/ae46b4e9b92e537fa30d03dbc19cdae57ed407e9c26d163895e968e3de85/xxhash-3.6.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:63275a8aba7865e44b1813d2177e0f5ea7eadad3dd063a21f7cf9afdc7054063", size = 212388, upload-time = "2025-10-02T14:35:43.929Z" }, + { url = "https://files.pythonhosted.org/packages/f5/80/49f88d3afc724b4ac7fbd664c8452d6db51b49915be48c6982659e0e7942/xxhash-3.6.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:3cd01fa2aa00d8b017c97eb46b9a794fbdca53fc14f845f5a328c71254b0abb7", size = 445614, upload-time = "2025-10-02T14:35:45.216Z" }, + { url = "https://files.pythonhosted.org/packages/ed/ba/603ce3961e339413543d8cd44f21f2c80e2a7c5cfe692a7b1f2cccf58f3c/xxhash-3.6.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0226aa89035b62b6a86d3c68df4d7c1f47a342b8683da2b60cedcddb46c4d95b", size = 194024, upload-time = "2025-10-02T14:35:46.959Z" }, + { url = "https://files.pythonhosted.org/packages/78/d1/8e225ff7113bf81545cfdcd79eef124a7b7064a0bba53605ff39590b95c2/xxhash-3.6.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = 
"sha256:c6e193e9f56e4ca4923c61238cdaced324f0feac782544eb4c6d55ad5cc99ddd", size = 210541, upload-time = "2025-10-02T14:35:48.301Z" }, + { url = "https://files.pythonhosted.org/packages/6f/58/0f89d149f0bad89def1a8dd38feb50ccdeb643d9797ec84707091d4cb494/xxhash-3.6.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:9176dcaddf4ca963d4deb93866d739a343c01c969231dbe21680e13a5d1a5bf0", size = 198305, upload-time = "2025-10-02T14:35:49.584Z" }, + { url = "https://files.pythonhosted.org/packages/11/38/5eab81580703c4df93feb5f32ff8fa7fe1e2c51c1f183ee4e48d4bb9d3d7/xxhash-3.6.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:c1ce4009c97a752e682b897aa99aef84191077a9433eb237774689f14f8ec152", size = 210848, upload-time = "2025-10-02T14:35:50.877Z" }, + { url = "https://files.pythonhosted.org/packages/5e/6b/953dc4b05c3ce678abca756416e4c130d2382f877a9c30a20d08ee6a77c0/xxhash-3.6.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:8cb2f4f679b01513b7adbb9b1b2f0f9cdc31b70007eaf9d59d0878809f385b11", size = 414142, upload-time = "2025-10-02T14:35:52.15Z" }, + { url = "https://files.pythonhosted.org/packages/08/a9/238ec0d4e81a10eb5026d4a6972677cbc898ba6c8b9dbaec12ae001b1b35/xxhash-3.6.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:653a91d7c2ab54a92c19ccf43508b6a555440b9be1bc8be553376778be7f20b5", size = 191547, upload-time = "2025-10-02T14:35:53.547Z" }, + { url = "https://files.pythonhosted.org/packages/f1/ee/3cf8589e06c2164ac77c3bf0aa127012801128f1feebf2a079272da5737c/xxhash-3.6.0-cp314-cp314-win32.whl", hash = "sha256:a756fe893389483ee8c394d06b5ab765d96e68fbbfe6fde7aa17e11f5720559f", size = 31214, upload-time = "2025-10-02T14:35:54.746Z" }, + { url = "https://files.pythonhosted.org/packages/02/5d/a19552fbc6ad4cb54ff953c3908bbc095f4a921bc569433d791f755186f1/xxhash-3.6.0-cp314-cp314-win_amd64.whl", hash = "sha256:39be8e4e142550ef69629c9cd71b88c90e9a5db703fecbcf265546d9536ca4ad", size = 32290, upload-time = "2025-10-02T14:35:55.791Z" }, + { url = 
"https://files.pythonhosted.org/packages/b1/11/dafa0643bc30442c887b55baf8e73353a344ee89c1901b5a5c54a6c17d39/xxhash-3.6.0-cp314-cp314-win_arm64.whl", hash = "sha256:25915e6000338999236f1eb68a02a32c3275ac338628a7eaa5a269c401995679", size = 28795, upload-time = "2025-10-02T14:35:57.162Z" }, + { url = "https://files.pythonhosted.org/packages/2c/db/0e99732ed7f64182aef4a6fb145e1a295558deec2a746265dcdec12d191e/xxhash-3.6.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:c5294f596a9017ca5a3e3f8884c00b91ab2ad2933cf288f4923c3fd4346cf3d4", size = 32955, upload-time = "2025-10-02T14:35:58.267Z" }, + { url = "https://files.pythonhosted.org/packages/55/f4/2a7c3c68e564a099becfa44bb3d398810cc0ff6749b0d3cb8ccb93f23c14/xxhash-3.6.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:1cf9dcc4ab9cff01dfbba78544297a3a01dafd60f3bde4e2bfd016cf7e4ddc67", size = 31072, upload-time = "2025-10-02T14:35:59.382Z" }, + { url = "https://files.pythonhosted.org/packages/c6/d9/72a29cddc7250e8a5819dad5d466facb5dc4c802ce120645630149127e73/xxhash-3.6.0-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:01262da8798422d0685f7cef03b2bd3f4f46511b02830861df548d7def4402ad", size = 196579, upload-time = "2025-10-02T14:36:00.838Z" }, + { url = "https://files.pythonhosted.org/packages/63/93/b21590e1e381040e2ca305a884d89e1c345b347404f7780f07f2cdd47ef4/xxhash-3.6.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:51a73fb7cb3a3ead9f7a8b583ffd9b8038e277cdb8cb87cf890e88b3456afa0b", size = 215854, upload-time = "2025-10-02T14:36:02.207Z" }, + { url = "https://files.pythonhosted.org/packages/ce/b8/edab8a7d4fa14e924b29be877d54155dcbd8b80be85ea00d2be3413a9ed4/xxhash-3.6.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:b9c6df83594f7df8f7f708ce5ebeacfc69f72c9fbaaababf6cf4758eaada0c9b", size = 214965, upload-time = "2025-10-02T14:36:03.507Z" }, + { url = 
"https://files.pythonhosted.org/packages/27/67/dfa980ac7f0d509d54ea0d5a486d2bb4b80c3f1bb22b66e6a05d3efaf6c0/xxhash-3.6.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:627f0af069b0ea56f312fd5189001c24578868643203bca1abbc2c52d3a6f3ca", size = 448484, upload-time = "2025-10-02T14:36:04.828Z" }, + { url = "https://files.pythonhosted.org/packages/8c/63/8ffc2cc97e811c0ca5d00ab36604b3ea6f4254f20b7bc658ca825ce6c954/xxhash-3.6.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:aa912c62f842dfd013c5f21a642c9c10cd9f4c4e943e0af83618b4a404d9091a", size = 196162, upload-time = "2025-10-02T14:36:06.182Z" }, + { url = "https://files.pythonhosted.org/packages/4b/77/07f0e7a3edd11a6097e990f6e5b815b6592459cb16dae990d967693e6ea9/xxhash-3.6.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:b465afd7909db30168ab62afe40b2fcf79eedc0b89a6c0ab3123515dc0df8b99", size = 213007, upload-time = "2025-10-02T14:36:07.733Z" }, + { url = "https://files.pythonhosted.org/packages/ae/d8/bc5fa0d152837117eb0bef6f83f956c509332ce133c91c63ce07ee7c4873/xxhash-3.6.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:a881851cf38b0a70e7c4d3ce81fc7afd86fbc2a024f4cfb2a97cf49ce04b75d3", size = 200956, upload-time = "2025-10-02T14:36:09.106Z" }, + { url = "https://files.pythonhosted.org/packages/26/a5/d749334130de9411783873e9b98ecc46688dad5db64ca6e04b02acc8b473/xxhash-3.6.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:9b3222c686a919a0f3253cfc12bb118b8b103506612253b5baeaac10d8027cf6", size = 213401, upload-time = "2025-10-02T14:36:10.585Z" }, + { url = "https://files.pythonhosted.org/packages/89/72/abed959c956a4bfc72b58c0384bb7940663c678127538634d896b1195c10/xxhash-3.6.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:c5aa639bc113e9286137cec8fadc20e9cd732b2cc385c0b7fa673b84fc1f2a93", size = 417083, upload-time = "2025-10-02T14:36:12.276Z" }, + { url = 
"https://files.pythonhosted.org/packages/0c/b3/62fd2b586283b7d7d665fb98e266decadf31f058f1cf6c478741f68af0cb/xxhash-3.6.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:5c1343d49ac102799905e115aee590183c3921d475356cb24b4de29a4bc56518", size = 193913, upload-time = "2025-10-02T14:36:14.025Z" }, + { url = "https://files.pythonhosted.org/packages/9a/9a/c19c42c5b3f5a4aad748a6d5b4f23df3bed7ee5445accc65a0fb3ff03953/xxhash-3.6.0-cp314-cp314t-win32.whl", hash = "sha256:5851f033c3030dd95c086b4a36a2683c2ff4a799b23af60977188b057e467119", size = 31586, upload-time = "2025-10-02T14:36:15.603Z" }, + { url = "https://files.pythonhosted.org/packages/03/d6/4cc450345be9924fd5dc8c590ceda1db5b43a0a889587b0ae81a95511360/xxhash-3.6.0-cp314-cp314t-win_amd64.whl", hash = "sha256:0444e7967dac37569052d2409b00a8860c2135cff05502df4da80267d384849f", size = 32526, upload-time = "2025-10-02T14:36:16.708Z" }, + { url = "https://files.pythonhosted.org/packages/0f/c9/7243eb3f9eaabd1a88a5a5acadf06df2d83b100c62684b7425c6a11bcaa8/xxhash-3.6.0-cp314-cp314t-win_arm64.whl", hash = "sha256:bb79b1e63f6fd84ec778a4b1916dfe0a7c3fdb986c06addd5db3a0d413819d95", size = 28898, upload-time = "2025-10-02T14:36:17.843Z" }, + { url = "https://files.pythonhosted.org/packages/93/1e/8aec23647a34a249f62e2398c42955acd9b4c6ed5cf08cbea94dc46f78d2/xxhash-3.6.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:0f7b7e2ec26c1666ad5fc9dbfa426a6a3367ceaf79db5dd76264659d509d73b0", size = 30662, upload-time = "2025-10-02T14:37:01.743Z" }, + { url = "https://files.pythonhosted.org/packages/b8/0b/b14510b38ba91caf43006209db846a696ceea6a847a0c9ba0a5b1adc53d6/xxhash-3.6.0-pp311-pypy311_pp73-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:5dc1e14d14fa0f5789ec29a7062004b5933964bb9b02aae6622b8f530dc40296", size = 41056, upload-time = "2025-10-02T14:37:02.879Z" }, + { url = 
"https://files.pythonhosted.org/packages/50/55/15a7b8a56590e66ccd374bbfa3f9ffc45b810886c8c3b614e3f90bd2367c/xxhash-3.6.0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:881b47fc47e051b37d94d13e7455131054b56749b91b508b0907eb07900d1c13", size = 36251, upload-time = "2025-10-02T14:37:04.44Z" }, + { url = "https://files.pythonhosted.org/packages/62/b2/5ac99a041a29e58e95f907876b04f7067a0242cb85b5f39e726153981503/xxhash-3.6.0-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c6dc31591899f5e5666f04cc2e529e69b4072827085c1ef15294d91a004bc1bd", size = 32481, upload-time = "2025-10-02T14:37:05.869Z" }, + { url = "https://files.pythonhosted.org/packages/7b/d9/8d95e906764a386a3d3b596f3c68bb63687dfca806373509f51ce8eea81f/xxhash-3.6.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:15e0dac10eb9309508bfc41f7f9deaa7755c69e35af835db9cb10751adebc35d", size = 31565, upload-time = "2025-10-02T14:37:06.966Z" }, +] + [[package]] name = "yarl" version = "1.22.0" @@ -4996,3 +5246,93 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/36/9a/62a9ba3a919594605a07c34eee3068659bbd648e2fa0c4a86d876810b674/zope_interface-8.0.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:87e6b089002c43231fb9afec89268391bcc7a3b66e76e269ffde19a8112fb8d5", size = 264201, upload-time = "2025-09-25T06:26:27.797Z" }, { url = "https://files.pythonhosted.org/packages/da/06/8fe88bd7edef60566d21ef5caca1034e10f6b87441ea85de4bbf9ea74768/zope_interface-8.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:64a43f5280aa770cbafd0307cb3d1ff430e2a1001774e8ceb40787abe4bb6658", size = 212273, upload-time = "2025-09-25T06:00:25.398Z" }, ] + +[[package]] +name = "zstandard" +version = "0.25.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/fd/aa/3e0508d5a5dd96529cdc5a97011299056e14c6505b678fd58938792794b1/zstandard-0.25.0.tar.gz", hash = "sha256:7713e1179d162cf5c7906da876ec2ccb9c3a9dcbdffef0cc7f70c3667a205f0b", size = 711513, upload-time = "2025-09-14T22:15:54.002Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/56/7a/28efd1d371f1acd037ac64ed1c5e2b41514a6cc937dd6ab6a13ab9f0702f/zstandard-0.25.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e59fdc271772f6686e01e1b3b74537259800f57e24280be3f29c8a0deb1904dd", size = 795256, upload-time = "2025-09-14T22:15:56.415Z" }, + { url = "https://files.pythonhosted.org/packages/96/34/ef34ef77f1ee38fc8e4f9775217a613b452916e633c4f1d98f31db52c4a5/zstandard-0.25.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4d441506e9b372386a5271c64125f72d5df6d2a8e8a2a45a0ae09b03cb781ef7", size = 640565, upload-time = "2025-09-14T22:15:58.177Z" }, + { url = "https://files.pythonhosted.org/packages/9d/1b/4fdb2c12eb58f31f28c4d28e8dc36611dd7205df8452e63f52fb6261d13e/zstandard-0.25.0-cp310-cp310-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:ab85470ab54c2cb96e176f40342d9ed41e58ca5733be6a893b730e7af9c40550", size = 5345306, upload-time = "2025-09-14T22:16:00.165Z" }, + { url = "https://files.pythonhosted.org/packages/73/28/a44bdece01bca027b079f0e00be3b6bd89a4df180071da59a3dd7381665b/zstandard-0.25.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:e05ab82ea7753354bb054b92e2f288afb750e6b439ff6ca78af52939ebbc476d", size = 5055561, upload-time = "2025-09-14T22:16:02.22Z" }, + { url = "https://files.pythonhosted.org/packages/e9/74/68341185a4f32b274e0fc3410d5ad0750497e1acc20bd0f5b5f64ce17785/zstandard-0.25.0-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:78228d8a6a1c177a96b94f7e2e8d012c55f9c760761980da16ae7546a15a8e9b", size = 5402214, upload-time = "2025-09-14T22:16:04.109Z" }, + { url = 
"https://files.pythonhosted.org/packages/8b/67/f92e64e748fd6aaffe01e2b75a083c0c4fd27abe1c8747fee4555fcee7dd/zstandard-0.25.0-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:2b6bd67528ee8b5c5f10255735abc21aa106931f0dbaf297c7be0c886353c3d0", size = 5449703, upload-time = "2025-09-14T22:16:06.312Z" }, + { url = "https://files.pythonhosted.org/packages/fd/e5/6d36f92a197c3c17729a2125e29c169f460538a7d939a27eaaa6dcfcba8e/zstandard-0.25.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4b6d83057e713ff235a12e73916b6d356e3084fd3d14ced499d84240f3eecee0", size = 5556583, upload-time = "2025-09-14T22:16:08.457Z" }, + { url = "https://files.pythonhosted.org/packages/d7/83/41939e60d8d7ebfe2b747be022d0806953799140a702b90ffe214d557638/zstandard-0.25.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9174f4ed06f790a6869b41cba05b43eeb9a35f8993c4422ab853b705e8112bbd", size = 5045332, upload-time = "2025-09-14T22:16:10.444Z" }, + { url = "https://files.pythonhosted.org/packages/b3/87/d3ee185e3d1aa0133399893697ae91f221fda79deb61adbe998a7235c43f/zstandard-0.25.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:25f8f3cd45087d089aef5ba3848cd9efe3ad41163d3400862fb42f81a3a46701", size = 5572283, upload-time = "2025-09-14T22:16:12.128Z" }, + { url = "https://files.pythonhosted.org/packages/0a/1d/58635ae6104df96671076ac7d4ae7816838ce7debd94aecf83e30b7121b0/zstandard-0.25.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3756b3e9da9b83da1796f8809dd57cb024f838b9eeafde28f3cb472012797ac1", size = 4959754, upload-time = "2025-09-14T22:16:14.225Z" }, + { url = "https://files.pythonhosted.org/packages/75/d6/57e9cb0a9983e9a229dd8fd2e6e96593ef2aa82a3907188436f22b111ccd/zstandard-0.25.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:81dad8d145d8fd981b2962b686b2241d3a1ea07733e76a2f15435dfb7fb60150", size = 5266477, upload-time = "2025-09-14T22:16:16.343Z" }, + { url = 
"https://files.pythonhosted.org/packages/d1/a9/ee891e5edf33a6ebce0a028726f0bbd8567effe20fe3d5808c42323e8542/zstandard-0.25.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:a5a419712cf88862a45a23def0ae063686db3d324cec7edbe40509d1a79a0aab", size = 5440914, upload-time = "2025-09-14T22:16:18.453Z" }, + { url = "https://files.pythonhosted.org/packages/58/08/a8522c28c08031a9521f27abc6f78dbdee7312a7463dd2cfc658b813323b/zstandard-0.25.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:e7360eae90809efd19b886e59a09dad07da4ca9ba096752e61a2e03c8aca188e", size = 5819847, upload-time = "2025-09-14T22:16:20.559Z" }, + { url = "https://files.pythonhosted.org/packages/6f/11/4c91411805c3f7b6f31c60e78ce347ca48f6f16d552fc659af6ec3b73202/zstandard-0.25.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:75ffc32a569fb049499e63ce68c743155477610532da1eb38e7f24bf7cd29e74", size = 5363131, upload-time = "2025-09-14T22:16:22.206Z" }, + { url = "https://files.pythonhosted.org/packages/ef/d6/8c4bd38a3b24c4c7676a7a3d8de85d6ee7a983602a734b9f9cdefb04a5d6/zstandard-0.25.0-cp310-cp310-win32.whl", hash = "sha256:106281ae350e494f4ac8a80470e66d1fe27e497052c8d9c3b95dc4cf1ade81aa", size = 436469, upload-time = "2025-09-14T22:16:25.002Z" }, + { url = "https://files.pythonhosted.org/packages/93/90/96d50ad417a8ace5f841b3228e93d1bb13e6ad356737f42e2dde30d8bd68/zstandard-0.25.0-cp310-cp310-win_amd64.whl", hash = "sha256:ea9d54cc3d8064260114a0bbf3479fc4a98b21dffc89b3459edd506b69262f6e", size = 506100, upload-time = "2025-09-14T22:16:23.569Z" }, + { url = "https://files.pythonhosted.org/packages/2a/83/c3ca27c363d104980f1c9cee1101cc8ba724ac8c28a033ede6aab89585b1/zstandard-0.25.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:933b65d7680ea337180733cf9e87293cc5500cc0eb3fc8769f4d3c88d724ec5c", size = 795254, upload-time = "2025-09-14T22:16:26.137Z" }, + { url = 
"https://files.pythonhosted.org/packages/ac/4d/e66465c5411a7cf4866aeadc7d108081d8ceba9bc7abe6b14aa21c671ec3/zstandard-0.25.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a3f79487c687b1fc69f19e487cd949bf3aae653d181dfb5fde3bf6d18894706f", size = 640559, upload-time = "2025-09-14T22:16:27.973Z" }, + { url = "https://files.pythonhosted.org/packages/12/56/354fe655905f290d3b147b33fe946b0f27e791e4b50a5f004c802cb3eb7b/zstandard-0.25.0-cp311-cp311-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:0bbc9a0c65ce0eea3c34a691e3c4b6889f5f3909ba4822ab385fab9057099431", size = 5348020, upload-time = "2025-09-14T22:16:29.523Z" }, + { url = "https://files.pythonhosted.org/packages/3b/13/2b7ed68bd85e69a2069bcc72141d378f22cae5a0f3b353a2c8f50ef30c1b/zstandard-0.25.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:01582723b3ccd6939ab7b3a78622c573799d5d8737b534b86d0e06ac18dbde4a", size = 5058126, upload-time = "2025-09-14T22:16:31.811Z" }, + { url = "https://files.pythonhosted.org/packages/c9/dd/fdaf0674f4b10d92cb120ccff58bbb6626bf8368f00ebfd2a41ba4a0dc99/zstandard-0.25.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:5f1ad7bf88535edcf30038f6919abe087f606f62c00a87d7e33e7fc57cb69fcc", size = 5405390, upload-time = "2025-09-14T22:16:33.486Z" }, + { url = "https://files.pythonhosted.org/packages/0f/67/354d1555575bc2490435f90d67ca4dd65238ff2f119f30f72d5cde09c2ad/zstandard-0.25.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:06acb75eebeedb77b69048031282737717a63e71e4ae3f77cc0c3b9508320df6", size = 5452914, upload-time = "2025-09-14T22:16:35.277Z" }, + { url = "https://files.pythonhosted.org/packages/bb/1f/e9cfd801a3f9190bf3e759c422bbfd2247db9d7f3d54a56ecde70137791a/zstandard-0.25.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:9300d02ea7c6506f00e627e287e0492a5eb0371ec1670ae852fefffa6164b072", size = 5559635, upload-time = 
"2025-09-14T22:16:37.141Z" }, + { url = "https://files.pythonhosted.org/packages/21/88/5ba550f797ca953a52d708c8e4f380959e7e3280af029e38fbf47b55916e/zstandard-0.25.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bfd06b1c5584b657a2892a6014c2f4c20e0db0208c159148fa78c65f7e0b0277", size = 5048277, upload-time = "2025-09-14T22:16:38.807Z" }, + { url = "https://files.pythonhosted.org/packages/46/c0/ca3e533b4fa03112facbe7fbe7779cb1ebec215688e5df576fe5429172e0/zstandard-0.25.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f373da2c1757bb7f1acaf09369cdc1d51d84131e50d5fa9863982fd626466313", size = 5574377, upload-time = "2025-09-14T22:16:40.523Z" }, + { url = "https://files.pythonhosted.org/packages/12/9b/3fb626390113f272abd0799fd677ea33d5fc3ec185e62e6be534493c4b60/zstandard-0.25.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6c0e5a65158a7946e7a7affa6418878ef97ab66636f13353b8502d7ea03c8097", size = 4961493, upload-time = "2025-09-14T22:16:43.3Z" }, + { url = "https://files.pythonhosted.org/packages/cb/d3/23094a6b6a4b1343b27ae68249daa17ae0651fcfec9ed4de09d14b940285/zstandard-0.25.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:c8e167d5adf59476fa3e37bee730890e389410c354771a62e3c076c86f9f7778", size = 5269018, upload-time = "2025-09-14T22:16:45.292Z" }, + { url = "https://files.pythonhosted.org/packages/8c/a7/bb5a0c1c0f3f4b5e9d5b55198e39de91e04ba7c205cc46fcb0f95f0383c1/zstandard-0.25.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:98750a309eb2f020da61e727de7d7ba3c57c97cf6213f6f6277bb7fb42a8e065", size = 5443672, upload-time = "2025-09-14T22:16:47.076Z" }, + { url = "https://files.pythonhosted.org/packages/27/22/503347aa08d073993f25109c36c8d9f029c7d5949198050962cb568dfa5e/zstandard-0.25.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:22a086cff1b6ceca18a8dd6096ec631e430e93a8e70a9ca5efa7561a00f826fa", size = 5822753, upload-time = "2025-09-14T22:16:49.316Z" }, + { url = 
"https://files.pythonhosted.org/packages/e2/be/94267dc6ee64f0f8ba2b2ae7c7a2df934a816baaa7291db9e1aa77394c3c/zstandard-0.25.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:72d35d7aa0bba323965da807a462b0966c91608ef3a48ba761678cb20ce5d8b7", size = 5366047, upload-time = "2025-09-14T22:16:51.328Z" }, + { url = "https://files.pythonhosted.org/packages/7b/a3/732893eab0a3a7aecff8b99052fecf9f605cf0fb5fb6d0290e36beee47a4/zstandard-0.25.0-cp311-cp311-win32.whl", hash = "sha256:f5aeea11ded7320a84dcdd62a3d95b5186834224a9e55b92ccae35d21a8b63d4", size = 436484, upload-time = "2025-09-14T22:16:55.005Z" }, + { url = "https://files.pythonhosted.org/packages/43/a3/c6155f5c1cce691cb80dfd38627046e50af3ee9ddc5d0b45b9b063bfb8c9/zstandard-0.25.0-cp311-cp311-win_amd64.whl", hash = "sha256:daab68faadb847063d0c56f361a289c4f268706b598afbf9ad113cbe5c38b6b2", size = 506183, upload-time = "2025-09-14T22:16:52.753Z" }, + { url = "https://files.pythonhosted.org/packages/8c/3e/8945ab86a0820cc0e0cdbf38086a92868a9172020fdab8a03ac19662b0e5/zstandard-0.25.0-cp311-cp311-win_arm64.whl", hash = "sha256:22a06c5df3751bb7dc67406f5374734ccee8ed37fc5981bf1ad7041831fa1137", size = 462533, upload-time = "2025-09-14T22:16:53.878Z" }, + { url = "https://files.pythonhosted.org/packages/82/fc/f26eb6ef91ae723a03e16eddb198abcfce2bc5a42e224d44cc8b6765e57e/zstandard-0.25.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7b3c3a3ab9daa3eed242d6ecceead93aebbb8f5f84318d82cee643e019c4b73b", size = 795738, upload-time = "2025-09-14T22:16:56.237Z" }, + { url = "https://files.pythonhosted.org/packages/aa/1c/d920d64b22f8dd028a8b90e2d756e431a5d86194caa78e3819c7bf53b4b3/zstandard-0.25.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:913cbd31a400febff93b564a23e17c3ed2d56c064006f54efec210d586171c00", size = 640436, upload-time = "2025-09-14T22:16:57.774Z" }, + { url = 
"https://files.pythonhosted.org/packages/53/6c/288c3f0bd9fcfe9ca41e2c2fbfd17b2097f6af57b62a81161941f09afa76/zstandard-0.25.0-cp312-cp312-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:011d388c76b11a0c165374ce660ce2c8efa8e5d87f34996aa80f9c0816698b64", size = 5343019, upload-time = "2025-09-14T22:16:59.302Z" }, + { url = "https://files.pythonhosted.org/packages/1e/15/efef5a2f204a64bdb5571e6161d49f7ef0fffdbca953a615efbec045f60f/zstandard-0.25.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:6dffecc361d079bb48d7caef5d673c88c8988d3d33fb74ab95b7ee6da42652ea", size = 5063012, upload-time = "2025-09-14T22:17:01.156Z" }, + { url = "https://files.pythonhosted.org/packages/b7/37/a6ce629ffdb43959e92e87ebdaeebb5ac81c944b6a75c9c47e300f85abdf/zstandard-0.25.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:7149623bba7fdf7e7f24312953bcf73cae103db8cae49f8154dd1eadc8a29ecb", size = 5394148, upload-time = "2025-09-14T22:17:03.091Z" }, + { url = "https://files.pythonhosted.org/packages/e3/79/2bf870b3abeb5c070fe2d670a5a8d1057a8270f125ef7676d29ea900f496/zstandard-0.25.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:6a573a35693e03cf1d67799fd01b50ff578515a8aeadd4595d2a7fa9f3ec002a", size = 5451652, upload-time = "2025-09-14T22:17:04.979Z" }, + { url = "https://files.pythonhosted.org/packages/53/60/7be26e610767316c028a2cbedb9a3beabdbe33e2182c373f71a1c0b88f36/zstandard-0.25.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5a56ba0db2d244117ed744dfa8f6f5b366e14148e00de44723413b2f3938a902", size = 5546993, upload-time = "2025-09-14T22:17:06.781Z" }, + { url = "https://files.pythonhosted.org/packages/85/c7/3483ad9ff0662623f3648479b0380d2de5510abf00990468c286c6b04017/zstandard-0.25.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:10ef2a79ab8e2974e2075fb984e5b9806c64134810fac21576f0668e7ea19f8f", size = 5046806, 
upload-time = "2025-09-14T22:17:08.415Z" }, + { url = "https://files.pythonhosted.org/packages/08/b3/206883dd25b8d1591a1caa44b54c2aad84badccf2f1de9e2d60a446f9a25/zstandard-0.25.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:aaf21ba8fb76d102b696781bddaa0954b782536446083ae3fdaa6f16b25a1c4b", size = 5576659, upload-time = "2025-09-14T22:17:10.164Z" }, + { url = "https://files.pythonhosted.org/packages/9d/31/76c0779101453e6c117b0ff22565865c54f48f8bd807df2b00c2c404b8e0/zstandard-0.25.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1869da9571d5e94a85a5e8d57e4e8807b175c9e4a6294e3b66fa4efb074d90f6", size = 4953933, upload-time = "2025-09-14T22:17:11.857Z" }, + { url = "https://files.pythonhosted.org/packages/18/e1/97680c664a1bf9a247a280a053d98e251424af51f1b196c6d52f117c9720/zstandard-0.25.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:809c5bcb2c67cd0ed81e9229d227d4ca28f82d0f778fc5fea624a9def3963f91", size = 5268008, upload-time = "2025-09-14T22:17:13.627Z" }, + { url = "https://files.pythonhosted.org/packages/1e/73/316e4010de585ac798e154e88fd81bb16afc5c5cb1a72eeb16dd37e8024a/zstandard-0.25.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:f27662e4f7dbf9f9c12391cb37b4c4c3cb90ffbd3b1fb9284dadbbb8935fa708", size = 5433517, upload-time = "2025-09-14T22:17:16.103Z" }, + { url = "https://files.pythonhosted.org/packages/5b/60/dd0f8cfa8129c5a0ce3ea6b7f70be5b33d2618013a161e1ff26c2b39787c/zstandard-0.25.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:99c0c846e6e61718715a3c9437ccc625de26593fea60189567f0118dc9db7512", size = 5814292, upload-time = "2025-09-14T22:17:17.827Z" }, + { url = "https://files.pythonhosted.org/packages/fc/5f/75aafd4b9d11b5407b641b8e41a57864097663699f23e9ad4dbb91dc6bfe/zstandard-0.25.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:474d2596a2dbc241a556e965fb76002c1ce655445e4e3bf38e5477d413165ffa", size = 5360237, upload-time = "2025-09-14T22:17:19.954Z" }, + { url = 
"https://files.pythonhosted.org/packages/ff/8d/0309daffea4fcac7981021dbf21cdb2e3427a9e76bafbcdbdf5392ff99a4/zstandard-0.25.0-cp312-cp312-win32.whl", hash = "sha256:23ebc8f17a03133b4426bcc04aabd68f8236eb78c3760f12783385171b0fd8bd", size = 436922, upload-time = "2025-09-14T22:17:24.398Z" }, + { url = "https://files.pythonhosted.org/packages/79/3b/fa54d9015f945330510cb5d0b0501e8253c127cca7ebe8ba46a965df18c5/zstandard-0.25.0-cp312-cp312-win_amd64.whl", hash = "sha256:ffef5a74088f1e09947aecf91011136665152e0b4b359c42be3373897fb39b01", size = 506276, upload-time = "2025-09-14T22:17:21.429Z" }, + { url = "https://files.pythonhosted.org/packages/ea/6b/8b51697e5319b1f9ac71087b0af9a40d8a6288ff8025c36486e0c12abcc4/zstandard-0.25.0-cp312-cp312-win_arm64.whl", hash = "sha256:181eb40e0b6a29b3cd2849f825e0fa34397f649170673d385f3598ae17cca2e9", size = 462679, upload-time = "2025-09-14T22:17:23.147Z" }, + { url = "https://files.pythonhosted.org/packages/35/0b/8df9c4ad06af91d39e94fa96cc010a24ac4ef1378d3efab9223cc8593d40/zstandard-0.25.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ec996f12524f88e151c339688c3897194821d7f03081ab35d31d1e12ec975e94", size = 795735, upload-time = "2025-09-14T22:17:26.042Z" }, + { url = "https://files.pythonhosted.org/packages/3f/06/9ae96a3e5dcfd119377ba33d4c42a7d89da1efabd5cb3e366b156c45ff4d/zstandard-0.25.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a1a4ae2dec3993a32247995bdfe367fc3266da832d82f8438c8570f989753de1", size = 640440, upload-time = "2025-09-14T22:17:27.366Z" }, + { url = "https://files.pythonhosted.org/packages/d9/14/933d27204c2bd404229c69f445862454dcc101cd69ef8c6068f15aaec12c/zstandard-0.25.0-cp313-cp313-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:e96594a5537722fdfb79951672a2a63aec5ebfb823e7560586f7484819f2a08f", size = 5343070, upload-time = "2025-09-14T22:17:28.896Z" }, + { url = 
"https://files.pythonhosted.org/packages/6d/db/ddb11011826ed7db9d0e485d13df79b58586bfdec56e5c84a928a9a78c1c/zstandard-0.25.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:bfc4e20784722098822e3eee42b8e576b379ed72cca4a7cb856ae733e62192ea", size = 5063001, upload-time = "2025-09-14T22:17:31.044Z" }, + { url = "https://files.pythonhosted.org/packages/db/00/87466ea3f99599d02a5238498b87bf84a6348290c19571051839ca943777/zstandard-0.25.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:457ed498fc58cdc12fc48f7950e02740d4f7ae9493dd4ab2168a47c93c31298e", size = 5394120, upload-time = "2025-09-14T22:17:32.711Z" }, + { url = "https://files.pythonhosted.org/packages/2b/95/fc5531d9c618a679a20ff6c29e2b3ef1d1f4ad66c5e161ae6ff847d102a9/zstandard-0.25.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:fd7a5004eb1980d3cefe26b2685bcb0b17989901a70a1040d1ac86f1d898c551", size = 5451230, upload-time = "2025-09-14T22:17:34.41Z" }, + { url = "https://files.pythonhosted.org/packages/63/4b/e3678b4e776db00f9f7b2fe58e547e8928ef32727d7a1ff01dea010f3f13/zstandard-0.25.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8e735494da3db08694d26480f1493ad2cf86e99bdd53e8e9771b2752a5c0246a", size = 5547173, upload-time = "2025-09-14T22:17:36.084Z" }, + { url = "https://files.pythonhosted.org/packages/4e/d5/ba05ed95c6b8ec30bd468dfeab20589f2cf709b5c940483e31d991f2ca58/zstandard-0.25.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3a39c94ad7866160a4a46d772e43311a743c316942037671beb264e395bdd611", size = 5046736, upload-time = "2025-09-14T22:17:37.891Z" }, + { url = "https://files.pythonhosted.org/packages/50/d5/870aa06b3a76c73eced65c044b92286a3c4e00554005ff51962deef28e28/zstandard-0.25.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:172de1f06947577d3a3005416977cce6168f2261284c02080e7ad0185faeced3", size = 5576368, upload-time = "2025-09-14T22:17:40.206Z" }, + { url = 
"https://files.pythonhosted.org/packages/5d/35/398dc2ffc89d304d59bc12f0fdd931b4ce455bddf7038a0a67733a25f550/zstandard-0.25.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:3c83b0188c852a47cd13ef3bf9209fb0a77fa5374958b8c53aaa699398c6bd7b", size = 4954022, upload-time = "2025-09-14T22:17:41.879Z" }, + { url = "https://files.pythonhosted.org/packages/9a/5c/36ba1e5507d56d2213202ec2b05e8541734af5f2ce378c5d1ceaf4d88dc4/zstandard-0.25.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:1673b7199bbe763365b81a4f3252b8e80f44c9e323fc42940dc8843bfeaf9851", size = 5267889, upload-time = "2025-09-14T22:17:43.577Z" }, + { url = "https://files.pythonhosted.org/packages/70/e8/2ec6b6fb7358b2ec0113ae202647ca7c0e9d15b61c005ae5225ad0995df5/zstandard-0.25.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:0be7622c37c183406f3dbf0cba104118eb16a4ea7359eeb5752f0794882fc250", size = 5433952, upload-time = "2025-09-14T22:17:45.271Z" }, + { url = "https://files.pythonhosted.org/packages/7b/01/b5f4d4dbc59ef193e870495c6f1275f5b2928e01ff5a81fecb22a06e22fb/zstandard-0.25.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:5f5e4c2a23ca271c218ac025bd7d635597048b366d6f31f420aaeb715239fc98", size = 5814054, upload-time = "2025-09-14T22:17:47.08Z" }, + { url = "https://files.pythonhosted.org/packages/b2/e5/fbd822d5c6f427cf158316d012c5a12f233473c2f9c5fe5ab1ae5d21f3d8/zstandard-0.25.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4f187a0bb61b35119d1926aee039524d1f93aaf38a9916b8c4b78ac8514a0aaf", size = 5360113, upload-time = "2025-09-14T22:17:48.893Z" }, + { url = "https://files.pythonhosted.org/packages/8e/e0/69a553d2047f9a2c7347caa225bb3a63b6d7704ad74610cb7823baa08ed7/zstandard-0.25.0-cp313-cp313-win32.whl", hash = "sha256:7030defa83eef3e51ff26f0b7bfb229f0204b66fe18e04359ce3474ac33cbc09", size = 436936, upload-time = "2025-09-14T22:17:52.658Z" }, + { url = 
"https://files.pythonhosted.org/packages/d9/82/b9c06c870f3bd8767c201f1edbdf9e8dc34be5b0fbc5682c4f80fe948475/zstandard-0.25.0-cp313-cp313-win_amd64.whl", hash = "sha256:1f830a0dac88719af0ae43b8b2d6aef487d437036468ef3c2ea59c51f9d55fd5", size = 506232, upload-time = "2025-09-14T22:17:50.402Z" }, + { url = "https://files.pythonhosted.org/packages/d4/57/60c3c01243bb81d381c9916e2a6d9e149ab8627c0c7d7abb2d73384b3c0c/zstandard-0.25.0-cp313-cp313-win_arm64.whl", hash = "sha256:85304a43f4d513f5464ceb938aa02c1e78c2943b29f44a750b48b25ac999a049", size = 462671, upload-time = "2025-09-14T22:17:51.533Z" }, + { url = "https://files.pythonhosted.org/packages/3d/5c/f8923b595b55fe49e30612987ad8bf053aef555c14f05bb659dd5dbe3e8a/zstandard-0.25.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:e29f0cf06974c899b2c188ef7f783607dbef36da4c242eb6c82dcd8b512855e3", size = 795887, upload-time = "2025-09-14T22:17:54.198Z" }, + { url = "https://files.pythonhosted.org/packages/8d/09/d0a2a14fc3439c5f874042dca72a79c70a532090b7ba0003be73fee37ae2/zstandard-0.25.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:05df5136bc5a011f33cd25bc9f506e7426c0c9b3f9954f056831ce68f3b6689f", size = 640658, upload-time = "2025-09-14T22:17:55.423Z" }, + { url = "https://files.pythonhosted.org/packages/5d/7c/8b6b71b1ddd517f68ffb55e10834388d4f793c49c6b83effaaa05785b0b4/zstandard-0.25.0-cp314-cp314-manylinux2010_i686.manylinux_2_12_i686.manylinux_2_28_i686.whl", hash = "sha256:f604efd28f239cc21b3adb53eb061e2a205dc164be408e553b41ba2ffe0ca15c", size = 5379849, upload-time = "2025-09-14T22:17:57.372Z" }, + { url = "https://files.pythonhosted.org/packages/a4/86/a48e56320d0a17189ab7a42645387334fba2200e904ee47fc5a26c1fd8ca/zstandard-0.25.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:223415140608d0f0da010499eaa8ccdb9af210a543fac54bce15babbcfc78439", size = 5058095, upload-time = "2025-09-14T22:17:59.498Z" }, + { url = 
"https://files.pythonhosted.org/packages/f8/ad/eb659984ee2c0a779f9d06dbfe45e2dc39d99ff40a319895df2d3d9a48e5/zstandard-0.25.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2e54296a283f3ab5a26fc9b8b5d4978ea0532f37b231644f367aa588930aa043", size = 5551751, upload-time = "2025-09-14T22:18:01.618Z" }, + { url = "https://files.pythonhosted.org/packages/61/b3/b637faea43677eb7bd42ab204dfb7053bd5c4582bfe6b1baefa80ac0c47b/zstandard-0.25.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:ca54090275939dc8ec5dea2d2afb400e0f83444b2fc24e07df7fdef677110859", size = 6364818, upload-time = "2025-09-14T22:18:03.769Z" }, + { url = "https://files.pythonhosted.org/packages/31/dc/cc50210e11e465c975462439a492516a73300ab8caa8f5e0902544fd748b/zstandard-0.25.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e09bb6252b6476d8d56100e8147b803befa9a12cea144bbe629dd508800d1ad0", size = 5560402, upload-time = "2025-09-14T22:18:05.954Z" }, + { url = "https://files.pythonhosted.org/packages/c9/ae/56523ae9c142f0c08efd5e868a6da613ae76614eca1305259c3bf6a0ed43/zstandard-0.25.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:a9ec8c642d1ec73287ae3e726792dd86c96f5681eb8df274a757bf62b750eae7", size = 4955108, upload-time = "2025-09-14T22:18:07.68Z" }, + { url = "https://files.pythonhosted.org/packages/98/cf/c899f2d6df0840d5e384cf4c4121458c72802e8bda19691f3b16619f51e9/zstandard-0.25.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:a4089a10e598eae6393756b036e0f419e8c1d60f44a831520f9af41c14216cf2", size = 5269248, upload-time = "2025-09-14T22:18:09.753Z" }, + { url = "https://files.pythonhosted.org/packages/1b/c0/59e912a531d91e1c192d3085fc0f6fb2852753c301a812d856d857ea03c6/zstandard-0.25.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:f67e8f1a324a900e75b5e28ffb152bcac9fbed1cc7b43f99cd90f395c4375344", size = 5430330, upload-time = 
"2025-09-14T22:18:11.966Z" }, + { url = "https://files.pythonhosted.org/packages/a0/1d/7e31db1240de2df22a58e2ea9a93fc6e38cc29353e660c0272b6735d6669/zstandard-0.25.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:9654dbc012d8b06fc3d19cc825af3f7bf8ae242226df5f83936cb39f5fdc846c", size = 5811123, upload-time = "2025-09-14T22:18:13.907Z" }, + { url = "https://files.pythonhosted.org/packages/f6/49/fac46df5ad353d50535e118d6983069df68ca5908d4d65b8c466150a4ff1/zstandard-0.25.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:4203ce3b31aec23012d3a4cf4a2ed64d12fea5269c49aed5e4c3611b938e4088", size = 5359591, upload-time = "2025-09-14T22:18:16.465Z" }, + { url = "https://files.pythonhosted.org/packages/c2/38/f249a2050ad1eea0bb364046153942e34abba95dd5520af199aed86fbb49/zstandard-0.25.0-cp314-cp314-win32.whl", hash = "sha256:da469dc041701583e34de852d8634703550348d5822e66a0c827d39b05365b12", size = 444513, upload-time = "2025-09-14T22:18:20.61Z" }, + { url = "https://files.pythonhosted.org/packages/3a/43/241f9615bcf8ba8903b3f0432da069e857fc4fd1783bd26183db53c4804b/zstandard-0.25.0-cp314-cp314-win_amd64.whl", hash = "sha256:c19bcdd826e95671065f8692b5a4aa95c52dc7a02a4c5a0cac46deb879a017a2", size = 516118, upload-time = "2025-09-14T22:18:17.849Z" }, + { url = "https://files.pythonhosted.org/packages/f0/ef/da163ce2450ed4febf6467d77ccb4cd52c4c30ab45624bad26ca0a27260c/zstandard-0.25.0-cp314-cp314-win_arm64.whl", hash = "sha256:d7541afd73985c630bafcd6338d2518ae96060075f9463d7dc14cfb33514383d", size = 476940, upload-time = "2025-09-14T22:18:19.088Z" }, +]