diff --git a/.release-please-manifest.json b/.release-please-manifest.json
index f8093838..193b35f2 100644
--- a/.release-please-manifest.json
+++ b/.release-please-manifest.json
@@ -1,3 +1,3 @@
{
- ".": "0.3.0-alpha.6"
+ ".": "0.3.1-alpha.1"
}
diff --git a/.stats.yml b/.stats.yml
index bf6aaffb..aadaf6e2 100644
--- a/.stats.yml
+++ b/.stats.yml
@@ -1,4 +1,4 @@
-configured_endpoints: 111
-openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/llamastack%2Fllama-stack-client-15a929a0b71de779accc56bd09d1e5f580e216affdb408cf9890bc7a37847e9e.yml
-openapi_spec_hash: 5db9f7c7e80427cfa0298cbb01689559
-config_hash: 06758df5c4f261f9c97eafcef7e0028f
+configured_endpoints: 104
+openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/llamastack%2Fllama-stack-client-97b91eca4a3ff251edc02636b1a638866675d6c1abd46cd9fc18bc50a1de9656.yml
+openapi_spec_hash: 7302f1aa50090e3de78e34c184371267
+config_hash: a3829dbdaa491194d01f399784d532cd
diff --git a/CHANGELOG.md b/CHANGELOG.md
index cc3fcc10..24d4ea22 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,26 @@
# Changelog
+## 0.3.1-alpha.1 (2025-10-22)
+
+Full Changelog: [v0.3.0-alpha.6...v0.3.1-alpha.1](https://github.com/llamastack/llama-stack-client-python/compare/v0.3.0-alpha.6...v0.3.1-alpha.1)
+
+### Features
+
+* **api:** manual updates ([5b288dc](https://github.com/llamastack/llama-stack-client-python/commit/5b288dc7f1f1bf1ade741b2e7789077046e00a0e))
+* **api:** manual updates ([a7d787f](https://github.com/llamastack/llama-stack-client-python/commit/a7d787fd746e66208fd2855d1ed310cd8005d5ee))
+* **api:** move datasets to beta, vector_db -> vector_store ([88ed3fb](https://github.com/llamastack/llama-stack-client-python/commit/88ed3fb387aa6a752fd04b3fd82c727dd95384a0))
+* **api:** sync ([44e5dec](https://github.com/llamastack/llama-stack-client-python/commit/44e5decf4c417d7d55fd5a7ce33c1daac7ee0922))
+
+
+### Chores
+
+* bump `httpx-aiohttp` version to 0.1.9 ([794344a](https://github.com/llamastack/llama-stack-client-python/commit/794344a0baf1d85554988aba346cbd47cd2d6c2d))
+
+
+### Build System
+
+* Bump version to 0.3.0 ([fb7fa38](https://github.com/llamastack/llama-stack-client-python/commit/fb7fa389f583767bf33e6836dec2e50b842a8567))
+
## 0.3.0-alpha.6 (2025-10-13)
Full Changelog: [v0.3.0-alpha.5...v0.3.0-alpha.6](https://github.com/llamastack/llama-stack-client-python/compare/v0.3.0-alpha.5...v0.3.0-alpha.6)
diff --git a/README.md b/README.md
index c8cebcc3..ceb18678 100644
--- a/README.md
+++ b/README.md
@@ -473,4 +473,8 @@ print(llama_stack_client.__version__)
## Requirements
-Python 3.10 or higher.
+Python 3.12 or higher.
+
+## License
+
+This project is licensed under the MIT License - see the [LICENSE](LICENSE) file for details.
diff --git a/api.md b/api.md
index f0155d69..5f7a90a2 100644
--- a/api.md
+++ b/api.md
@@ -132,32 +132,9 @@ from llama_stack_client.types.conversations import (
Methods:
- client.conversations.items.create(conversation_id, \*\*params) -> ItemCreateResponse
-- client.conversations.items.list(conversation_id, \*\*params) -> ItemListResponse
+- client.conversations.items.list(conversation_id, \*\*params) -> SyncOpenAICursorPage[ItemListResponse]
- client.conversations.items.get(item_id, \*, conversation_id) -> ItemGetResponse
-# Datasets
-
-Types:
-
-```python
-from llama_stack_client.types import (
- ListDatasetsResponse,
- DatasetRetrieveResponse,
- DatasetListResponse,
- DatasetIterrowsResponse,
- DatasetRegisterResponse,
-)
-```
-
-Methods:
-
-- client.datasets.retrieve(dataset_id) -> DatasetRetrieveResponse
-- client.datasets.list() -> DatasetListResponse
-- client.datasets.appendrows(dataset_id, \*\*params) -> None
-- client.datasets.iterrows(dataset_id, \*\*params) -> DatasetIterrowsResponse
-- client.datasets.register(\*\*params) -> DatasetRegisterResponse
-- client.datasets.unregister(dataset_id) -> None
-
# Inspect
Types:
@@ -392,35 +369,6 @@ Methods:
- client.synthetic_data_generation.generate(\*\*params) -> SyntheticDataGenerationResponse
-# Telemetry
-
-Types:
-
-```python
-from llama_stack_client.types import (
- Event,
- QueryCondition,
- QuerySpansResponse,
- SpanWithStatus,
- Trace,
- TelemetryGetSpanResponse,
- TelemetryGetSpanTreeResponse,
- TelemetryQueryMetricsResponse,
- TelemetryQuerySpansResponse,
- TelemetryQueryTracesResponse,
-)
-```
-
-Methods:
-
-- client.telemetry.get_span(span_id, \*, trace_id) -> TelemetryGetSpanResponse
-- client.telemetry.get_span_tree(span_id, \*\*params) -> TelemetryGetSpanTreeResponse
-- client.telemetry.get_trace(trace_id) -> Trace
-- client.telemetry.query_metrics(metric_name, \*\*params) -> TelemetryQueryMetricsResponse
-- client.telemetry.query_spans(\*\*params) -> TelemetryQuerySpansResponse
-- client.telemetry.query_traces(\*\*params) -> TelemetryQueryTracesResponse
-- client.telemetry.save_spans_to_dataset(\*\*params) -> None
-
# Scoring
Types:
@@ -453,20 +401,6 @@ Methods:
- client.scoring_functions.list() -> ScoringFunctionListResponse
- client.scoring_functions.register(\*\*params) -> None
-# Benchmarks
-
-Types:
-
-```python
-from llama_stack_client.types import Benchmark, ListBenchmarksResponse, BenchmarkListResponse
-```
-
-Methods:
-
-- client.benchmarks.retrieve(benchmark_id) -> Benchmark
-- client.benchmarks.list() -> BenchmarkListResponse
-- client.benchmarks.register(\*\*params) -> None
-
# Files
Types:
@@ -533,6 +467,20 @@ Methods:
- client.alpha.post_training.job.cancel(\*\*params) -> None
- client.alpha.post_training.job.status(\*\*params) -> JobStatusResponse
+## Benchmarks
+
+Types:
+
+```python
+from llama_stack_client.types.alpha import Benchmark, ListBenchmarksResponse, BenchmarkListResponse
+```
+
+Methods:
+
+- client.alpha.benchmarks.retrieve(benchmark_id) -> Benchmark
+- client.alpha.benchmarks.list() -> BenchmarkListResponse
+- client.alpha.benchmarks.register(\*\*params) -> None
+
## Eval
Types:
@@ -628,3 +576,28 @@ Methods:
- client.alpha.agents.turn.create(session_id, \*, agent_id, \*\*params) -> Turn
- client.alpha.agents.turn.retrieve(turn_id, \*, agent_id, session_id) -> Turn
- client.alpha.agents.turn.resume(turn_id, \*, agent_id, session_id, \*\*params) -> Turn
+
+# Beta
+
+## Datasets
+
+Types:
+
+```python
+from llama_stack_client.types.beta import (
+ ListDatasetsResponse,
+ DatasetRetrieveResponse,
+ DatasetListResponse,
+ DatasetIterrowsResponse,
+ DatasetRegisterResponse,
+)
+```
+
+Methods:
+
+- client.beta.datasets.retrieve(dataset_id) -> DatasetRetrieveResponse
+- client.beta.datasets.list() -> DatasetListResponse
+- client.beta.datasets.appendrows(dataset_id, \*\*params) -> None
+- client.beta.datasets.iterrows(dataset_id, \*\*params) -> DatasetIterrowsResponse
+- client.beta.datasets.register(\*\*params) -> DatasetRegisterResponse
+- client.beta.datasets.unregister(dataset_id) -> None
diff --git a/examples/README.md b/examples/README.md
new file mode 100644
index 00000000..0fc09d50
--- /dev/null
+++ b/examples/README.md
@@ -0,0 +1,111 @@
+# Examples
+
+This directory contains example scripts and interactive tools for exploring the Llama Stack Client Python SDK.
+
+## Interactive Agent CLI
+
+`interactive_agent_cli.py` - An interactive command-line tool for exploring agent turn/step events with server-side tools.
+
+### Features
+
+- **File Search Integration**: Automatically sets up a vector store with sample knowledge base
+- **Event Streaming**: See real-time turn/step events as the agent processes your queries
+- **Server-Side Tools**: Demonstrates file_search and other server-side tool execution
+- **Interactive REPL**: Chat-style interface for easy exploration
+
+### Prerequisites
+
+1. Start a Llama Stack server with OpenAI provider:
+ ```bash
+ cd /path/to/llama-stack
+ source .venv/bin/activate
+ export OPENAI_API_KEY=<your-openai-api-key>
+ llama stack run ci-tests --port 8321
+ ```
+
+2. Install the client (from repository root):
+ ```bash
+ cd llama-stack-client-python
+ uv sync
+ ```
+
+### Usage
+
+Basic usage (uses defaults: openai/gpt-4o, localhost:8321):
+```bash
+cd examples
+uv run python interactive_agent_cli.py
+```
+
+With custom options:
+```bash
+uv run python interactive_agent_cli.py --model openai/gpt-4o-mini --base-url http://localhost:8321
+```
+
+### Example Session
+
+```
+āāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāā
+ā ā
+ā š¤ Interactive Agent Explorer š ā
+ā ā
+ā Explore agent turn/step events with server-side tools ā
+ā ā
+āāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāā
+
+š§ Configuration:
+ Model: openai/gpt-4o
+ Server: http://localhost:8321
+
+š Connecting to server...
+ ā Connected
+
+š Setting up knowledge base...
+ Indexing documents....... ā
+ Vector store ID: vs_abc123
+
+š¤ Creating agent with tools...
+ ā Agent ready
+
+š¬ Type your questions (or 'quit' to exit, 'help' for suggestions)
+āāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāā
+
+š§ You: What is Project Phoenix?
+
+š¤ Assistant:
+
+ āāāā Turn turn_abc123 started āāāā
+ ā ā
+ ā š§ Inference Step 0 started ā
+ ā š Tool Execution Step 1 ā
+ ā Tool: knowledge_search ā
+ ā Status: server_side ā
+ ā š§ Inference Step 2 ā
+ ā ā Response: Project Phoenix... ā
+ ā ā
+ āāāā Turn completed āāāāāāāāāāāāāāā
+
+Project Phoenix is a next-generation distributed systems platform launched in 2024...
+```
+
+### What You'll See
+
+The tool uses `AgentEventLogger` to display:
+- **Turn lifecycle**: TurnStarted ā TurnCompleted
+- **Inference steps**: When the model is thinking/generating text
+- **Tool execution steps**: When server-side tools (like file_search) are running
+- **Step metadata**: Whether tools are server-side or client-side
+- **Real-time streaming**: Text appears as it's generated
+
+### Sample Questions
+
+Type `help` in the interactive session to see suggested questions, or try:
+- "What is Project Phoenix?"
+- "Who is the lead architect?"
+- "What ports does the system use?"
+- "How long do JWT tokens last?"
+- "Where is the production environment deployed?"
+
+### Exit
+
+Type `quit`, `exit`, `q`, or press `Ctrl+C` to exit.
diff --git a/examples/interactive_agent_cli.py b/examples/interactive_agent_cli.py
new file mode 100755
index 00000000..01607914
--- /dev/null
+++ b/examples/interactive_agent_cli.py
@@ -0,0 +1,325 @@
+#!/usr/bin/env python3
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+"""Interactive CLI for exploring agent turn/step events with server-side tools.
+
+Usage:
+ python interactive_agent_cli.py [--model MODEL] [--base-url URL]
+"""
+
+import argparse
+import io
+import json
+import os
+import sys
+import time
+from pathlib import Path
+from typing import Optional
+from uuid import uuid4
+
+from openai import OpenAI
+from llama_stack_client import Agent, AgentEventLogger
+
+
+CACHE_DIR = Path(os.path.expanduser("~/.cache/interactive-agent-cli"))
+CACHE_DIR.mkdir(parents=True, exist_ok=True)
+CACHE_FILE = CACHE_DIR / "vector_store.json"
+
+
+def load_cached_vector_store() -> Optional[str]:
+ try:
+ with CACHE_FILE.open("r", encoding="utf-8") as fh:
+ payload = json.load(fh)
+ return payload.get("vector_store_id")
+ except FileNotFoundError:
+ return None
+ except Exception as exc: # pragma: no cover - defensive
+ print(f"ā ļø Failed to load cached vector store info: {exc}", file=sys.stderr)
+ return None
+
+
+def save_cached_vector_store(vector_store_id: str) -> None:
+ try:
+ with CACHE_FILE.open("w", encoding="utf-8") as fh:
+ json.dump({"vector_store_id": vector_store_id}, fh)
+ except Exception as exc: # pragma: no cover - defensive
+ print(f"ā ļø Failed to cache vector store id: {exc}", file=sys.stderr)
+
+
+def ensure_vector_store(client: OpenAI) -> str:
+ cached_id = load_cached_vector_store()
+ if cached_id:
+ # Verify the vector store still exists on the server
+ existing = client.vector_stores.list().data
+ if any(store.id == cached_id for store in existing):
+ print(f"š Reusing cached knowledge base (vector store {cached_id})")
+ return cached_id
+ else:
+ print("ā ļø Cached vector store not found on server; creating a new one.")
+
+ return setup_knowledge_base(client)
+
+
+def setup_knowledge_base(client: OpenAI) -> str:
+ """Create a vector store with interesting test knowledge."""
+ print("š Setting up knowledge base...")
+
+ # Create interesting test content
+ knowledge_content = """
+ # Project Phoenix Documentation
+
+ ## Overview
+ Project Phoenix is a next-generation distributed systems platform launched in 2024.
+
+ ## Key Components
+ - **Phoenix Core**: The main orchestration engine
+ - **Phoenix Mesh**: Service mesh implementation
+ - **Phoenix Analytics**: Real-time data processing pipeline
+
+ ## Authentication
+ - Primary auth method: OAuth 2.0 with JWT tokens
+ - Token expiration: 24 hours
+ - Refresh token validity: 7 days
+
+ ## Architecture
+ The system uses a microservices architecture with:
+ - API Gateway on port 8080
+ - Auth service on port 8081
+ - Data service on port 8082
+
+ ## Team
+ - Lead Architect: Dr. Sarah Chen
+ - Security Lead: James Rodriguez
+ - DevOps Lead: Maria Santos
+
+ ## Deployment
+ - Production: AWS us-east-1
+ - Staging: AWS us-west-2
+ - Development: Local Kubernetes cluster
+ """
+
+ # Upload file
+ file_payload = io.BytesIO(knowledge_content.encode("utf-8"))
+ uploaded_file = client.files.create(
+ file=("project_phoenix_docs.txt", file_payload, "text/plain"),
+ purpose="assistants",
+ )
+
+ # Create vector store
+ vector_store = client.vector_stores.create(
+ name=f"phoenix-kb-{uuid4().hex[:8]}",
+ extra_body={
+ "provider_id": "faiss",
+ "embedding_model": "nomic-ai/nomic-embed-text-v1.5",
+ },
+ )
+
+ # Add file to vector store
+ vector_store_file = client.vector_stores.files.create(
+ vector_store_id=vector_store.id,
+ file_id=uploaded_file.id,
+ )
+
+ # Wait for ingestion
+ print(" Indexing documents...", end="", flush=True)
+ deadline = time.time() + 60.0
+ while vector_store_file.status != "completed":
+ if vector_store_file.status in {"failed", "cancelled"}:
+ raise RuntimeError(f"Vector store ingestion failed: {vector_store_file.status}")
+ if time.time() > deadline:
+ raise TimeoutError("Vector store file ingest timed out")
+ time.sleep(0.5)
+ vector_store_file = client.vector_stores.files.retrieve(
+ vector_store_id=vector_store.id,
+ file_id=vector_store_file.id,
+ )
+ print(".", end="", flush=True)
+
+ print(" ā")
+ print(f" Vector store ID: {vector_store.id}")
+ print()
+ save_cached_vector_store(vector_store.id)
+ return vector_store.id
+
+
+def print_banner():
+ """Print a nice banner."""
+ banner = """
+āāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāā
+ā ā
+ā š¤ Interactive Agent Explorer š ā
+ā ā
+ā Explore agent turn/step events with server-side tools ā
+ā ā
+āāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāā
+"""
+ print(banner)
+
+
+def create_agent_with_tools(client, model, vector_store_id):
+ """Create an agent with file_search and other server-side tools."""
+ tools = [
+ {
+ "type": "file_search",
+ "vector_store_ids": [vector_store_id],
+ }
+ ]
+
+ instructions = """You are a helpful AI assistant with access to a knowledge base about Project Phoenix.
+
+When answering questions:
+1. ALWAYS search the knowledge base first using file_search
+2. Provide specific details from the documentation
+3. If information isn't in the knowledge base, say so clearly
+4. Be concise but thorough
+
+Available tools:
+- file_search: Search the Project Phoenix documentation
+"""
+
+ agent = Agent(
+ client=client,
+ model=model,
+ instructions=instructions,
+ tools=tools,
+ )
+
+ return agent
+
+
+def interactive_loop(agent):
+ """Run the interactive query loop with nice event logging."""
+ session_id = agent.create_session(f"interactive-{uuid4().hex[:8]}")
+ print(f"š Session created: {session_id}\n")
+
+ print("š¬ Type your questions (or 'quit' to exit, 'help' for suggestions)")
+ print("ā" * 70)
+ print()
+
+ while True:
+ try:
+ # Get user input
+ user_input = input("\nš§ You: ").strip()
+
+ if not user_input:
+ continue
+
+ if user_input.lower() in {"quit", "exit", "q"}:
+ print("\nš Goodbye!")
+ break
+
+ if user_input.lower() == "help":
+ print("\nš” Try asking:")
+ print(" ⢠What is Project Phoenix?")
+ print(" ⢠Who is the lead architect?")
+ print(" ⢠What ports does the system use?")
+ print(" ⢠How long do JWT tokens last?")
+ print(" ⢠Where is the production environment deployed?")
+ continue
+
+ # Create message
+ messages = [
+ {
+ "type": "message",
+ "role": "user",
+ "content": [{"type": "input_text", "text": user_input}],
+ }
+ ]
+
+ print()
+ print("š¤ Assistant:", end=" ", flush=True)
+
+ # Stream response with event logging
+ event_logger = AgentEventLogger()
+
+ for chunk in agent.create_turn(messages=messages, session_id=session_id, stream=True):
+ # Log the event
+ for log_msg in event_logger.log([chunk]):
+ print(log_msg, end="", flush=True)
+
+ print() # New line after response
+
+ except KeyboardInterrupt:
+ print("\n\nš Goodbye!")
+ break
+ except Exception as e:
+ print(f"\nā Error: {e}", file=sys.stderr)
+ print(" Please try again or type 'quit' to exit", file=sys.stderr)
+
+
+def main():
+ """Main entry point."""
+ parser = argparse.ArgumentParser(
+ description="Interactive agent CLI with server-side tools",
+ formatter_class=argparse.RawDescriptionHelpFormatter,
+ epilog="""
+Examples:
+ %(prog)s
+ %(prog)s --model openai/gpt-4o
+ %(prog)s --base-url http://localhost:8321/v1
+ """,
+ )
+ parser.add_argument(
+ "--model",
+ default="openai/gpt-4o",
+ help="Model to use (default: openai/gpt-4o)",
+ )
+ parser.add_argument(
+ "--base-url",
+ default="http://localhost:8321/v1",
+ help="Llama Stack server URL (default: http://localhost:8321/v1)",
+ )
+
+ args = parser.parse_args()
+
+ print_banner()
+ print(f"š§ Configuration:")
+ print(f" Model: {args.model}")
+ print(f" Server: {args.base_url}")
+ print()
+
+ # Create client
+ print("š Connecting to server...")
+ try:
+ client = OpenAI(base_url=args.base_url)
+ models = client.models.list()
+ identifiers = [model.identifier for model in models]
+ if args.model not in identifiers:
+ print(f" ā Model {args.model} not found", file=sys.stderr)
+ print(f" Available models: {', '.join(identifiers)}", file=sys.stderr)
+ sys.exit(1)
+
+ print(" ā Connected")
+ print()
+ except Exception as e:
+ print(f" ā Failed to connect: {e}", file=sys.stderr)
+ print(f"\n Make sure the server is running at {args.base_url}", file=sys.stderr)
+ sys.exit(1)
+
+ # Setup knowledge base
+ try:
+ print("š Setting up knowledge base...")
+ vector_store_id = ensure_vector_store(client)
+ except Exception as e:
+ print(f"ā Failed to setup knowledge base: {e}", file=sys.stderr)
+ sys.exit(1)
+
+ # Create agent
+ print("š¤ Creating agent with tools...")
+ try:
+ agent = create_agent_with_tools(client, args.model, vector_store_id)
+ print(" ā Agent ready")
+ print()
+ except Exception as e:
+ print(f" ā Failed to create agent: {e}", file=sys.stderr)
+ sys.exit(1)
+
+ # Run interactive loop
+ interactive_loop(agent)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/pyproject.toml b/pyproject.toml
index 63c6129a..447a59ae 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
[project]
name = "llama_stack_client"
-version = "0.3.0-alpha.6"
+version = "0.3.1-alpha.1"
description = "The official Python library for the llama-stack-client API"
dynamic = ["readme"]
license = "MIT"
@@ -41,7 +41,7 @@ Homepage = "https://github.com/llamastack/llama-stack-client-python"
Repository = "https://github.com/llamastack/llama-stack-client-python"
[project.optional-dependencies]
-aiohttp = ["aiohttp", "httpx_aiohttp>=0.1.8"]
+aiohttp = ["aiohttp", "httpx_aiohttp>=0.1.9"]
[tool.uv]
managed = true
diff --git a/requirements-dev.lock b/requirements-dev.lock
index 003a453c..d4264113 100644
--- a/requirements-dev.lock
+++ b/requirements-dev.lock
@@ -7,7 +7,7 @@ anyio==4.8.0
# via
# httpx
# llama-stack-client
-black==25.1.0
+black==25.9.0
certifi==2024.12.14
# via
# httpcore
@@ -15,9 +15,9 @@ certifi==2024.12.14
# requests
cfgv==3.4.0
# via pre-commit
-charset-normalizer==3.4.3
+charset-normalizer==3.4.4
# via requests
-click==8.2.1
+click==8.3.0
# via
# black
# llama-stack-client
@@ -33,7 +33,7 @@ distro==1.9.0
# via llama-stack-client
execnet==2.1.1
# via pytest-xdist
-filelock==3.19.1
+filelock==3.20.0
# via virtualenv
fire==0.7.1
# via llama-stack-client
@@ -45,7 +45,7 @@ httpx==0.28.1
# via
# llama-stack-client
# respx
-identify==2.6.14
+identify==2.6.15
# via pre-commit
idna==3.10
# via
@@ -68,17 +68,17 @@ nodeenv==1.9.1
# via
# pre-commit
# pyright
-numpy==2.3.3
+numpy==2.3.4
# via pandas
packaging==24.2
# via
# black
# pytest
-pandas==2.3.2
+pandas==2.3.3
# via llama-stack-client
pathspec==0.12.1
# via black
-platformdirs==4.4.0
+platformdirs==4.5.0
# via
# black
# virtualenv
@@ -108,9 +108,11 @@ python-dateutil==2.9.0.post0
# via
# pandas
# time-machine
+pytokens==0.2.0
+ # via black
pytz==2024.2
# via pandas
-pyyaml==6.0.2
+pyyaml==6.0.3
# via
# pre-commit
# pyaml
@@ -148,9 +150,9 @@ tzdata==2025.2
# via pandas
urllib3==2.5.0
# via requests
-virtualenv==20.34.0
+virtualenv==20.35.3
# via pre-commit
-wcwidth==0.2.13
+wcwidth==0.2.14
# via prompt-toolkit
zipp==3.21.0
# via importlib-metadata
diff --git a/scripts/utils/ruffen-docs.py b/scripts/utils/ruffen-docs.py
index 0cf2bd2f..8ed17bd0 100644
--- a/scripts/utils/ruffen-docs.py
+++ b/scripts/utils/ruffen-docs.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# fork of https://github.com/asottile/blacken-docs adapted for ruff
from __future__ import annotations
diff --git a/scripts/utils/upload-artifact.sh b/scripts/utils/upload-artifact.sh
index 8593351a..32bd7e9e 100755
--- a/scripts/utils/upload-artifact.sh
+++ b/scripts/utils/upload-artifact.sh
@@ -1,4 +1,10 @@
-#!/usr/bin/env bash
+#!/bin/bash
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
set -exuo pipefail
FILENAME=$(basename dist/*.whl)
diff --git a/src/llama_stack_client/__init__.py b/src/llama_stack_client/__init__.py
index cc2fcb9b..6d139d45 100644
--- a/src/llama_stack_client/__init__.py
+++ b/src/llama_stack_client/__init__.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
import typing as _t
@@ -41,8 +47,8 @@
from .lib.agents.agent import Agent
from .lib.agents.event_logger import EventLogger as AgentEventLogger
from .lib.inference.event_logger import EventLogger as InferenceEventLogger
-from .types.alpha.agents.turn_create_params import Document
from .types.shared_params.document import Document as RAGDocument
+from .types.alpha.agents.turn_create_params import Document
__all__ = [
"types",
diff --git a/src/llama_stack_client/_base_client.py b/src/llama_stack_client/_base_client.py
index 92f5cee4..bc3bf7b6 100644
--- a/src/llama_stack_client/_base_client.py
+++ b/src/llama_stack_client/_base_client.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
from __future__ import annotations
import sys
diff --git a/src/llama_stack_client/_client.py b/src/llama_stack_client/_client.py
index 4fe29cbb..34de181a 100644
--- a/src/llama_stack_client/_client.py
+++ b/src/llama_stack_client/_client.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
@@ -33,6 +39,7 @@
if TYPE_CHECKING:
from .resources import (
+ beta,
chat,
alpha,
files,
@@ -43,12 +50,9 @@
inspect,
scoring,
shields,
- datasets,
providers,
responses,
- telemetry,
vector_io,
- benchmarks,
embeddings,
toolgroups,
completions,
@@ -66,12 +70,10 @@
from .resources.inspect import InspectResource, AsyncInspectResource
from .resources.scoring import ScoringResource, AsyncScoringResource
from .resources.shields import ShieldsResource, AsyncShieldsResource
- from .resources.datasets import DatasetsResource, AsyncDatasetsResource
+ from .resources.beta.beta import BetaResource, AsyncBetaResource
from .resources.chat.chat import ChatResource, AsyncChatResource
from .resources.providers import ProvidersResource, AsyncProvidersResource
- from .resources.telemetry import TelemetryResource, AsyncTelemetryResource
from .resources.vector_io import VectorIoResource, AsyncVectorIoResource
- from .resources.benchmarks import BenchmarksResource, AsyncBenchmarksResource
from .resources.embeddings import EmbeddingsResource, AsyncEmbeddingsResource
from .resources.toolgroups import ToolgroupsResource, AsyncToolgroupsResource
from .resources.alpha.alpha import AlphaResource, AsyncAlphaResource
@@ -187,12 +189,6 @@ def conversations(self) -> ConversationsResource:
return ConversationsResource(self)
- @cached_property
- def datasets(self) -> DatasetsResource:
- from .resources.datasets import DatasetsResource
-
- return DatasetsResource(self)
-
@cached_property
def inspect(self) -> InspectResource:
from .resources.inspect import InspectResource
@@ -271,12 +267,6 @@ def synthetic_data_generation(self) -> SyntheticDataGenerationResource:
return SyntheticDataGenerationResource(self)
- @cached_property
- def telemetry(self) -> TelemetryResource:
- from .resources.telemetry import TelemetryResource
-
- return TelemetryResource(self)
-
@cached_property
def scoring(self) -> ScoringResource:
from .resources.scoring import ScoringResource
@@ -289,12 +279,6 @@ def scoring_functions(self) -> ScoringFunctionsResource:
return ScoringFunctionsResource(self)
- @cached_property
- def benchmarks(self) -> BenchmarksResource:
- from .resources.benchmarks import BenchmarksResource
-
- return BenchmarksResource(self)
-
@cached_property
def files(self) -> FilesResource:
from .resources.files import FilesResource
@@ -307,6 +291,12 @@ def alpha(self) -> AlphaResource:
return AlphaResource(self)
+ @cached_property
+ def beta(self) -> BetaResource:
+ from .resources.beta import BetaResource
+
+ return BetaResource(self)
+
@cached_property
def with_raw_response(self) -> LlamaStackClientWithRawResponse:
return LlamaStackClientWithRawResponse(self)
@@ -509,12 +499,6 @@ def conversations(self) -> AsyncConversationsResource:
return AsyncConversationsResource(self)
- @cached_property
- def datasets(self) -> AsyncDatasetsResource:
- from .resources.datasets import AsyncDatasetsResource
-
- return AsyncDatasetsResource(self)
-
@cached_property
def inspect(self) -> AsyncInspectResource:
from .resources.inspect import AsyncInspectResource
@@ -593,12 +577,6 @@ def synthetic_data_generation(self) -> AsyncSyntheticDataGenerationResource:
return AsyncSyntheticDataGenerationResource(self)
- @cached_property
- def telemetry(self) -> AsyncTelemetryResource:
- from .resources.telemetry import AsyncTelemetryResource
-
- return AsyncTelemetryResource(self)
-
@cached_property
def scoring(self) -> AsyncScoringResource:
from .resources.scoring import AsyncScoringResource
@@ -611,12 +589,6 @@ def scoring_functions(self) -> AsyncScoringFunctionsResource:
return AsyncScoringFunctionsResource(self)
- @cached_property
- def benchmarks(self) -> AsyncBenchmarksResource:
- from .resources.benchmarks import AsyncBenchmarksResource
-
- return AsyncBenchmarksResource(self)
-
@cached_property
def files(self) -> AsyncFilesResource:
from .resources.files import AsyncFilesResource
@@ -629,6 +601,12 @@ def alpha(self) -> AsyncAlphaResource:
return AsyncAlphaResource(self)
+ @cached_property
+ def beta(self) -> AsyncBetaResource:
+ from .resources.beta import AsyncBetaResource
+
+ return AsyncBetaResource(self)
+
@cached_property
def with_raw_response(self) -> AsyncLlamaStackClientWithRawResponse:
return AsyncLlamaStackClientWithRawResponse(self)
@@ -780,12 +758,6 @@ def conversations(self) -> conversations.ConversationsResourceWithRawResponse:
return ConversationsResourceWithRawResponse(self._client.conversations)
- @cached_property
- def datasets(self) -> datasets.DatasetsResourceWithRawResponse:
- from .resources.datasets import DatasetsResourceWithRawResponse
-
- return DatasetsResourceWithRawResponse(self._client.datasets)
-
@cached_property
def inspect(self) -> inspect.InspectResourceWithRawResponse:
from .resources.inspect import InspectResourceWithRawResponse
@@ -864,12 +836,6 @@ def synthetic_data_generation(self) -> synthetic_data_generation.SyntheticDataGe
return SyntheticDataGenerationResourceWithRawResponse(self._client.synthetic_data_generation)
- @cached_property
- def telemetry(self) -> telemetry.TelemetryResourceWithRawResponse:
- from .resources.telemetry import TelemetryResourceWithRawResponse
-
- return TelemetryResourceWithRawResponse(self._client.telemetry)
-
@cached_property
def scoring(self) -> scoring.ScoringResourceWithRawResponse:
from .resources.scoring import ScoringResourceWithRawResponse
@@ -882,12 +848,6 @@ def scoring_functions(self) -> scoring_functions.ScoringFunctionsResourceWithRaw
return ScoringFunctionsResourceWithRawResponse(self._client.scoring_functions)
- @cached_property
- def benchmarks(self) -> benchmarks.BenchmarksResourceWithRawResponse:
- from .resources.benchmarks import BenchmarksResourceWithRawResponse
-
- return BenchmarksResourceWithRawResponse(self._client.benchmarks)
-
@cached_property
def files(self) -> files.FilesResourceWithRawResponse:
from .resources.files import FilesResourceWithRawResponse
@@ -900,6 +860,12 @@ def alpha(self) -> alpha.AlphaResourceWithRawResponse:
return AlphaResourceWithRawResponse(self._client.alpha)
+ @cached_property
+ def beta(self) -> beta.BetaResourceWithRawResponse:
+ from .resources.beta import BetaResourceWithRawResponse
+
+ return BetaResourceWithRawResponse(self._client.beta)
+
class AsyncLlamaStackClientWithRawResponse:
_client: AsyncLlamaStackClient
@@ -937,12 +903,6 @@ def conversations(self) -> conversations.AsyncConversationsResourceWithRawRespon
return AsyncConversationsResourceWithRawResponse(self._client.conversations)
- @cached_property
- def datasets(self) -> datasets.AsyncDatasetsResourceWithRawResponse:
- from .resources.datasets import AsyncDatasetsResourceWithRawResponse
-
- return AsyncDatasetsResourceWithRawResponse(self._client.datasets)
-
@cached_property
def inspect(self) -> inspect.AsyncInspectResourceWithRawResponse:
from .resources.inspect import AsyncInspectResourceWithRawResponse
@@ -1023,12 +983,6 @@ def synthetic_data_generation(
return AsyncSyntheticDataGenerationResourceWithRawResponse(self._client.synthetic_data_generation)
- @cached_property
- def telemetry(self) -> telemetry.AsyncTelemetryResourceWithRawResponse:
- from .resources.telemetry import AsyncTelemetryResourceWithRawResponse
-
- return AsyncTelemetryResourceWithRawResponse(self._client.telemetry)
-
@cached_property
def scoring(self) -> scoring.AsyncScoringResourceWithRawResponse:
from .resources.scoring import AsyncScoringResourceWithRawResponse
@@ -1041,12 +995,6 @@ def scoring_functions(self) -> scoring_functions.AsyncScoringFunctionsResourceWi
return AsyncScoringFunctionsResourceWithRawResponse(self._client.scoring_functions)
- @cached_property
- def benchmarks(self) -> benchmarks.AsyncBenchmarksResourceWithRawResponse:
- from .resources.benchmarks import AsyncBenchmarksResourceWithRawResponse
-
- return AsyncBenchmarksResourceWithRawResponse(self._client.benchmarks)
-
@cached_property
def files(self) -> files.AsyncFilesResourceWithRawResponse:
from .resources.files import AsyncFilesResourceWithRawResponse
@@ -1059,6 +1007,12 @@ def alpha(self) -> alpha.AsyncAlphaResourceWithRawResponse:
return AsyncAlphaResourceWithRawResponse(self._client.alpha)
+ @cached_property
+ def beta(self) -> beta.AsyncBetaResourceWithRawResponse:
+ from .resources.beta import AsyncBetaResourceWithRawResponse
+
+ return AsyncBetaResourceWithRawResponse(self._client.beta)
+
class LlamaStackClientWithStreamedResponse:
_client: LlamaStackClient
@@ -1096,12 +1050,6 @@ def conversations(self) -> conversations.ConversationsResourceWithStreamingRespo
return ConversationsResourceWithStreamingResponse(self._client.conversations)
- @cached_property
- def datasets(self) -> datasets.DatasetsResourceWithStreamingResponse:
- from .resources.datasets import DatasetsResourceWithStreamingResponse
-
- return DatasetsResourceWithStreamingResponse(self._client.datasets)
-
@cached_property
def inspect(self) -> inspect.InspectResourceWithStreamingResponse:
from .resources.inspect import InspectResourceWithStreamingResponse
@@ -1182,12 +1130,6 @@ def synthetic_data_generation(
return SyntheticDataGenerationResourceWithStreamingResponse(self._client.synthetic_data_generation)
- @cached_property
- def telemetry(self) -> telemetry.TelemetryResourceWithStreamingResponse:
- from .resources.telemetry import TelemetryResourceWithStreamingResponse
-
- return TelemetryResourceWithStreamingResponse(self._client.telemetry)
-
@cached_property
def scoring(self) -> scoring.ScoringResourceWithStreamingResponse:
from .resources.scoring import ScoringResourceWithStreamingResponse
@@ -1200,12 +1142,6 @@ def scoring_functions(self) -> scoring_functions.ScoringFunctionsResourceWithStr
return ScoringFunctionsResourceWithStreamingResponse(self._client.scoring_functions)
- @cached_property
- def benchmarks(self) -> benchmarks.BenchmarksResourceWithStreamingResponse:
- from .resources.benchmarks import BenchmarksResourceWithStreamingResponse
-
- return BenchmarksResourceWithStreamingResponse(self._client.benchmarks)
-
@cached_property
def files(self) -> files.FilesResourceWithStreamingResponse:
from .resources.files import FilesResourceWithStreamingResponse
@@ -1218,6 +1154,12 @@ def alpha(self) -> alpha.AlphaResourceWithStreamingResponse:
return AlphaResourceWithStreamingResponse(self._client.alpha)
+ @cached_property
+ def beta(self) -> beta.BetaResourceWithStreamingResponse:
+ from .resources.beta import BetaResourceWithStreamingResponse
+
+ return BetaResourceWithStreamingResponse(self._client.beta)
+
class AsyncLlamaStackClientWithStreamedResponse:
_client: AsyncLlamaStackClient
@@ -1255,12 +1197,6 @@ def conversations(self) -> conversations.AsyncConversationsResourceWithStreaming
return AsyncConversationsResourceWithStreamingResponse(self._client.conversations)
- @cached_property
- def datasets(self) -> datasets.AsyncDatasetsResourceWithStreamingResponse:
- from .resources.datasets import AsyncDatasetsResourceWithStreamingResponse
-
- return AsyncDatasetsResourceWithStreamingResponse(self._client.datasets)
-
@cached_property
def inspect(self) -> inspect.AsyncInspectResourceWithStreamingResponse:
from .resources.inspect import AsyncInspectResourceWithStreamingResponse
@@ -1341,12 +1277,6 @@ def synthetic_data_generation(
return AsyncSyntheticDataGenerationResourceWithStreamingResponse(self._client.synthetic_data_generation)
- @cached_property
- def telemetry(self) -> telemetry.AsyncTelemetryResourceWithStreamingResponse:
- from .resources.telemetry import AsyncTelemetryResourceWithStreamingResponse
-
- return AsyncTelemetryResourceWithStreamingResponse(self._client.telemetry)
-
@cached_property
def scoring(self) -> scoring.AsyncScoringResourceWithStreamingResponse:
from .resources.scoring import AsyncScoringResourceWithStreamingResponse
@@ -1359,12 +1289,6 @@ def scoring_functions(self) -> scoring_functions.AsyncScoringFunctionsResourceWi
return AsyncScoringFunctionsResourceWithStreamingResponse(self._client.scoring_functions)
- @cached_property
- def benchmarks(self) -> benchmarks.AsyncBenchmarksResourceWithStreamingResponse:
- from .resources.benchmarks import AsyncBenchmarksResourceWithStreamingResponse
-
- return AsyncBenchmarksResourceWithStreamingResponse(self._client.benchmarks)
-
@cached_property
def files(self) -> files.AsyncFilesResourceWithStreamingResponse:
from .resources.files import AsyncFilesResourceWithStreamingResponse
@@ -1377,6 +1301,12 @@ def alpha(self) -> alpha.AsyncAlphaResourceWithStreamingResponse:
return AsyncAlphaResourceWithStreamingResponse(self._client.alpha)
+ @cached_property
+ def beta(self) -> beta.AsyncBetaResourceWithStreamingResponse:
+ from .resources.beta import AsyncBetaResourceWithStreamingResponse
+
+ return AsyncBetaResourceWithStreamingResponse(self._client.beta)
+
Client = LlamaStackClient
diff --git a/src/llama_stack_client/_compat.py b/src/llama_stack_client/_compat.py
index bdef67f0..3378ed8f 100644
--- a/src/llama_stack_client/_compat.py
+++ b/src/llama_stack_client/_compat.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
from __future__ import annotations
from typing import TYPE_CHECKING, Any, Union, Generic, TypeVar, Callable, cast, overload
diff --git a/src/llama_stack_client/_constants.py b/src/llama_stack_client/_constants.py
index 6ddf2c71..a65117ad 100644
--- a/src/llama_stack_client/_constants.py
+++ b/src/llama_stack_client/_constants.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
import httpx
diff --git a/src/llama_stack_client/_exceptions.py b/src/llama_stack_client/_exceptions.py
index 54cb1cde..c36e94a7 100644
--- a/src/llama_stack_client/_exceptions.py
+++ b/src/llama_stack_client/_exceptions.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/_files.py b/src/llama_stack_client/_files.py
index 035a1144..f368c297 100644
--- a/src/llama_stack_client/_files.py
+++ b/src/llama_stack_client/_files.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
from __future__ import annotations
import io
diff --git a/src/llama_stack_client/_models.py b/src/llama_stack_client/_models.py
index 6a3cd1d2..7d0177ec 100644
--- a/src/llama_stack_client/_models.py
+++ b/src/llama_stack_client/_models.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
from __future__ import annotations
import os
diff --git a/src/llama_stack_client/_qs.py b/src/llama_stack_client/_qs.py
index ada6fd3f..1bb860d0 100644
--- a/src/llama_stack_client/_qs.py
+++ b/src/llama_stack_client/_qs.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
from __future__ import annotations
from typing import Any, List, Tuple, Union, Mapping, TypeVar
diff --git a/src/llama_stack_client/_resource.py b/src/llama_stack_client/_resource.py
index 8a6f4ec6..03b3b142 100644
--- a/src/llama_stack_client/_resource.py
+++ b/src/llama_stack_client/_resource.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/_response.py b/src/llama_stack_client/_response.py
index 8486ab8e..e68e8144 100644
--- a/src/llama_stack_client/_response.py
+++ b/src/llama_stack_client/_response.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
from __future__ import annotations
import os
diff --git a/src/llama_stack_client/_streaming.py b/src/llama_stack_client/_streaming.py
index 8c436e93..5f23d84d 100644
--- a/src/llama_stack_client/_streaming.py
+++ b/src/llama_stack_client/_streaming.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# Note: initially copied from https://github.com/florimondmanca/httpx-sse/blob/master/src/httpx_sse/_decoders.py
from __future__ import annotations
diff --git a/src/llama_stack_client/_types.py b/src/llama_stack_client/_types.py
index 32548daa..1059615e 100644
--- a/src/llama_stack_client/_types.py
+++ b/src/llama_stack_client/_types.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
from __future__ import annotations
from os import PathLike
diff --git a/src/llama_stack_client/_utils/__init__.py b/src/llama_stack_client/_utils/__init__.py
index dc64e29a..fedf397d 100644
--- a/src/llama_stack_client/_utils/__init__.py
+++ b/src/llama_stack_client/_utils/__init__.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
from ._sync import asyncify as asyncify
from ._proxy import LazyProxy as LazyProxy
from ._utils import (
diff --git a/src/llama_stack_client/_utils/_compat.py b/src/llama_stack_client/_utils/_compat.py
index dd703233..cdc04c46 100644
--- a/src/llama_stack_client/_utils/_compat.py
+++ b/src/llama_stack_client/_utils/_compat.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
from __future__ import annotations
import sys
diff --git a/src/llama_stack_client/_utils/_datetime_parse.py b/src/llama_stack_client/_utils/_datetime_parse.py
index 7cb9d9e6..6a2c21d3 100644
--- a/src/llama_stack_client/_utils/_datetime_parse.py
+++ b/src/llama_stack_client/_utils/_datetime_parse.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
"""
This file contains code from https://github.com/pydantic/pydantic/blob/main/pydantic/v1/datetime_parse.py
without the Pydantic v1 specific errors.
diff --git a/src/llama_stack_client/_utils/_logs.py b/src/llama_stack_client/_utils/_logs.py
index 20b6cc06..61999301 100644
--- a/src/llama_stack_client/_utils/_logs.py
+++ b/src/llama_stack_client/_utils/_logs.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
import os
import logging
diff --git a/src/llama_stack_client/_utils/_proxy.py b/src/llama_stack_client/_utils/_proxy.py
index 0f239a33..b7be6f22 100644
--- a/src/llama_stack_client/_utils/_proxy.py
+++ b/src/llama_stack_client/_utils/_proxy.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
from __future__ import annotations
from abc import ABC, abstractmethod
diff --git a/src/llama_stack_client/_utils/_reflection.py b/src/llama_stack_client/_utils/_reflection.py
index 89aa712a..e42a78b5 100644
--- a/src/llama_stack_client/_utils/_reflection.py
+++ b/src/llama_stack_client/_utils/_reflection.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
from __future__ import annotations
import inspect
diff --git a/src/llama_stack_client/_utils/_resources_proxy.py b/src/llama_stack_client/_utils/_resources_proxy.py
index bf0a876a..364048bc 100644
--- a/src/llama_stack_client/_utils/_resources_proxy.py
+++ b/src/llama_stack_client/_utils/_resources_proxy.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
from __future__ import annotations
from typing import Any
diff --git a/src/llama_stack_client/_utils/_streams.py b/src/llama_stack_client/_utils/_streams.py
index f4a0208f..fb09b0ce 100644
--- a/src/llama_stack_client/_utils/_streams.py
+++ b/src/llama_stack_client/_utils/_streams.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
from typing import Any
from typing_extensions import Iterator, AsyncIterator
diff --git a/src/llama_stack_client/_utils/_sync.py b/src/llama_stack_client/_utils/_sync.py
index ad7ec71b..7c6e2342 100644
--- a/src/llama_stack_client/_utils/_sync.py
+++ b/src/llama_stack_client/_utils/_sync.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
from __future__ import annotations
import sys
diff --git a/src/llama_stack_client/_utils/_transform.py b/src/llama_stack_client/_utils/_transform.py
index 52075492..d05d92dd 100644
--- a/src/llama_stack_client/_utils/_transform.py
+++ b/src/llama_stack_client/_utils/_transform.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
from __future__ import annotations
import io
diff --git a/src/llama_stack_client/_utils/_typing.py b/src/llama_stack_client/_utils/_typing.py
index 193109f3..0771a1a9 100644
--- a/src/llama_stack_client/_utils/_typing.py
+++ b/src/llama_stack_client/_utils/_typing.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
from __future__ import annotations
import sys
diff --git a/src/llama_stack_client/_utils/_utils.py b/src/llama_stack_client/_utils/_utils.py
index 50d59269..90d1439c 100644
--- a/src/llama_stack_client/_utils/_utils.py
+++ b/src/llama_stack_client/_utils/_utils.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
from __future__ import annotations
import os
diff --git a/src/llama_stack_client/_version.py b/src/llama_stack_client/_version.py
index ef68ddd4..18b4a18e 100644
--- a/src/llama_stack_client/_version.py
+++ b/src/llama_stack_client/_version.py
@@ -1,4 +1,10 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
__title__ = "llama_stack_client"
-__version__ = "0.2.23"
+__version__ = "0.3.0"
diff --git a/src/llama_stack_client/_wrappers.py b/src/llama_stack_client/_wrappers.py
index 471b39dd..a5579854 100644
--- a/src/llama_stack_client/_wrappers.py
+++ b/src/llama_stack_client/_wrappers.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import Generic, TypeVar
diff --git a/src/llama_stack_client/lib/agents/agent.py b/src/llama_stack_client/lib/agents/agent.py
index 5e00a88b..417a53e2 100644
--- a/src/llama_stack_client/lib/agents/agent.py
+++ b/src/llama_stack_client/lib/agents/agent.py
@@ -3,244 +3,161 @@
#
# This source code is licensed under the terms described in the LICENSE file in
# the root directory of this source tree.
+import json
import logging
-from typing import Any, AsyncIterator, Callable, Iterator, List, Optional, Tuple, Union
-
-from llama_stack_client import LlamaStackClient
-from llama_stack_client.types import ToolResponseMessage, UserMessage
-from llama_stack_client.types.alpha import ToolResponseParam
-from llama_stack_client.types.alpha.agent_create_params import AgentConfig
-from llama_stack_client.types.alpha.agents.agent_turn_response_stream_chunk import (
- AgentTurnResponseStreamChunk,
+from typing import (
+ Any,
+ AsyncIterator,
+ Callable,
+ Dict,
+ Iterator,
+ List,
+ Optional,
+ Tuple,
+ Union,
+ TypedDict,
)
-from llama_stack_client.types.alpha.agents.turn import CompletionMessage, Turn
-from llama_stack_client.types.alpha.agents.turn_create_params import Document, Toolgroup
-from llama_stack_client.types.shared.tool_call import ToolCall
-from llama_stack_client.types.shared_params.agent_config import ToolConfig
-from llama_stack_client.types.shared_params.response_format import ResponseFormat
-from llama_stack_client.types.shared_params.sampling_params import SamplingParams
+from uuid import uuid4
from ..._types import Headers
from .client_tool import ClientTool, client_tool
from .tool_parser import ToolParser
+from .turn_events import (
+ AgentStreamChunk,
+ StepCompleted,
+ StepProgress,
+ StepStarted,
+ ToolCallIssuedDelta,
+ TurnFailed,
+ ToolExecutionStepResult,
+)
+from .event_synthesizer import TurnEventSynthesizer
+from .types import CompletionMessage, ToolCall, ToolResponse
-DEFAULT_MAX_ITER = 10
-logger = logging.getLogger(__name__)
+class ToolResponsePayload(TypedDict, total=False):
+ call_id: str
+ tool_name: str
+ content: Any
+ metadata: Dict[str, Any]
-class AgentUtils:
- @staticmethod
- def get_client_tools(
- tools: Optional[List[Union[Toolgroup, ClientTool, Callable[..., Any]]]],
- ) -> List[ClientTool]:
- if not tools:
- return []
+logger = logging.getLogger(__name__)
- # Wrap any function in client_tool decorator
- tools = [client_tool(tool) if (callable(tool) and not isinstance(tool, ClientTool)) else tool for tool in tools]
- return [tool for tool in tools if isinstance(tool, ClientTool)]
+class ToolUtils:
@staticmethod
- def get_tool_calls(chunk: AgentTurnResponseStreamChunk, tool_parser: Optional[ToolParser] = None) -> List[ToolCall]:
- if chunk.event.payload.event_type not in {
- "turn_complete",
- "turn_awaiting_input",
- }:
- return []
-
- message = chunk.event.payload.turn.output_message
- if message.stop_reason == "out_of_tokens":
- return []
-
- if tool_parser:
- return tool_parser.get_tool_calls(message)
-
- return message.tool_calls
+ def coerce_tool_content(content: Any) -> str:
+ if isinstance(content, str):
+ return content
+ if content is None:
+ return ""
+ if isinstance(content, (dict, list)):
+ try:
+ return json.dumps(content)
+ except TypeError:
+ return str(content)
+ return str(content)
@staticmethod
- def get_turn_id(chunk: AgentTurnResponseStreamChunk) -> Optional[str]:
- if chunk.event.payload.event_type not in [
- "turn_complete",
- "turn_awaiting_input",
- ]:
- return None
-
- return chunk.event.payload.turn.turn_id
+ def parse_tool_arguments(arguments: Any) -> Dict[str, Any]:
+ if isinstance(arguments, dict):
+ return arguments
+ if not arguments:
+ return {}
+ if isinstance(arguments, str):
+ try:
+ parsed = json.loads(arguments)
+ except json.JSONDecodeError:
+ logger.warning("Failed to decode tool arguments JSON", exc_info=True)
+ return {}
+ if isinstance(parsed, dict):
+ return parsed
+ logger.warning("Tool arguments JSON did not decode into a dict: %s", type(parsed))
+ return {}
+ logger.warning("Unsupported tool arguments type: %s", type(arguments))
+ return {}
@staticmethod
- def get_agent_config(
- model: Optional[str] = None,
- instructions: Optional[str] = None,
- tools: Optional[List[Union[Toolgroup, ClientTool, Callable[..., Any]]]] = None,
- tool_config: Optional[ToolConfig] = None,
- sampling_params: Optional[SamplingParams] = None,
- max_infer_iters: Optional[int] = None,
- input_shields: Optional[List[str]] = None,
- output_shields: Optional[List[str]] = None,
- response_format: Optional[ResponseFormat] = None,
- enable_session_persistence: Optional[bool] = None,
- name: str | None = None,
- ) -> AgentConfig:
- # Create a minimal valid AgentConfig with required fields
- if model is None or instructions is None:
- raise ValueError("Both 'model' and 'instructions' are required when agent_config is not provided")
-
- agent_config = {
- "model": model,
- "instructions": instructions,
- "toolgroups": [],
- "client_tools": [],
- }
-
- # Add optional parameters if provided
- if enable_session_persistence is not None:
- agent_config["enable_session_persistence"] = enable_session_persistence
- if max_infer_iters is not None:
- agent_config["max_infer_iters"] = max_infer_iters
- if input_shields is not None:
- agent_config["input_shields"] = input_shields
- if output_shields is not None:
- agent_config["output_shields"] = output_shields
- if response_format is not None:
- agent_config["response_format"] = response_format
- if sampling_params is not None:
- agent_config["sampling_params"] = sampling_params
- if tool_config is not None:
- agent_config["tool_config"] = tool_config
- if name is not None:
- agent_config["name"] = name
- if tools is not None:
- toolgroups: List[Toolgroup] = []
- for tool in tools:
- if isinstance(tool, str) or isinstance(tool, dict):
- toolgroups.append(tool)
-
- agent_config["toolgroups"] = toolgroups
- agent_config["client_tools"] = [tool.get_tool_definition() for tool in AgentUtils.get_client_tools(tools)]
-
- agent_config = AgentConfig(**agent_config)
- return agent_config
+ def normalize_tool_response(tool_response: Any) -> ToolResponsePayload:
+ if isinstance(tool_response, ToolResponse):
+ payload: ToolResponsePayload = {
+ "call_id": tool_response.call_id,
+ "tool_name": str(tool_response.tool_name),
+ "content": ToolUtils.coerce_tool_content(tool_response.content),
+ "metadata": dict(tool_response.metadata),
+ }
+ return payload
+
+ if isinstance(tool_response, dict):
+ call_id = tool_response.get("call_id")
+ tool_name = tool_response.get("tool_name")
+ if call_id is None or tool_name is None:
+ raise KeyError("Tool response missing required keys 'call_id' or 'tool_name'")
+ payload: ToolResponsePayload = {
+ "call_id": str(call_id),
+ "tool_name": str(tool_name),
+ "content": ToolUtils.coerce_tool_content(tool_response.get("content")),
+ "metadata": dict(tool_response.get("metadata") or {}),
+ }
+ return payload
+
+ raise TypeError(f"Unsupported tool response type: {type(tool_response)!r}")
class Agent:
def __init__(
self,
- client: LlamaStackClient,
- # begin deprecated
- agent_config: Optional[AgentConfig] = None,
- client_tools: Tuple[ClientTool, ...] = (),
- # end deprecated
+ client: Any, # Accept any OpenAI-compatible client
+ *,
+ model: str,
+ instructions: str,
+ tools: Optional[List[Union[Dict[str, Any], ClientTool, Callable[..., Any]]]] = None,
tool_parser: Optional[ToolParser] = None,
- model: Optional[str] = None,
- instructions: Optional[str] = None,
- tools: Optional[List[Union[Toolgroup, ClientTool, Callable[..., Any]]]] = None,
- tool_config: Optional[ToolConfig] = None,
- sampling_params: Optional[SamplingParams] = None,
- max_infer_iters: Optional[int] = None,
- input_shields: Optional[List[str]] = None,
- output_shields: Optional[List[str]] = None,
- response_format: Optional[ResponseFormat] = None,
- enable_session_persistence: Optional[bool] = None,
extra_headers: Headers | None = None,
- name: str | None = None,
):
- """Construct an Agent with the given parameters.
-
- :param client: The LlamaStackClient instance.
- :param agent_config: The AgentConfig instance.
- ::deprecated: use other parameters instead
- :param client_tools: A tuple of ClientTool instances.
- ::deprecated: use tools instead
- :param tool_parser: Custom logic that parses tool calls from a message.
- :param model: The model to use for the agent.
- :param instructions: The instructions for the agent.
- :param tools: A list of tools for the agent. Values can be one of the following:
- - dict representing a toolgroup/tool with arguments: e.g. {"name": "builtin::rag/knowledge_search", "args": {"vector_db_ids": [123]}}
- - a python function with a docstring. See @client_tool for more details.
- - str representing a tool within a toolgroup: e.g. "builtin::rag/knowledge_search"
- - str representing a toolgroup_id: e.g. "builtin::rag", "builtin::code_interpreter", where all tools in the toolgroup will be added to the agent
- - an instance of ClientTool: A client tool object.
- :param tool_config: The tool configuration for the agent.
- :param sampling_params: The sampling parameters for the agent.
- :param max_infer_iters: The maximum number of inference iterations.
- :param input_shields: The input shields for the agent.
- :param output_shields: The output shields for the agent.
- :param response_format: The response format for the agent.
- :param enable_session_persistence: Whether to enable session persistence.
- :param extra_headers: Extra headers to add to all requests sent by the agent.
- :param name: Optional name for the agent, used in telemetry and identification.
+ """Construct an Agent backed by the responses + conversations APIs.
+
+ Args:
+ client: An OpenAI-compatible client (e.g., openai.OpenAI()).
+ The client must support the responses and conversations APIs.
"""
self.client = client
-
- if agent_config is not None:
- logger.warning("`agent_config` is deprecated. Use inlined parameters instead.")
- if client_tools != ():
- logger.warning("`client_tools` is deprecated. Use `tools` instead.")
-
- # Construct agent_config from parameters if not provided
- if agent_config is None:
- agent_config = AgentUtils.get_agent_config(
- model=model,
- instructions=instructions,
- tools=tools,
- tool_config=tool_config,
- sampling_params=sampling_params,
- max_infer_iters=max_infer_iters,
- input_shields=input_shields,
- output_shields=output_shields,
- response_format=response_format,
- enable_session_persistence=enable_session_persistence,
- name=name,
- )
- client_tools = AgentUtils.get_client_tools(tools)
-
- self.agent_config = agent_config
- self.client_tools = {t.get_name(): t for t in client_tools}
- self.sessions = []
self.tool_parser = tool_parser
- self.builtin_tools = {}
self.extra_headers = extra_headers
- self.initialize()
+ self._model = model
+ self._instructions = instructions
- def initialize(self) -> None:
- agentic_system_create_response = self.client.alpha.agents.create(
- agent_config=self.agent_config,
- extra_headers=self.extra_headers,
- )
- self.agent_id = agentic_system_create_response.agent_id
- for tg in self.agent_config["toolgroups"]:
- toolgroup_id = tg if isinstance(tg, str) else tg.get("name")
- for tool in self.client.tools.list(toolgroup_id=toolgroup_id, extra_headers=self.extra_headers):
- self.builtin_tools[tool.name] = tg.get("args", {}) if isinstance(tg, dict) else {}
+ # Convert all tools to API format and separate client-side functions
+ self._tools, client_tools = AgentUtils.normalize_tools(tools)
+ self.client_tools = {tool.get_name(): tool for tool in client_tools}
+
+ self.sessions: List[str] = []
def create_session(self, session_name: str) -> str:
- agentic_system_create_session_response = self.client.alpha.agents.session.create(
- agent_id=self.agent_id,
- session_name=session_name,
+ conversation = self.client.conversations.create(
extra_headers=self.extra_headers,
+ metadata={"name": session_name},
)
- self.session_id = agentic_system_create_session_response.session_id
- self.sessions.append(self.session_id)
- return self.session_id
+ self.sessions.append(conversation.id)
+ return conversation.id
- def _run_tool_calls(self, tool_calls: List[ToolCall]) -> List[ToolResponseParam]:
- responses = []
+ def _run_tool_calls(self, tool_calls: List[ToolCall]) -> List[ToolResponsePayload]:
+ responses: List[ToolResponsePayload] = []
for tool_call in tool_calls:
- responses.append(self._run_single_tool(tool_call))
+ raw_result = self._run_single_tool(tool_call)
+ responses.append(ToolUtils.normalize_tool_response(raw_result))
return responses
- def _run_single_tool(self, tool_call: ToolCall) -> ToolResponseParam:
- # custom client tools
+ def _run_single_tool(self, tool_call: ToolCall) -> Any:
+ # Execute client-side tools
if tool_call.tool_name in self.client_tools:
tool = self.client_tools[tool_call.tool_name]
- # NOTE: tool.run() expects a list of messages, we only pass in last message here
- # but we could pass in the entire message history
result_message = tool.run(
[
CompletionMessage(
role="assistant",
- content=tool_call.tool_name,
+ content=tool_call.arguments,
tool_calls=[tool_call],
stop_reason="end_of_turn",
)
@@ -248,277 +165,201 @@ def _run_single_tool(self, tool_call: ToolCall) -> ToolResponseParam:
)
return result_message
- # builtin tools executed by tool_runtime
- if tool_call.tool_name in self.builtin_tools:
- tool_result = self.client.tool_runtime.invoke_tool(
- tool_name=tool_call.tool_name,
- kwargs={
- **tool_call.arguments,
- **self.builtin_tools[tool_call.tool_name],
- },
- extra_headers=self.extra_headers,
- )
- return ToolResponseParam(
- call_id=tool_call.call_id,
- tool_name=tool_call.tool_name,
- content=tool_result.content,
- )
-
- # cannot find tools
- return ToolResponseParam(
- call_id=tool_call.call_id,
- tool_name=tool_call.tool_name,
- content=f"Unknown tool `{tool_call.tool_name}` was called.",
- )
+ # Server-side tools should never reach here (they execute within response stream)
+ # If we get here, it's an error
+ return {
+ "call_id": tool_call.call_id,
+ "tool_name": tool_call.tool_name,
+ "content": f"Unknown tool `{tool_call.tool_name}` was called.",
+ }
def create_turn(
self,
- messages: List[Union[UserMessage, ToolResponseMessage]],
- session_id: Optional[str] = None,
- toolgroups: Optional[List[Toolgroup]] = None,
- documents: Optional[List[Document]] = None,
+ messages: List[Dict[str, Any]],
+ session_id: str,
stream: bool = True,
# TODO: deprecate this
extra_headers: Headers | None = None,
- ) -> Iterator[AgentTurnResponseStreamChunk] | Turn:
+ ) -> Iterator[AgentStreamChunk] | Any:
if stream:
- return self._create_turn_streaming(
- messages, session_id, toolgroups, documents, extra_headers=extra_headers or self.extra_headers
- )
+ return self._create_turn_streaming(messages, session_id, extra_headers=extra_headers or self.extra_headers)
else:
- chunks = [
- x
- for x in self._create_turn_streaming(
- messages,
- session_id,
- toolgroups,
- documents,
- extra_headers=extra_headers or self.extra_headers,
- )
- ]
- if not chunks:
+ last_chunk: Optional[AgentStreamChunk] = None
+ for chunk in self._create_turn_streaming(
+ messages, session_id, extra_headers=extra_headers or self.extra_headers
+ ):
+ last_chunk = chunk
+
+ if not last_chunk or not last_chunk.response:
raise Exception("Turn did not complete")
- last_chunk = chunks[-1]
- if hasattr(last_chunk, "error"):
- if "message" in last_chunk.error:
- error_msg = last_chunk.error["message"]
- else:
- error_msg = str(last_chunk.error)
- raise RuntimeError(f"Turn did not complete. Error: {error_msg}")
- try:
- return last_chunk.event.payload.turn
- except AttributeError:
- raise RuntimeError(f"Turn did not complete. Output: {last_chunk}") from None
+ return last_chunk.response
def _create_turn_streaming(
self,
- messages: List[Union[UserMessage, ToolResponseMessage]],
- session_id: Optional[str] = None,
- toolgroups: Optional[List[Toolgroup]] = None,
- documents: Optional[List[Document]] = None,
+ messages: List[Dict[str, Any]],
+ session_id: str,
# TODO: deprecate this
extra_headers: Headers | None = None,
- ) -> Iterator[AgentTurnResponseStreamChunk]:
- n_iter = 0
-
- # 1. create an agent turn
- turn_response = self.client.alpha.agents.turn.create(
- agent_id=self.agent_id,
- # use specified session_id or last session created
- session_id=session_id or self.session_id[-1],
- messages=messages,
- stream=True,
- documents=documents,
- toolgroups=toolgroups,
- extra_headers=extra_headers or self.extra_headers,
- )
+ ) -> Iterator[AgentStreamChunk]:
+ # Generate turn_id
+ turn_id = f"turn_{uuid4().hex[:12]}"
+
+ # Create synthesizer
+ synthesizer = TurnEventSynthesizer(session_id=session_id, turn_id=turn_id)
+
+ request_headers = extra_headers or self.extra_headers
+
+ # Main turn loop
+ while True:
+ # Create response stream
+ raw_stream = self.client.responses.create(
+ model=self._model,
+ instructions=self._instructions,
+ conversation=session_id,
+ input=messages,
+ tools=self._tools,
+ stream=True,
+ extra_headers=request_headers,
+ )
+
+ # Process events
+ function_calls_to_execute: List[ToolCall] = [] # Only client-side!
- # 2. process turn and resume if there's a tool call
- is_turn_complete = False
- while not is_turn_complete:
- is_turn_complete = True
- for chunk in turn_response:
- if hasattr(chunk, "error"):
- yield chunk
+ for high_level_event in synthesizer.process_raw_stream(raw_stream):
+ # Handle failures
+ if isinstance(high_level_event, TurnFailed):
+ yield AgentStreamChunk(event=high_level_event)
return
- tool_calls = AgentUtils.get_tool_calls(chunk, self.tool_parser)
- if not tool_calls:
- yield chunk
- else:
- is_turn_complete = False
- # End of turn is reached, do not resume even if there's a tool call
- # We only check for this if tool_parser is not set, because otherwise
- # tool call will be parsed on client side, and server will always return "end_of_turn"
- if not self.tool_parser and chunk.event.payload.turn.output_message.stop_reason in {"end_of_turn"}:
- yield chunk
- break
-
- turn_id = AgentUtils.get_turn_id(chunk)
- if n_iter == 0:
- yield chunk
-
- # run the tools
- tool_responses = self._run_tool_calls(tool_calls)
-
- # pass it to next iteration
- turn_response = self.client.alpha.agents.turn.resume(
- agent_id=self.agent_id,
- session_id=session_id or self.session_id[-1],
- turn_id=turn_id,
+
+ # Track function calls that need client execution
+ if isinstance(high_level_event, StepCompleted):
+ if high_level_event.step_type == "inference":
+ result = high_level_event.result
+ if result.function_calls: # Only client-side function calls
+ function_calls_to_execute = result.function_calls
+
+ yield AgentStreamChunk(event=high_level_event)
+
+ # If no client-side function calls, turn is done
+ if not function_calls_to_execute:
+ # Emit TurnCompleted
+ response = synthesizer.last_response
+ if not response:
+ raise RuntimeError("No response available")
+ for event in synthesizer.finish_turn():
+ yield AgentStreamChunk(event=event, response=response)
+ break
+
+ # Execute client-side tools (emit tool execution step events)
+ tool_step_id = f"{turn_id}_step_{synthesizer.step_counter}"
+ synthesizer.step_counter += 1
+
+ yield AgentStreamChunk(
+ event=StepStarted(
+ step_id=tool_step_id,
+ step_type="tool_execution",
+ turn_id=turn_id,
+ metadata={"server_side": False},
+ )
+ )
+
+ tool_responses = self._run_tool_calls(function_calls_to_execute)
+
+ yield AgentStreamChunk(
+ event=StepCompleted(
+ step_id=tool_step_id,
+ step_type="tool_execution",
+ turn_id=turn_id,
+ result=ToolExecutionStepResult(
+ step_id=tool_step_id,
+ tool_calls=function_calls_to_execute,
tool_responses=tool_responses,
- stream=True,
- extra_headers=extra_headers or self.extra_headers,
- )
- n_iter += 1
+ ),
+ )
+ )
- if self.tool_parser and n_iter > self.agent_config.get("max_infer_iters", DEFAULT_MAX_ITER):
- raise Exception("Max inference iterations reached")
+ # Continue loop with tool outputs as input
+ messages = [
+ {
+ "type": "function_call_output",
+ "call_id": payload["call_id"],
+ "output": payload["content"],
+ }
+ for payload in tool_responses
+ ]
class AsyncAgent:
def __init__(
self,
- client: LlamaStackClient,
- # begin deprecated
- agent_config: Optional[AgentConfig] = None,
- client_tools: Tuple[ClientTool, ...] = (),
- # end deprecated
+ client: Any, # Accept any async OpenAI-compatible client
+ *,
+ model: str,
+ instructions: str,
+ tools: Optional[List[Union[Dict[str, Any], ClientTool, Callable[..., Any]]]] = None,
tool_parser: Optional[ToolParser] = None,
- model: Optional[str] = None,
- instructions: Optional[str] = None,
- tools: Optional[List[Union[Toolgroup, ClientTool, Callable[..., Any]]]] = None,
- tool_config: Optional[ToolConfig] = None,
- sampling_params: Optional[SamplingParams] = None,
- max_infer_iters: Optional[int] = None,
- input_shields: Optional[List[str]] = None,
- output_shields: Optional[List[str]] = None,
- response_format: Optional[ResponseFormat] = None,
- enable_session_persistence: Optional[bool] = None,
extra_headers: Headers | None = None,
- name: str | None = None,
):
- """Construct an Agent with the given parameters.
-
- :param client: The LlamaStackClient instance.
- :param agent_config: The AgentConfig instance.
- ::deprecated: use other parameters instead
- :param client_tools: A tuple of ClientTool instances.
- ::deprecated: use tools instead
- :param tool_parser: Custom logic that parses tool calls from a message.
- :param model: The model to use for the agent.
- :param instructions: The instructions for the agent.
- :param tools: A list of tools for the agent. Values can be one of the following:
- - dict representing a toolgroup/tool with arguments: e.g. {"name": "builtin::rag/knowledge_search", "args": {"vector_db_ids": [123]}}
- - a python function with a docstring. See @client_tool for more details.
- - str representing a tool within a toolgroup: e.g. "builtin::rag/knowledge_search"
- - str representing a toolgroup_id: e.g. "builtin::rag", "builtin::code_interpreter", where all tools in the toolgroup will be added to the agent
- - an instance of ClientTool: A client tool object.
- :param tool_config: The tool configuration for the agent.
- :param sampling_params: The sampling parameters for the agent.
- :param max_infer_iters: The maximum number of inference iterations.
- :param input_shields: The input shields for the agent.
- :param output_shields: The output shields for the agent.
- :param response_format: The response format for the agent.
- :param enable_session_persistence: Whether to enable session persistence.
- :param extra_headers: Extra headers to add to all requests sent by the agent.
- :param name: Optional name for the agent, used in telemetry and identification.
+ """Construct an async Agent backed by the responses + conversations APIs.
+
+ Args:
+ client: An async OpenAI-compatible client (e.g., openai.AsyncOpenAI() or AsyncLlamaStackClient).
+ The client must support the responses and conversations APIs.
"""
self.client = client
- if agent_config is not None:
- logger.warning("`agent_config` is deprecated. Use inlined parameters instead.")
- if client_tools != ():
- logger.warning("`client_tools` is deprecated. Use `tools` instead.")
-
- # Construct agent_config from parameters if not provided
- if agent_config is None:
- agent_config = AgentUtils.get_agent_config(
- model=model,
- instructions=instructions,
- tools=tools,
- tool_config=tool_config,
- sampling_params=sampling_params,
- max_infer_iters=max_infer_iters,
- input_shields=input_shields,
- output_shields=output_shields,
- response_format=response_format,
- enable_session_persistence=enable_session_persistence,
- name=name,
- )
- client_tools = AgentUtils.get_client_tools(tools)
-
- self.agent_config = agent_config
- self.client_tools = {t.get_name(): t for t in client_tools}
- self.sessions = []
self.tool_parser = tool_parser
- self.builtin_tools = {}
self.extra_headers = extra_headers
- self._agent_id = None
-
- if isinstance(client, LlamaStackClient):
- raise ValueError("AsyncAgent must be initialized with an AsyncLlamaStackClient")
+ self._model = model
+ self._instructions = instructions
- @property
- def agent_id(self) -> str:
- if not self._agent_id:
- raise RuntimeError("Agent ID not initialized. Call initialize() first.")
- return self._agent_id
+ # Convert all tools to API format and separate client-side functions
+ self._tools, client_tools = AgentUtils.normalize_tools(tools)
+ self.client_tools = {tool.get_name(): tool for tool in client_tools}
- async def initialize(self) -> None:
- if self._agent_id:
- return
-
- agentic_system_create_response = await self.client.alpha.agents.create(
- agent_config=self.agent_config,
- )
- self._agent_id = agentic_system_create_response.agent_id
- for tg in self.agent_config["toolgroups"]:
- for tool in await self.client.tools.list(toolgroup_id=tg, extra_headers=self.extra_headers):
- self.builtin_tools[tool.name] = tg.get("args", {}) if isinstance(tg, dict) else {}
+ self.sessions: List[str] = []
async def create_session(self, session_name: str) -> str:
- await self.initialize()
- agentic_system_create_session_response = await self.client.alpha.agents.session.create(
- agent_id=self.agent_id,
- session_name=session_name,
+ conversation = await self.client.conversations.create( # type: ignore[union-attr]
extra_headers=self.extra_headers,
+ metadata={"name": session_name},
)
- self.session_id = agentic_system_create_session_response.session_id
- self.sessions.append(self.session_id)
- return self.session_id
+ self.sessions.append(conversation.id)
+ return conversation.id
async def create_turn(
self,
- messages: List[Union[UserMessage, ToolResponseMessage]],
- session_id: Optional[str] = None,
- toolgroups: Optional[List[Toolgroup]] = None,
- documents: Optional[List[Document]] = None,
+ messages: List[Dict[str, Any]],
+ session_id: str,
stream: bool = True,
- ) -> AsyncIterator[AgentTurnResponseStreamChunk] | Turn:
+ ) -> AsyncIterator[AgentStreamChunk] | Any:
if stream:
- return self._create_turn_streaming(messages, session_id, toolgroups, documents)
+ return self._create_turn_streaming(messages, session_id)
else:
- chunks = [x async for x in self._create_turn_streaming(messages, session_id, toolgroups, documents)]
- if not chunks:
+ last_chunk: Optional[AgentStreamChunk] = None
+ async for chunk in self._create_turn_streaming(messages, session_id):
+ last_chunk = chunk
+ if not last_chunk or not last_chunk.response:
raise Exception("Turn did not complete")
- return chunks[-1].event.payload.turn
+ return last_chunk.response
- async def _run_tool_calls(self, tool_calls: List[ToolCall]) -> List[ToolResponseParam]:
- responses = []
+ async def _run_tool_calls(self, tool_calls: List[ToolCall]) -> List[ToolResponsePayload]:
+ responses: List[ToolResponsePayload] = []
for tool_call in tool_calls:
- responses.append(await self._run_single_tool(tool_call))
+ raw_result = await self._run_single_tool(tool_call)
+ responses.append(ToolUtils.normalize_tool_response(raw_result))
return responses
- async def _run_single_tool(self, tool_call: ToolCall) -> ToolResponseParam:
- # custom client tools
+ async def _run_single_tool(self, tool_call: ToolCall) -> Any:
+ # Execute client-side tools
if tool_call.tool_name in self.client_tools:
tool = self.client_tools[tool_call.tool_name]
result_message = await tool.async_run(
[
CompletionMessage(
role="assistant",
- content=tool_call.tool_name,
+ content=tool_call.arguments,
tool_calls=[tool_call],
stop_reason="end_of_turn",
)
@@ -526,86 +367,181 @@ async def _run_single_tool(self, tool_call: ToolCall) -> ToolResponseParam:
)
return result_message
- # builtin tools executed by tool_runtime
- if tool_call.tool_name in self.builtin_tools:
- tool_result = await self.client.tool_runtime.invoke_tool(
- tool_name=tool_call.tool_name,
- kwargs={
- **tool_call.arguments,
- **self.builtin_tools[tool_call.tool_name],
- },
- extra_headers=self.extra_headers,
- )
- return ToolResponseParam(
- call_id=tool_call.call_id,
- tool_name=tool_call.tool_name,
- content=tool_result.content,
- )
-
- # cannot find tools
- return ToolResponseParam(
- call_id=tool_call.call_id,
- tool_name=tool_call.tool_name,
- content=f"Unknown tool `{tool_call.tool_name}` was called.",
- )
+ # Server-side tools should never reach here (they execute within response stream)
+ # If we get here, it's an error
+ return {
+ "call_id": tool_call.call_id,
+ "tool_name": tool_call.tool_name,
+ "content": f"Unknown tool `{tool_call.tool_name}` was called.",
+ }
async def _create_turn_streaming(
self,
- messages: List[Union[UserMessage, ToolResponseMessage]],
- session_id: Optional[str] = None,
- toolgroups: Optional[List[Toolgroup]] = None,
- documents: Optional[List[Document]] = None,
- ) -> AsyncIterator[AgentTurnResponseStreamChunk]:
- n_iter = 0
-
- # 1. create an agent turn
- turn_response = await self.client.alpha.agents.turn.create(
- agent_id=self.agent_id,
- # use specified session_id or last session created
- session_id=session_id or self.session_id[-1],
- messages=messages,
- stream=True,
- documents=documents,
- toolgroups=toolgroups,
- extra_headers=self.extra_headers,
- )
+ messages: List[Dict[str, Any]],
+ session_id: str,
+ ) -> AsyncIterator[AgentStreamChunk]:
+ await self.initialize()
- # 2. process turn and resume if there's a tool call
- is_turn_complete = False
- while not is_turn_complete:
- is_turn_complete = True
- async for chunk in turn_response:
- if hasattr(chunk, "error"):
- yield chunk
+ # Generate turn_id
+ turn_id = f"turn_{uuid4().hex[:12]}"
+
+ # Create synthesizer
+ synthesizer = TurnEventSynthesizer(session_id=session_id, turn_id=turn_id)
+
+ request_headers = self.extra_headers
+
+ # Main turn loop
+ while True:
+ # Create response stream
+ raw_stream = await self.client.responses.create(
+ model=self._model,
+ instructions=self._instructions,
+ conversation=session_id,
+ input=messages,
+ tools=self._tools,
+ stream=True,
+ extra_headers=request_headers,
+ )
+
+ # Process events
+ function_calls_to_execute: List[ToolCall] = [] # Only client-side!
+
+ for high_level_event in synthesizer.process_raw_stream(raw_stream):
+ # Handle failures
+ if isinstance(high_level_event, TurnFailed):
+ yield AgentStreamChunk(event=high_level_event)
return
- tool_calls = AgentUtils.get_tool_calls(chunk, self.tool_parser)
- if not tool_calls:
- yield chunk
- else:
- is_turn_complete = False
- # End of turn is reached, do not resume even if there's a tool call
- if not self.tool_parser and chunk.event.payload.turn.output_message.stop_reason in {"end_of_turn"}:
- yield chunk
- break
-
- turn_id = AgentUtils.get_turn_id(chunk)
- if n_iter == 0:
- yield chunk
-
- # run the tools
- tool_responses = await self._run_tool_calls(tool_calls)
-
- # pass it to next iteration
- turn_response = await self.client.alpha.agents.turn.resume(
- agent_id=self.agent_id,
- session_id=session_id or self.session_id[-1],
- turn_id=turn_id,
+ # Track function calls that need client execution
+ if isinstance(high_level_event, StepCompleted):
+ if high_level_event.step_type == "inference":
+ result = high_level_event.result
+ if result.function_calls: # Only client-side function calls
+ function_calls_to_execute = result.function_calls
+
+ yield AgentStreamChunk(event=high_level_event)
+
+ # If no client-side function calls, turn is done
+ if not function_calls_to_execute:
+ # Emit TurnCompleted
+ response = synthesizer.last_response
+ if not response:
+ raise RuntimeError("No response available")
+ for event in synthesizer.finish_turn():
+ yield AgentStreamChunk(event=event, response=response)
+ break
+
+ # Execute client-side tools (emit tool execution step events)
+ tool_step_id = f"{turn_id}_step_{synthesizer.step_counter}"
+ synthesizer.step_counter += 1
+
+ yield AgentStreamChunk(
+ event=StepStarted(
+ step_id=tool_step_id,
+ step_type="tool_execution",
+ turn_id=turn_id,
+ metadata={"server_side": False},
+ )
+ )
+
+ tool_responses = await self._run_tool_calls(function_calls_to_execute)
+
+ yield AgentStreamChunk(
+ event=StepCompleted(
+ step_id=tool_step_id,
+ step_type="tool_execution",
+ turn_id=turn_id,
+ result=ToolExecutionStepResult(
+ step_id=tool_step_id,
+ tool_calls=function_calls_to_execute,
tool_responses=tool_responses,
- stream=True,
- extra_headers=self.extra_headers,
- )
- n_iter += 1
+ ),
+ )
+ )
+
+ # Continue loop with tool outputs as input
+ messages = [
+ {
+ "type": "function_call_output",
+ "call_id": payload["call_id"],
+ "output": payload["content"],
+ }
+ for payload in tool_responses
+ ]
+
- if self.tool_parser and n_iter > self.agent_config.get("max_infer_iters", DEFAULT_MAX_ITER):
- raise Exception("Max inference iterations reached")
+class AgentUtils:
+ @staticmethod
+ def get_client_tools(
+ tools: Optional[List[Union[Dict[str, Any], ClientTool, Callable[..., Any]]]],
+ ) -> List[ClientTool]:
+ if not tools:
+ return []
+
+ # Wrap any function in client_tool decorator
+ tools = [client_tool(tool) if (callable(tool) and not isinstance(tool, ClientTool)) else tool for tool in tools]
+ return [tool for tool in tools if isinstance(tool, ClientTool)]
+
+ @staticmethod
+ def get_tool_calls(chunk: AgentStreamChunk, tool_parser: Optional[ToolParser] = None) -> List[ToolCall]:
+ if not isinstance(chunk.event, StepProgress):
+ return []
+
+ delta = chunk.event.delta
+ if not isinstance(delta, ToolCallIssuedDelta) or delta.tool_type != "function":
+ return []
+
+ tool_call = ToolCall(
+ call_id=delta.call_id,
+ tool_name=delta.tool_name,
+ arguments=delta.arguments,
+ )
+
+ if tool_parser:
+ completion = CompletionMessage(
+ role="assistant",
+ content="",
+ tool_calls=[tool_call],
+ stop_reason="end_of_turn",
+ )
+ return tool_parser.get_tool_calls(completion)
+
+ return [tool_call]
+
+ @staticmethod
+ def get_turn_id(chunk: AgentStreamChunk) -> Optional[str]:
+ return chunk.response.turn.turn_id if chunk.response else None
+
+ @staticmethod
+ def normalize_tools(
+ tools: Optional[List[Union[Dict[str, Any], ClientTool, Callable[..., Any]]]],
+ ) -> Tuple[List[Dict[str, Any]], List[ClientTool]]:
+ """Convert all tools to API format dicts.
+
+ Returns:
+ - List of tool dicts for responses.create(tools=...)
+ - List of ClientTool instances for client-side execution
+ """
+ if not tools:
+ return [], []
+
+ tool_dicts: List[Dict[str, Any]] = []
+ client_tool_instances: List[ClientTool] = []
+
+ for tool in tools:
+ # Convert callable to ClientTool
+ if callable(tool) and not isinstance(tool, ClientTool):
+ tool = client_tool(tool)
+
+ if isinstance(tool, ClientTool):
+ # Convert ClientTool to function tool dict
+ tool_def = tool.get_tool_definition()
+ tool_dicts.append(tool_def)
+ client_tool_instances.append(tool)
+ elif isinstance(tool, dict):
+ # Server-side tool dict (file_search, web_search, etc.)
+ tool_dicts.append(tool) # type: ignore[arg-type]
+ else:
+ raise TypeError(f"Unsupported tool type: {type(tool)!r}")
+
+ return tool_dicts, client_tool_instances
diff --git a/src/llama_stack_client/lib/agents/client_tool.py b/src/llama_stack_client/lib/agents/client_tool.py
index 09164361..63a5bfd7 100644
--- a/src/llama_stack_client/lib/agents/client_tool.py
+++ b/src/llama_stack_client/lib/agents/client_tool.py
@@ -21,9 +21,7 @@
from typing_extensions import TypedDict
-from llama_stack_client.types import CompletionMessage, Message
-from llama_stack_client.types.alpha import ToolResponse
-from llama_stack_client.types.tool_def_param import ToolDefParam
+from .types import CompletionMessage, Message, ToolDefinition, ToolResponse
class JSONSchema(TypedDict, total=False):
@@ -61,13 +59,13 @@ def get_input_schema(self) -> JSONSchema:
def get_instruction_string(self) -> str:
return f"Use the function '{self.get_name()}' to: {self.get_description()}"
- def get_tool_definition(self) -> ToolDefParam:
- return ToolDefParam(
- name=self.get_name(),
- description=self.get_description(),
- input_schema=self.get_input_schema(),
- metadata={},
- )
+ def get_tool_definition(self) -> ToolDefinition:
+ return {
+ "type": "function",
+ "name": self.get_name(),
+ "description": self.get_description(),
+ "parameters": self.get_input_schema(),
+ }
def run(
self,
@@ -82,7 +80,6 @@ def run(
metadata = {}
try:
params = json.loads(tool_call.arguments)
-
response = self.run_impl(**params)
if isinstance(response, dict) and "content" in response:
content = json.dumps(response["content"], ensure_ascii=False)
@@ -108,7 +105,8 @@ async def async_run(
tool_call = last_message.tool_calls[0]
metadata = {}
try:
- response = await self.async_run_impl(**tool_call.arguments)
+ params = json.loads(tool_call.arguments)
+ response = await self.async_run_impl(**params)
if isinstance(response, dict) and "content" in response:
content = json.dumps(response["content"], ensure_ascii=False)
metadata = response.get("metadata", {})
diff --git a/src/llama_stack_client/lib/agents/event_logger.py b/src/llama_stack_client/lib/agents/event_logger.py
index b4e1a219..8b56f398 100644
--- a/src/llama_stack_client/lib/agents/event_logger.py
+++ b/src/llama_stack_client/lib/agents/event_logger.py
@@ -4,142 +4,131 @@
# This source code is licensed under the terms described in the LICENSE file in
# the root directory of this source tree.
-from typing import Any, Iterator, Optional
-
-from termcolor import cprint
-
-from llama_stack_client.types import InterleavedContent
-
-
-def interleaved_content_as_str(content: InterleavedContent, sep: str = " ") -> str:
- def _process(c: Any) -> str:
- if isinstance(c, str):
- return c
- elif hasattr(c, "type"):
- if c.type == "text":
- return c.text
- elif c.type == "image":
- return ""
- else:
- raise ValueError(f"Unexpected type {c}")
- else:
- raise ValueError(f"Unsupported content type: {type(c)}")
-
- if isinstance(content, list):
- return sep.join(_process(c) for c in content)
- else:
- return _process(content)
-
-
-class TurnStreamPrintableEvent:
- def __init__(
- self,
- role: Optional[str] = None,
- content: str = "",
- end: Optional[str] = "\n",
- color: str = "white",
- ) -> None:
- self.role = role
- self.content = content
- self.color = color
- self.end = "\n" if end is None else end
-
- def __str__(self) -> str:
- if self.role is not None:
- return f"{self.role}> {self.content}"
- else:
- return f"{self.content}"
-
- def print(self, flush: bool = True) -> None:
- cprint(f"{str(self)}", color=self.color, end=self.end, flush=flush)
-
-
-class TurnStreamEventPrinter:
- def yield_printable_events(self, chunk: Any) -> Iterator[TurnStreamPrintableEvent]:
- for printable_event in self._yield_printable_events(chunk):
- yield printable_event
-
- def _yield_printable_events(self, chunk: Any) -> Iterator[TurnStreamPrintableEvent]:
- if hasattr(chunk, "error"):
- yield TurnStreamPrintableEvent(role=None, content=chunk.error["message"], color="red")
- return
-
- event = chunk.event
- event_type = event.payload.event_type
-
- if event_type in {"turn_start", "turn_complete", "turn_awaiting_input"}:
- # Currently not logging any turn realted info
- yield TurnStreamPrintableEvent(role=None, content="", end="", color="grey")
- return
-
- step_type = event.payload.step_type
- # handle safety
- if step_type == "shield_call" and event_type == "step_complete":
- violation = event.payload.step_details.violation
- if not violation:
- yield TurnStreamPrintableEvent(role=step_type, content="No Violation", color="magenta")
- else:
- yield TurnStreamPrintableEvent(
- role=step_type,
- content=f"{violation.metadata} {violation.user_message}",
- color="red",
- )
-
- # handle inference
- if step_type == "inference":
- if event_type == "step_start":
- yield TurnStreamPrintableEvent(role=step_type, content="", end="", color="yellow")
- elif event_type == "step_progress":
- if event.payload.delta.type == "tool_call":
- if isinstance(event.payload.delta.tool_call, str):
- yield TurnStreamPrintableEvent(
- role=None,
- content=event.payload.delta.tool_call,
- end="",
- color="cyan",
- )
- elif event.payload.delta.type == "text":
- yield TurnStreamPrintableEvent(
- role=None,
- content=event.payload.delta.text,
- end="",
- color="yellow",
- )
- else:
- # step complete
- yield TurnStreamPrintableEvent(role=None, content="")
-
- # handle tool_execution
- if step_type == "tool_execution" and event_type == "step_complete":
- # Only print tool calls and responses at the step_complete event
- details = event.payload.step_details
- for t in details.tool_calls:
- yield TurnStreamPrintableEvent(
- role=step_type,
- content=f"Tool:{t.tool_name} Args:{t.arguments}",
- color="green",
- )
-
- for r in details.tool_responses:
- if r.tool_name == "query_from_memory":
- inserted_context = interleaved_content_as_str(r.content)
- content = f"fetched {len(inserted_context)} bytes from memory"
-
- yield TurnStreamPrintableEvent(
- role=step_type,
- content=content,
- color="cyan",
- )
- else:
- yield TurnStreamPrintableEvent(
- role=step_type,
- content=f"Tool:{r.tool_name} Response:{r.content}",
- color="green",
- )
-
-
-class EventLogger:
- def log(self, event_generator: Iterator[Any]) -> Iterator[TurnStreamPrintableEvent]:
- printer = TurnStreamEventPrinter()
+"""Event logger for agent interactions.
+
+This module provides a simple logger that converts agent stream events
+into human-readable printable strings for console output.
+"""
+
+from typing import Iterator
+
+from .turn_events import (
+ AgentStreamChunk,
+ TurnStarted,
+ TurnCompleted,
+ TurnFailed,
+ StepStarted,
+ StepProgress,
+ StepCompleted,
+ TextDelta,
+ ToolCallIssuedDelta,
+ ToolCallDelta,
+)
+
+__all__ = ["AgentEventLogger", "EventLogger"]
+
+
+class AgentEventLogger:
+ """Logger for agent events with turn/step semantics.
+
+ This logger converts high-level agent events into printable strings
+ that can be displayed to users. It handles:
+ - Turn lifecycle events
+ - Step boundaries (inference, tool execution)
+ - Streaming content (text, tool calls)
+ - Server-side and client-side tool execution
+
+ Usage:
+ logger = AgentEventLogger()
+ for chunk in agent.create_turn(...):
+ for printable in logger.log([chunk]):
+ print(printable, end="", flush=True)
+ """
+
+ def log(self, event_generator: Iterator[AgentStreamChunk]) -> Iterator[str]:
+ """Generate printable strings from agent stream chunks.
+
+ Args:
+ event_generator: Iterator of AgentStreamChunk objects
+
+ Yields:
+ Printable string fragments
+ """
for chunk in event_generator:
- yield from printer.yield_printable_events(chunk)
+ event = chunk.event
+
+ if isinstance(event, TurnStarted):
+ # Optionally log turn start (commented out to reduce noise)
+ # yield f"[Turn {event.turn_id}]\n"
+ pass
+
+ elif isinstance(event, StepStarted):
+ if event.step_type == "inference":
+ # Indicate model is thinking (no newline)
+ yield "🤔 "
+ elif event.step_type == "tool_execution":
+ # Indicate tools are executing
+ server_side = event.metadata and event.metadata.get("server_side", False)
+ if server_side:
+ tool_type = event.metadata.get("tool_type", "tool")
+ yield f"\n🔧 Executing {tool_type} (server-side)...\n"
+ else:
+ yield "\n🔧 Executing function tools (client-side)...\n"
+
+ elif isinstance(event, StepProgress):
+ if event.step_type == "inference":
+ if isinstance(event.delta, TextDelta):
+ # Stream text as it comes
+ yield event.delta.text
+
+ elif isinstance(event.delta, ToolCallIssuedDelta):
+ # Log client-side function calls (server-side handled as separate tool_execution steps)
+ if event.delta.tool_type == "function":
+ # Client-side function call
+ yield f"\n📞 Function call: {event.delta.tool_name}({event.delta.arguments})"
+
+ elif isinstance(event.delta, ToolCallDelta):
+ # Optionally stream tool arguments (can be noisy, so commented out)
+ # yield event.delta.arguments_delta
+ pass
+
+ elif event.step_type == "tool_execution":
+ # Handle tool execution progress (for server-side tools)
+ if isinstance(event.delta, ToolCallIssuedDelta):
+ # Don't log again, already logged at StepStarted
+ pass
+ elif isinstance(event.delta, ToolCallDelta):
+ # Optionally log argument streaming
+ pass
+
+ elif isinstance(event, StepCompleted):
+ if event.step_type == "inference":
+ result = event.result
+ # Server-side tools already logged during progress
+ if not result.function_calls:
+ # End of inference with no function calls
+ yield "\n"
+
+ elif event.step_type == "tool_execution":
+ # Log client-side tool execution results
+ result = event.result
+ for resp in result.tool_responses:
+ tool_name = resp.get("tool_name", "unknown")
+ content = resp.get("content", "")
+ # Truncate long responses for readability
+ if isinstance(content, str) and len(content) > 100:
+ content = content[:100] + "..."
+ yield f" → {tool_name}: {content}\n"
+
+ elif isinstance(event, TurnCompleted):
+ # Optionally log turn completion (commented out to reduce noise)
+ # yield f"\n[Completed in {event.num_steps} steps]\n"
+ pass
+
+ elif isinstance(event, TurnFailed):
+ # Always log failures
+ yield f"\n❌ Turn failed: {event.error_message}\n"
+
+
+# Alias for backwards compatibility
+EventLogger = AgentEventLogger
diff --git a/src/llama_stack_client/lib/agents/event_synthesizer.py b/src/llama_stack_client/lib/agents/event_synthesizer.py
new file mode 100644
index 00000000..22ce5c24
--- /dev/null
+++ b/src/llama_stack_client/lib/agents/event_synthesizer.py
@@ -0,0 +1,449 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
+"""Translate Responses API stream events into structured turn events.
+
+TurnEventSynthesizer keeps just enough state to expose turns and steps for
+agents. It consumes the raw Responses API stream and emits the higher-level
+events defined in ``turn_events.py`` without introducing an intermediate
+low-level event layer.
+"""
+
+from __future__ import annotations
+
+import json
+from dataclasses import dataclass
+from typing import Any, Dict, Iterable, Iterator, List, Optional
+
+from logging import getLogger
+
+from .types import ToolCall
+
+from .turn_events import (
+ AgentEvent,
+ InferenceStepResult,
+ StepCompleted,
+ StepProgress,
+ StepStarted,
+ TextDelta,
+ ToolCallDelta,
+ ToolCallIssuedDelta,
+ ToolExecutionStepResult,
+ TurnCompleted,
+ TurnFailed,
+ TurnStarted,
+)
+
+logger = getLogger(__name__)
+
+
+@dataclass
+class _ToolCallState:
+ call_id: str
+ tool_name: str
+ tool_type: str
+ server_side: bool
+ arguments: str = ""
+
+ def update(self, *, delta: Optional[str] = None, final: Optional[str] = None) -> None:
+ if final is not None:
+ self.arguments = final or "{}"
+ elif delta:
+ self.arguments += delta
+
+ def as_tool_call(self) -> ToolCall:
+ payload = self.arguments or "{}"
+ return ToolCall(call_id=self.call_id, tool_name=self.tool_name, arguments=payload)
+
+
class TurnEventSynthesizer:
    """Produce turn/step events directly from Responses API streaming events.

    The synthesizer is a stateful translator: it consumes raw streaming events
    from the Responses API and emits the turn/step event model defined in
    ``turn_events.py``. One instance tracks exactly one turn, which may span
    multiple Responses API responses (recorded in ``all_response_ids``).
    """

    def __init__(self, session_id: str, turn_id: str):
        """Bind the synthesizer to a single (session, turn) pair."""
        self.session_id = session_id
        self.turn_id = turn_id

        # Monotonic counter used by _next_step_id to mint per-turn step ids.
        self.step_counter = 0
        self.current_step_id: Optional[str] = None
        self.current_step_type: Optional[str] = None

        self.current_response_id: Optional[str] = None
        # Text fragments accumulated for the in-flight inference step.
        self.text_parts: List[str] = []
        # call_ids of client-side function calls issued during the current inference step.
        self._function_call_ids: List[str] = []
        # Every tool call seen this turn (client- and server-side), keyed by call_id.
        self._tool_calls: Dict[str, _ToolCallState] = {}

        self.turn_started = False
        self.all_response_ids: List[str] = []
        self.last_response: Optional[Any] = None

    # ------------------------------------------------------------------ helpers

    def _next_step_id(self) -> str:
        """Mint the next sequential step id for this turn."""
        step_id = f"{self.turn_id}_step_{self.step_counter}"
        self.step_counter += 1
        return step_id

    def _maybe_emit_turn_started(self) -> Iterator[AgentEvent]:
        """Emit TurnStarted exactly once, on the first processed event."""
        if not self.turn_started:
            self.turn_started = True
            yield TurnStarted(turn_id=self.turn_id, session_id=self.session_id)

    def _start_inference_step(
        self, *, response_id: Optional[str] = None, reset_tool_state: bool = False
    ) -> Iterator[AgentEvent]:
        """Open a new inference step and emit its StepStarted event.

        ``reset_tool_state`` clears all accumulated tool-call state and is used
        when a brand-new response begins (as opposed to resuming inference
        after a server-side tool execution).
        """
        if response_id:
            self.current_response_id = response_id
        if reset_tool_state:
            self._tool_calls = {}
        self.text_parts = []
        self._function_call_ids = []
        self.current_step_id = self._next_step_id()
        self.current_step_type = "inference"
        yield StepStarted(step_id=self.current_step_id, step_type="inference", turn_id=self.turn_id)

    def _complete_inference_step(self, *, stop_reason: str, response_id: Optional[str] = None) -> Iterator[AgentEvent]:
        """Close the current inference step (no-op unless one is active).

        Emits StepCompleted carrying the accumulated text and any pending
        client-side function calls, then resets the per-step state.
        """
        if self.current_step_type != "inference":
            return
        step_id = self.current_step_id or self._next_step_id()
        resolved_response_id = response_id or self.current_response_id or ""
        self.current_response_id = resolved_response_id

        # Collect client-side function calls that the agent still has to execute.
        function_calls: List[ToolCall] = []
        for call_id in self._function_call_ids:
            state = self._tool_calls.get(call_id)
            if state is None:
                continue
            function_calls.append(state.as_tool_call())

        yield StepCompleted(
            step_id=step_id,
            step_type="inference",
            turn_id=self.turn_id,
            result=InferenceStepResult(
                step_id=step_id,
                response_id=resolved_response_id,
                text_content="".join(self.text_parts),
                function_calls=function_calls,
                server_tool_executions=[],
                stop_reason=stop_reason,
            ),
        )

        for call_id in list(self._function_call_ids):
            # Drop client-side call state once we hand it back to the agent.
            self._tool_calls.pop(call_id, None)
        self._function_call_ids = []
        self.text_parts = []
        self.current_step_id = None
        self.current_step_type = None

    def _start_tool_execution_step(self, call_state: _ToolCallState) -> Iterator[AgentEvent]:
        """Open a tool_execution step for a server-side tool call.

        Emits StepStarted followed immediately by a ToolCallIssuedDelta
        progress event so consumers see the call's name/arguments up front.
        """
        self.current_step_id = self._next_step_id()
        self.current_step_type = "tool_execution"
        yield StepStarted(
            step_id=self.current_step_id,
            step_type="tool_execution",
            turn_id=self.turn_id,
            metadata={"server_side": True, "tool_type": call_state.tool_type, "tool_name": call_state.tool_name},
        )
        yield StepProgress(
            step_id=self.current_step_id,
            step_type="tool_execution",
            turn_id=self.turn_id,
            delta=ToolCallIssuedDelta(
                call_id=call_state.call_id,
                tool_type=call_state.tool_type,  # type: ignore[arg-type]
                tool_name=call_state.tool_name,
                arguments=call_state.arguments or "{}",
            ),
        )

    def _complete_tool_execution_step(self, call_state: _ToolCallState) -> Iterator[AgentEvent]:
        """Close the current tool_execution step (no-op unless one is active).

        Server-side tool outputs are not surfaced by this stream path, so
        ``tool_responses`` is left empty.
        """
        if self.current_step_type != "tool_execution":
            return
        step_id = self.current_step_id or self._next_step_id()
        yield StepCompleted(
            step_id=step_id,
            step_type="tool_execution",
            turn_id=self.turn_id,
            result=ToolExecutionStepResult(
                step_id=step_id,
                tool_calls=[call_state.as_tool_call()],
                tool_responses=[],
            ),
        )
        self._tool_calls.pop(call_state.call_id, None)
        self.current_step_id = None
        self.current_step_type = None

    @staticmethod
    def _coerce_arguments(payload: Any) -> Optional[str]:
        """Best-effort conversion of tool-call arguments to a JSON string."""
        if payload is None:
            return None
        if isinstance(payload, str):
            return payload
        try:
            return json.dumps(payload)
        except Exception:  # pragma: no cover - defensive
            return str(payload)

    def _register_tool_call(
        self,
        *,
        call_id: Optional[str],
        tool_name: str,
        tool_type: str,
        arguments: Optional[str],
    ) -> _ToolCallState:
        """Create and index a _ToolCallState, synthesizing a call_id if absent."""
        resolved_call_id = call_id or f"{tool_name}_{len(self._tool_calls)}"
        state = _ToolCallState(
            call_id=resolved_call_id,
            tool_name=tool_name,
            tool_type=tool_type,
            # Anything that is not a client-side "function" runs on the server.
            server_side=tool_type != "function",
        )
        if arguments:
            state.update(final=arguments)
        self._tool_calls[resolved_call_id] = state
        return state

    def _classify_tool_type(self, tool_name: str) -> str:
        """Map a tool/item name onto a tool_type; unknown names are client functions."""
        server_side_tools = {
            "file_search",
            "file_search_call",
            "knowledge_search",
            "web_search",
            "web_search_call",
            "query_from_memory",
            "mcp_call",
            "mcp_list_tools",
            "memory_retrieval",
        }

        if tool_name in server_side_tools:
            # Normalize "*_call" item names onto their canonical tool types.
            if tool_name in {"file_search_call", "knowledge_search"}:
                return "file_search"
            if tool_name == "web_search_call":
                return "web_search"
            return tool_name

        return "function"

    # ------------------------------------------------------------------ handlers

    def process_raw_stream(self, events: Iterable[Any]) -> Iterator[AgentEvent]:
        """Translate a raw Responses API event stream into turn/step events.

        Main entry point: dispatches on each raw event's ``type`` string and
        drives the step state machine. Unknown event types are ignored on
        purpose, since they are typically ancillary metadata.
        """
        current_response_id: Optional[str] = None

        for event in events:
            yield from self._maybe_emit_turn_started()

            # Track the most recent response id regardless of the event shape.
            response_id = getattr(event, "response_id", None)
            if response_id is None and hasattr(event, "response"):
                response = getattr(event, "response")
                response_id = getattr(response, "id", None)
            if response_id is not None:
                current_response_id = response_id

            event_type = getattr(event, "type", None)
            if not event_type:
                logger.debug("Unhandled stream event with no type: %r", event)
                continue

            if event_type == "response.in_progress":
                # A new response began: record its id and open an inference step.
                response = getattr(event, "response", None)
                response_id = getattr(response, "id", current_response_id)
                if response_id is None:
                    continue
                if not self.all_response_ids or self.all_response_ids[-1] != response_id:
                    self.all_response_ids.append(response_id)
                yield from self._start_inference_step(response_id=response_id, reset_tool_state=True)

            elif event_type == "response.output_text.delta":
                if self.current_step_type != "inference":
                    continue
                text = getattr(event, "delta", "") or ""
                if not text:
                    continue
                self.text_parts.append(text)
                yield StepProgress(
                    step_id=self.current_step_id or "",
                    step_type="inference",
                    turn_id=self.turn_id,
                    delta=TextDelta(text=text),
                )

            elif event_type == "response.output_text.done":
                # Text completions are tracked via the final StepCompleted event.
                continue

            elif event_type == "response.output_item.added":
                yield from self._handle_output_item_added(getattr(event, "item", None))

            elif event_type == "response.output_item.delta":
                yield from self._handle_output_item_delta(getattr(event, "delta", None))

            elif event_type == "response.output_item.done":
                yield from self._handle_output_item_done(getattr(event, "item", None))

            elif event_type == "response.completed":
                response = getattr(event, "response", None)
                if response is not None:
                    self.last_response = response
                # Pending client-side calls mean the agent must execute tools next.
                stop_reason = "tool_calls" if self._function_call_ids else "end_of_turn"
                yield from self._complete_inference_step(stop_reason=stop_reason, response_id=current_response_id)

            elif event_type == "response.failed":
                response = getattr(event, "response", None)
                error_obj = getattr(response, "error", None)
                error_message = getattr(error_obj, "message", None) if error_obj else None
                yield TurnFailed(
                    turn_id=self.turn_id,
                    session_id=self.session_id,
                    error_message=error_message or "Unknown error",
                )

            else:  # pragma: no cover - depends on streaming responses
                # Allow unknown streaming events to pass silently; they are often ancillary metadata.
                continue

    def _handle_output_item_added(self, item: Any) -> Iterator[AgentEvent]:
        """React to a new output item (message or tool call).

        Server-side tool calls close the current inference step and open a
        tool_execution step; client-side function calls stay inside the
        inference step and are surfaced as ToolCallIssuedDelta progress.
        """
        if item is None:
            return
        item_type = getattr(item, "type", None)
        if item_type is None:
            return

        if item_type == "message":
            # Messages mirror text deltas, nothing extra to emit.
            return

        if item_type in {
            "function_call",
            "web_search",
            "web_search_call",
            "mcp_call",
            "mcp_list_tools",
            "file_search_call",
        }:
            call_id = getattr(item, "call_id", None) or getattr(item, "id", None)
            tool_name = getattr(item, "name", None) or getattr(item, "type", "")
            arguments = self._coerce_arguments(getattr(item, "arguments", None))
            # Only function_call items carry a meaningful name; other items are
            # classified by their item type (e.g. "web_search_call").
            tool_type = self._classify_tool_type(tool_name if item_type == "function_call" else item_type)

            state = self._register_tool_call(
                call_id=call_id,
                tool_name=tool_name,
                tool_type=tool_type,
                arguments=arguments,
            )

            if state.server_side:
                yield from self._complete_inference_step(
                    stop_reason="server_tool_call", response_id=self.current_response_id
                )
                yield from self._start_tool_execution_step(state)
            else:
                if state.call_id not in self._function_call_ids:
                    self._function_call_ids.append(state.call_id)
                yield StepProgress(
                    step_id=self.current_step_id or "",
                    step_type="inference",
                    turn_id=self.turn_id,
                    delta=ToolCallIssuedDelta(
                        call_id=state.call_id,
                        tool_type="function",
                        tool_name=state.tool_name,
                        arguments=state.arguments or "{}",
                    ),
                )

    def _handle_output_item_delta(self, delta: Any) -> Iterator[AgentEvent]:
        """Stream incremental tool-call arguments into the matching call state."""
        if delta is None:
            return
        delta_type = getattr(delta, "type", None)
        if delta_type not in {
            "function_call",
            "web_search",
            "web_search_call",
            "mcp_call",
            "mcp_list_tools",
            "file_search_call",
        }:
            return

        call_id = getattr(delta, "call_id", None) or getattr(delta, "id", None)
        if call_id is None:
            return

        # The arguments fragment may appear under several attribute/key names
        # depending on the event shape; try each in turn.
        arguments_delta = getattr(delta, "arguments_delta", None)
        if arguments_delta is None:
            arguments_delta = getattr(delta, "arguments", None)
        if arguments_delta is None and isinstance(delta, dict):
            arguments_delta = delta.get("arguments_delta") or delta.get("arguments")
        if arguments_delta is None:
            return

        state = self._tool_calls.get(call_id)
        if state is None:
            return

        state.update(delta=arguments_delta)
        step_type = "tool_execution" if state.server_side else "inference"
        yield StepProgress(
            step_id=self.current_step_id or "",
            step_type=step_type,  # type: ignore[arg-type]
            turn_id=self.turn_id,
            delta=ToolCallDelta(call_id=state.call_id, arguments_delta=arguments_delta),
        )

    def _handle_output_item_done(self, item: Any) -> Iterator[AgentEvent]:
        """Finalize a tool-call output item.

        Server-side calls close their tool_execution step and reopen inference;
        client-side calls are queued for the agent to execute after the
        inference step completes.
        """
        if item is None:
            return

        item_type = getattr(item, "type", None)
        if item_type not in {
            "function_call",
            "web_search",
            "web_search_call",
            "mcp_call",
            "mcp_list_tools",
            "file_search_call",
        }:
            return

        call_id = getattr(item, "call_id", None) or getattr(item, "id", None)
        if call_id is None:
            return

        state = self._tool_calls.get(call_id)
        if state is None:
            return

        arguments = self._coerce_arguments(getattr(item, "arguments", None))
        if arguments:
            state.update(final=arguments)

        if state.server_side:
            yield from self._complete_tool_execution_step(state)
            # Start a fresh inference step so the model can continue reasoning.
            yield from self._start_inference_step()
        else:
            if call_id not in self._function_call_ids:
                self._function_call_ids.append(call_id)

    # ------------------------------------------------------------------ turn end

    def finish_turn(self) -> Iterator[AgentEvent]:
        """Emit the final TurnCompleted event.

        Raises:
            RuntimeError: If no ``response.completed`` event was ever observed.
        """
        if not self.last_response:
            raise RuntimeError("Cannot finish turn without a response")

        yield TurnCompleted(
            turn_id=self.turn_id,
            session_id=self.session_id,
            final_text=self.last_response.output_text,
            response_ids=self.all_response_ids,
            num_steps=self.step_counter,
        )
diff --git a/src/llama_stack_client/lib/agents/react/agent.py b/src/llama_stack_client/lib/agents/react/agent.py
index 919f0420..cd14c6bf 100644
--- a/src/llama_stack_client/lib/agents/react/agent.py
+++ b/src/llama_stack_client/lib/agents/react/agent.py
@@ -4,225 +4,91 @@
# This source code is licensed under the terms described in the LICENSE file in
# the root directory of this source tree.
import logging
-from typing import Any, Callable, List, Optional, Tuple, Union
-
-from llama_stack_client import LlamaStackClient
-from llama_stack_client.types.agent_create_params import AgentConfig
-from llama_stack_client.types.agents.turn_create_params import Toolgroup
-from llama_stack_client.types.shared_params.agent_config import ToolConfig
-from llama_stack_client.types.shared_params.response_format import ResponseFormat
-from llama_stack_client.types.shared_params.sampling_params import SamplingParams
+from collections.abc import Mapping
+from typing import Any, Callable, Dict, List, Optional, Tuple, Union
from ...._types import Headers
from ..agent import Agent, AgentUtils
from ..client_tool import ClientTool
from ..tool_parser import ToolParser
from .prompts import DEFAULT_REACT_AGENT_SYSTEM_PROMPT_TEMPLATE
-from .tool_parser import ReActOutput, ReActToolParser
+from .tool_parser import ReActToolParser
logger = logging.getLogger(__name__)
-def get_tool_defs(
- client: LlamaStackClient, builtin_toolgroups: Tuple[Toolgroup] = (), client_tools: Tuple[ClientTool] = ()
-):
- tool_defs = []
- for x in builtin_toolgroups:
- if isinstance(x, str):
- toolgroup_id = x
- else:
- toolgroup_id = x["name"]
- tool_defs.extend(
- [
- {
- "name": tool.identifier,
- "description": tool.description,
- "input_schema": tool.input_schema,
- }
- for tool in client.tools.list(toolgroup_id=toolgroup_id)
- ]
- )
+def _tool_definition_from_mapping(tool: Mapping[str, Any]) -> Dict[str, Any]:
+ name = tool.get("name") or tool.get("identifier") or tool.get("tool_name") or tool.get("type") or "tool"
+ description = tool.get("description") or tool.get("summary") or ""
+ parameters = tool.get("parameters") or tool.get("input_schema") or {}
+ return {
+ "name": str(name),
+ "description": str(description),
+ "input_schema": parameters,
+ }
+
+def _collect_tool_definitions(
+ server_tools: Tuple[Mapping[str, Any], ...],
+ client_tools: Tuple[ClientTool, ...],
+) -> List[Dict[str, Any]]:
+ tool_defs = [_tool_definition_from_mapping(tool) for tool in server_tools]
tool_defs.extend(
- [
- {
- "name": tool.get_name(),
- "description": tool.get_description(),
- "input_schema": tool.get_input_schema(),
- }
- for tool in client_tools
- ]
+ {
+ "name": tool.get_name(),
+ "description": tool.get_description(),
+ "input_schema": tool.get_input_schema(),
+ }
+ for tool in client_tools
)
return tool_defs
-def get_default_react_instructions(
- client: LlamaStackClient, builtin_toolgroups: Tuple[str] = (), client_tools: Tuple[ClientTool] = ()
-):
- tool_defs = get_tool_defs(client, builtin_toolgroups, client_tools)
- tool_names = ", ".join([x["name"] for x in tool_defs])
- tool_descriptions = "\n".join([f"- {x['name']}: {x}" for x in tool_defs])
def get_default_react_instructions(tool_defs: List[Dict[str, Any]]) -> str:
    """Render the default ReAct system prompt for the given tool definitions.

    :param tool_defs: Normalized definitions with ``name``, ``description``
        and ``input_schema`` keys (see ``_collect_tool_definitions``).
    :returns: The prompt template with tool names and descriptions substituted.
    """
    tool_names = ", ".join([definition["name"] for definition in tool_defs])
    tool_descriptions = "\n".join(
        [
            f"- {definition['name']}: {definition['description'] or definition['input_schema']}"
            for definition in tool_defs
        ]
    )
    # BUG FIX: both substitutions previously targeted the same literal "<>", so
    # the first .replace() consumed every placeholder and the second was a dead
    # no-op — tool descriptions were never inserted into the prompt. Use two
    # distinct placeholders instead.
    # NOTE(review): the placeholder spellings must match the ones used inside
    # DEFAULT_REACT_AGENT_SYSTEM_PROMPT_TEMPLATE in prompts.py — confirm there.
    instruction = DEFAULT_REACT_AGENT_SYSTEM_PROMPT_TEMPLATE.replace("<<tool_names>>", tool_names).replace(
        "<<tool_descriptions>>", tool_descriptions
    )
    return instruction
-def get_agent_config_DEPRECATED(
- client: LlamaStackClient,
- model: str,
- builtin_toolgroups: Tuple[str] = (),
- client_tools: Tuple[ClientTool] = (),
- json_response_format: bool = False,
- custom_agent_config: Optional[AgentConfig] = None,
-) -> AgentConfig:
- if custom_agent_config is None:
- instruction = get_default_react_instructions(client, builtin_toolgroups, client_tools)
-
- # user default toolgroups
- agent_config = AgentConfig(
- model=model,
- instructions=instruction,
- toolgroups=builtin_toolgroups,
- client_tools=[client_tool.get_tool_definition() for client_tool in client_tools],
- tool_config={
- "tool_choice": "auto",
- "system_message_behavior": "replace",
- },
- input_shields=[],
- output_shields=[],
- enable_session_persistence=False,
- )
- else:
- agent_config = custom_agent_config
-
- if json_response_format:
- agent_config["response_format"] = {
- "type": "json_schema",
- "json_schema": ReActOutput.model_json_schema(),
- }
-
- return agent_config
-
-
class ReActAgent(Agent):
- """ReAct agent.
-
- Simple wrapper around Agent to add prepare prompts for creating a ReAct agent from a list of tools.
- """
-
def __init__(
self,
- client: LlamaStackClient,
+ client: Any,
+ *,
model: str,
- tool_parser: ToolParser = ReActToolParser(),
+ tools: Optional[List[Union[Dict[str, Any], ClientTool, Callable[..., Any]]]] = None,
+ tool_parser: Optional[ToolParser] = None,
instructions: Optional[str] = None,
- tools: Optional[List[Union[Toolgroup, ClientTool, Callable[..., Any]]]] = None,
- tool_config: Optional[ToolConfig] = None,
- sampling_params: Optional[SamplingParams] = None,
- max_infer_iters: Optional[int] = None,
- input_shields: Optional[List[str]] = None,
- output_shields: Optional[List[str]] = None,
- response_format: Optional[ResponseFormat] = None,
- enable_session_persistence: Optional[bool] = None,
- json_response_format: bool = False,
- builtin_toolgroups: Tuple[str] = (), # DEPRECATED
- client_tools: Tuple[ClientTool] = (), # DEPRECATED
- custom_agent_config: Optional[AgentConfig] = None, # DEPRECATED
extra_headers: Headers | None = None,
+ json_response_format: bool = False,
):
- """Construct an Agent with the given parameters.
-
- :param client: The LlamaStackClient instance.
- :param custom_agent_config: The AgentConfig instance.
- ::deprecated: use other parameters instead
- :param client_tools: A tuple of ClientTool instances.
- ::deprecated: use tools instead
- :param builtin_toolgroups: A tuple of Toolgroup instances.
- ::deprecated: use tools instead
- :param tool_parser: Custom logic that parses tool calls from a message.
- :param model: The model to use for the agent.
- :param instructions: The instructions for the agent.
- :param tools: A list of tools for the agent. Values can be one of the following:
- - dict representing a toolgroup/tool with arguments: e.g. {"name": "builtin::rag/knowledge_search", "args": {"vector_db_ids": [123]}}
- - a python function with a docstring. See @client_tool for more details.
- - str representing a tool within a toolgroup: e.g. "builtin::rag/knowledge_search"
- - str representing a toolgroup_id: e.g. "builtin::rag", "builtin::code_interpreter", where all tools in the toolgroup will be added to the agent
- - an instance of ClientTool: A client tool object.
- :param tool_config: The tool configuration for the agent.
- :param sampling_params: The sampling parameters for the agent.
- :param max_infer_iters: The maximum number of inference iterations.
- :param input_shields: The input shields for the agent.
- :param output_shields: The output shields for the agent.
- :param response_format: The response format for the agent.
- :param enable_session_persistence: Whether to enable session persistence.
- :param json_response_format: Whether to use the json response format with default ReAct output schema.
- ::deprecated: use response_format instead
- :param extra_headers: Extra headers to add to all requests sent by the agent.
- """
- use_deprecated_params = False
- if custom_agent_config is not None:
- logger.warning("`custom_agent_config` is deprecated. Use inlined parameters instead.")
- use_deprecated_params = True
- if client_tools != ():
- logger.warning("`client_tools` is deprecated. Use `tools` instead.")
- use_deprecated_params = True
- if builtin_toolgroups != ():
- logger.warning("`builtin_toolgroups` is deprecated. Use `tools` instead.")
- use_deprecated_params = True
+ if json_response_format:
+ logger.warning("`json_response_format` is deprecated and will be removed in a future release.")
- if use_deprecated_params:
- agent_config = get_agent_config_DEPRECATED(
- client=client,
- model=model,
- builtin_toolgroups=builtin_toolgroups,
- client_tools=client_tools,
- json_response_format=json_response_format,
- )
- super().__init__(
- client=client,
- agent_config=agent_config,
- client_tools=client_tools,
- tool_parser=tool_parser,
- extra_headers=extra_headers,
- )
+ if tool_parser is None:
+ tool_parser = ReActToolParser()
- else:
- if not tool_config:
- tool_config = {
- "tool_choice": "auto",
- "system_message_behavior": "replace",
- }
+ tool_list = tools or []
+ client_tool_instances = AgentUtils.get_client_tools(tool_list)
+ server_tool_defs = tuple(tool for tool in tool_list if isinstance(tool, Mapping))
- if json_response_format:
- if instructions is not None:
- logger.warning(
- "Using a custom instructions, but json_response_format is set. Please make sure instructions are"
- "compatible with the default ReAct output format."
- )
- response_format = {
- "type": "json_schema",
- "json_schema": ReActOutput.model_json_schema(),
- }
+ if instructions is None:
+ tool_definitions = _collect_tool_definitions(tuple(server_tool_defs), tuple(client_tool_instances))
+ instructions = get_default_react_instructions(tool_definitions)
- # build REACT instructions
- client_tools = AgentUtils.get_client_tools(tools)
- builtin_toolgroups = [x for x in tools if isinstance(x, str) or isinstance(x, dict)]
- if not instructions:
- instructions = get_default_react_instructions(client, builtin_toolgroups, client_tools)
-
- super().__init__(
- client=client,
- model=model,
- tool_parser=tool_parser,
- instructions=instructions,
- tools=tools,
- tool_config=tool_config,
- sampling_params=sampling_params,
- max_infer_iters=max_infer_iters,
- input_shields=input_shields,
- output_shields=output_shields,
- response_format=response_format,
- enable_session_persistence=enable_session_persistence,
- extra_headers=extra_headers,
- )
+ super().__init__(
+ client=client,
+ model=model,
+ instructions=instructions,
+ tools=tool_list,
+ tool_parser=tool_parser,
+ extra_headers=extra_headers,
+ )
diff --git a/src/llama_stack_client/lib/agents/react/tool_parser.py b/src/llama_stack_client/lib/agents/react/tool_parser.py
index a796abac..9120f83d 100644
--- a/src/llama_stack_client/lib/agents/react/tool_parser.py
+++ b/src/llama_stack_client/lib/agents/react/tool_parser.py
@@ -8,8 +8,7 @@
import uuid
from typing import List, Optional, Union
-from llama_stack_client.types.shared.completion_message import CompletionMessage
-from llama_stack_client.types.shared.tool_call import ToolCall
+from ..types import CompletionMessage, ToolCall
from pydantic import BaseModel, ValidationError
diff --git a/src/llama_stack_client/lib/agents/tool_parser.py b/src/llama_stack_client/lib/agents/tool_parser.py
index ca8d28ea..0e6a97ad 100644
--- a/src/llama_stack_client/lib/agents/tool_parser.py
+++ b/src/llama_stack_client/lib/agents/tool_parser.py
@@ -7,8 +7,7 @@
from abc import abstractmethod
from typing import List
-from llama_stack_client.types.alpha.agents.turn import CompletionMessage
-from llama_stack_client.types.shared.tool_call import ToolCall
+from .types import CompletionMessage, ToolCall
class ToolParser:
diff --git a/src/llama_stack_client/lib/agents/turn_events.py b/src/llama_stack_client/lib/agents/turn_events.py
new file mode 100644
index 00000000..cca11095
--- /dev/null
+++ b/src/llama_stack_client/lib/agents/turn_events.py
@@ -0,0 +1,275 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
+"""High-level turn and step events for agent interactions.
+
+This module defines the semantic event model that wraps the lower-level
+responses API stream events. It provides a turn/step conceptual model that
+makes agent interactions easier to understand and work with.
+
+Key concepts:
+- Turn: A complete interaction loop that may span multiple responses
+- Step: A distinct phase within a turn (inference or tool_execution)
+- Delta: Incremental updates during step execution
+- Result: Complete output when a step finishes
+"""
+
+from dataclasses import dataclass
+from typing import Union, List, Optional, Dict, Any, Literal
+
+from .types import ToolCall
+
# Public API of this module; keep in sync with the definitions below.
__all__ = [
    "TurnStarted",
    "TurnCompleted",
    "TurnFailed",
    "StepStarted",
    "StepProgress",
    "StepCompleted",
    "TextDelta",
    "ToolCallIssuedDelta",
    "ToolCallDelta",
    "ToolCallCompletedDelta",
    "StepDelta",
    "InferenceStepResult",
    "ToolExecutionStepResult",
    "StepResult",
    "AgentEvent",
    "AgentStreamChunk",
]
+
+
+# ============= Turn-Level Events =============
+
+
@dataclass
class TurnStarted:
    """Emitted when agent begins processing user input.

    This marks the beginning of a complete interaction cycle that may
    involve multiple inference steps and tool executions.
    """

    turn_id: str  # Identifier of the turn being started
    session_id: str  # Session this turn belongs to
    event_type: Literal["turn_started"] = "turn_started"  # Discriminator for consumers
+
+
@dataclass
class TurnCompleted:
    """Emitted when agent finishes with final answer.

    This marks the end of a turn when the model has produced a final
    response without any pending client-side tool calls.
    """

    turn_id: str
    session_id: str
    final_text: str  # Final assistant text produced for the turn
    response_ids: List[str]  # All response IDs involved in this turn
    num_steps: int  # Total number of steps executed during the turn
    event_type: Literal["turn_completed"] = "turn_completed"  # Discriminator for consumers
+
+
@dataclass
class TurnFailed:
    """Emitted if turn processing fails.

    This indicates an unrecoverable error during turn processing.
    """

    turn_id: str
    session_id: str
    error_message: str  # Human-readable description of the failure
    event_type: Literal["turn_failed"] = "turn_failed"  # Discriminator for consumers
+
+
+# ============= Step-Level Events =============
+
+
@dataclass
class StepStarted:
    """Emitted when a distinct work phase begins.

    Steps represent distinct phases of work within a turn:
    - inference: Model thinking/generation (deciding what to do)
    - tool_execution: Tool execution (server-side or client-side)
    """

    step_id: str  # Unique id of this step within the turn
    step_type: Literal["inference", "tool_execution"]
    turn_id: str  # Turn this step belongs to
    event_type: Literal["step_started"] = "step_started"  # Discriminator for consumers
    metadata: Optional[Dict[str, Any]] = None  # e.g., {"server_side": True/False, "tool_type": "file_search"}
+
+
+# ============= Progress Delta Types =============
+
+
@dataclass
class TextDelta:
    """Incremental text during inference.

    Emitted as the model generates text token by token.
    """

    text: str  # The newly generated text fragment
    delta_type: Literal["text"] = "text"  # Discriminator within StepDelta
+
+
@dataclass
class ToolCallIssuedDelta:
    """Model initiates a tool call (client or server-side).

    This is emitted when the model decides to call a tool. The tool_type
    field indicates whether this is:
    - "function": Client-side tool requiring client execution
    - Other types: Server-side tools executed within the response
    """

    call_id: str  # Correlates this call with later deltas/completions
    tool_type: Literal["function", "file_search", "web_search", "mcp_call", "mcp_list_tools", "memory_retrieval"]
    tool_name: str  # Name of the tool being invoked
    arguments: str  # JSON string
    delta_type: Literal["tool_call_issued"] = "tool_call_issued"  # Discriminator within StepDelta
+
+
@dataclass
class ToolCallDelta:
    """Incremental tool call arguments (streaming).

    Emitted as the model streams tool call arguments. The arguments
    are accumulated over multiple deltas to form the complete JSON.
    """

    call_id: str  # Which in-flight tool call this fragment belongs to
    arguments_delta: str  # Raw JSON fragment to append to the accumulated arguments
    delta_type: Literal["tool_call_delta"] = "tool_call_delta"  # Discriminator within StepDelta
+
+
@dataclass
class ToolCallCompletedDelta:
    """Server-side tool execution completed.

    Emitted when a server-side tool (file_search, web_search, etc.)
    finishes execution. The result field contains the tool output.

    Note: Client-side function tools do NOT emit this event; instead
    they trigger a separate tool_execution step.
    """

    call_id: str  # Which tool call finished
    tool_type: Literal["file_search", "web_search", "mcp_call", "mcp_list_tools", "memory_retrieval"]
    tool_name: str
    result: Any  # Tool execution result from server
    delta_type: Literal["tool_call_completed"] = "tool_call_completed"  # Discriminator within StepDelta


# Union of all delta types that may appear in StepProgress.delta
StepDelta = Union[TextDelta, ToolCallIssuedDelta, ToolCallDelta, ToolCallCompletedDelta]
+
+
@dataclass
class StepProgress:
    """Emitted during step execution with streaming updates.

    Progress events provide real-time updates as a step executes,
    including text deltas and tool call information.
    """

    step_id: str  # Step this update belongs to
    step_type: Literal["inference", "tool_execution"]
    turn_id: str
    delta: StepDelta  # The actual incremental payload (text or tool-call data)
    event_type: Literal["step_progress"] = "step_progress"  # Discriminator for consumers
+
+
+# ============= Step Result Types =============
+
+
@dataclass
class InferenceStepResult:
    """Complete inference step output.

    This contains the final accumulated state after an inference step
    completes. It separates client-side function calls (which need
    client execution) from server-side tool executions (which are
    included for logging/reference only).
    """

    step_id: str
    response_id: str  # The Responses API response this step belongs to
    text_content: str  # All text accumulated during the step
    function_calls: List[ToolCall]  # Client-side function calls that need execution
    server_tool_executions: List[Dict[str, Any]]  # Server-side tool calls (for reference/logging)
    stop_reason: str  # Why the step ended, e.g. "tool_calls" or "end_of_turn"
+
+
@dataclass
class ToolExecutionStepResult:
    """Complete tool execution step output (client-side only).

    This contains the results of executing client-side function tools.
    These results will be fed back to the model in the next inference step.
    """

    step_id: str
    tool_calls: List[ToolCall]  # Function calls executed
    tool_responses: List[Dict[str, Any]]  # Normalized responses


# Union of all result types that may appear in StepCompleted.result
StepResult = Union[InferenceStepResult, ToolExecutionStepResult]
+
+
@dataclass
class StepCompleted:
    """Emitted when a step finishes.

    This provides the complete result of the step execution, including
    all accumulated data and final state.
    """

    step_id: str
    step_type: Literal["inference", "tool_execution"]
    turn_id: str
    result: StepResult  # InferenceStepResult or ToolExecutionStepResult, matching step_type
    event_type: Literal["step_completed"] = "step_completed"  # Discriminator for consumers
+
+
# ============= Unified Event Type =============


# Union of all event types; discriminate on the `event_type` field or with isinstance().
AgentEvent = Union[
    TurnStarted,
    StepStarted,
    StepProgress,
    StepCompleted,
    TurnCompleted,
    TurnFailed,
]
+
+
@dataclass
class AgentStreamChunk:
    """What the agent yields to users.

    This is the top-level container for streaming events. Each chunk
    contains a high-level event (turn or step) and optionally the
    final response payload when the turn completes.

    Usage:
        for chunk in agent.create_turn(messages, session_id, stream=True):
            if isinstance(chunk.event, StepProgress):
                if isinstance(chunk.event.delta, TextDelta):
                    print(chunk.event.delta.text, end="")
            elif isinstance(chunk.event, TurnCompleted):
                print(f"\\nDone! Response: {chunk.response}")
    """

    event: AgentEvent  # The high-level turn/step event for this chunk
    response: Optional[Any] = None  # Only set on TurnCompleted
diff --git a/src/llama_stack_client/lib/agents/types.py b/src/llama_stack_client/lib/agents/types.py
new file mode 100644
index 00000000..8ec67485
--- /dev/null
+++ b/src/llama_stack_client/lib/agents/types.py
@@ -0,0 +1,63 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
+"""Lightweight agent-facing types that avoid llama-stack SDK dependencies."""
+
+from __future__ import annotations
+
+from dataclasses import dataclass, field
+from typing import Any, Dict, List, Protocol, TypedDict
+
+
+@dataclass
+class ToolCall:
+ """Minimal representation of an issued tool call."""
+
+ call_id: str
+ tool_name: str
+ arguments: str
+
+
+@dataclass
+class ToolResponse:
+ """Payload returned from executing a client-side tool."""
+
+ call_id: str
+ tool_name: str
+ content: Any
+ metadata: Dict[str, Any] = field(default_factory=dict)
+
+
+@dataclass
+class CompletionMessage:
+ """Synthetic completion message mirroring the OpenAI Responses schema."""
+
+ role: str
+ content: Any
+ tool_calls: List[ToolCall]
+ stop_reason: str
+
+
+Message = CompletionMessage
+
+
+class ToolDefinition(TypedDict, total=False):
+ """Definition object passed to the Responses API when registering tools."""
+
+ type: str
+ name: str
+ description: str
+ parameters: Dict[str, Any]
+
+
+class FunctionTool(Protocol):
+ """Protocol describing the minimal surface area we expect from tools."""
+
+ def get_name(self) -> str: ...
+
+ def get_description(self) -> str: ...
+
+ def get_input_schema(self) -> Dict[str, Any]: ...
diff --git a/src/llama_stack_client/lib/cli/inspect/__init__.py b/src/llama_stack_client/lib/cli/inspect/__init__.py
index db651969..c2ccb0c7 100644
--- a/src/llama_stack_client/lib/cli/inspect/__init__.py
+++ b/src/llama_stack_client/lib/cli/inspect/__init__.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
from .inspect import inspect
__all__ = ["inspect"]
diff --git a/src/llama_stack_client/lib/cli/inspect/inspect.py b/src/llama_stack_client/lib/cli/inspect/inspect.py
index f9c85b1b..3caee63a 100644
--- a/src/llama_stack_client/lib/cli/inspect/inspect.py
+++ b/src/llama_stack_client/lib/cli/inspect/inspect.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
import click
from .version import inspect_version
diff --git a/src/llama_stack_client/lib/cli/inspect/version.py b/src/llama_stack_client/lib/cli/inspect/version.py
index 0a32195d..d1bf604c 100644
--- a/src/llama_stack_client/lib/cli/inspect/version.py
+++ b/src/llama_stack_client/lib/cli/inspect/version.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
import click
from rich.console import Console
diff --git a/src/llama_stack_client/lib/cli/providers/__init__.py b/src/llama_stack_client/lib/cli/providers/__init__.py
index 2e632915..61601717 100644
--- a/src/llama_stack_client/lib/cli/providers/__init__.py
+++ b/src/llama_stack_client/lib/cli/providers/__init__.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
from .providers import providers
__all__ = ["providers"]
diff --git a/src/llama_stack_client/lib/cli/providers/inspect.py b/src/llama_stack_client/lib/cli/providers/inspect.py
index 7902849b..b70b556f 100644
--- a/src/llama_stack_client/lib/cli/providers/inspect.py
+++ b/src/llama_stack_client/lib/cli/providers/inspect.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
import click
import yaml
from rich.console import Console
diff --git a/src/llama_stack_client/lib/cli/providers/list.py b/src/llama_stack_client/lib/cli/providers/list.py
index 692860e3..708ed9c8 100644
--- a/src/llama_stack_client/lib/cli/providers/list.py
+++ b/src/llama_stack_client/lib/cli/providers/list.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
import click
from rich.console import Console
from rich.table import Table
diff --git a/src/llama_stack_client/lib/cli/providers/providers.py b/src/llama_stack_client/lib/cli/providers/providers.py
index bd07628d..1849d5f7 100644
--- a/src/llama_stack_client/lib/cli/providers/providers.py
+++ b/src/llama_stack_client/lib/cli/providers/providers.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
import click
from .list import list_providers
diff --git a/src/llama_stack_client/lib/inline/inline.py b/src/llama_stack_client/lib/inline/inline.py
index e69de29b..756f351d 100644
--- a/src/llama_stack_client/lib/inline/inline.py
+++ b/src/llama_stack_client/lib/inline/inline.py
@@ -0,0 +1,5 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
diff --git a/src/llama_stack_client/lib/stream_printer.py b/src/llama_stack_client/lib/stream_printer.py
index a08d9663..1a302787 100644
--- a/src/llama_stack_client/lib/stream_printer.py
+++ b/src/llama_stack_client/lib/stream_printer.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
from .agents.event_logger import TurnStreamEventPrinter
from .inference.event_logger import InferenceStreamLogEventPrinter
diff --git a/src/llama_stack_client/lib/tools/mcp_oauth.py b/src/llama_stack_client/lib/tools/mcp_oauth.py
index 503b9c69..6125b873 100644
--- a/src/llama_stack_client/lib/tools/mcp_oauth.py
+++ b/src/llama_stack_client/lib/tools/mcp_oauth.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
import asyncio
import base64
import hashlib
diff --git a/src/llama_stack_client/pagination.py b/src/llama_stack_client/pagination.py
index 67106bc5..7ee7118d 100644
--- a/src/llama_stack_client/pagination.py
+++ b/src/llama_stack_client/pagination.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import List, Generic, TypeVar, Optional
diff --git a/src/llama_stack_client/resources/__init__.py b/src/llama_stack_client/resources/__init__.py
index b1a7a120..60b18979 100644
--- a/src/llama_stack_client/resources/__init__.py
+++ b/src/llama_stack_client/resources/__init__.py
@@ -1,5 +1,19 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+from .beta import (
+ BetaResource,
+ AsyncBetaResource,
+ BetaResourceWithRawResponse,
+ AsyncBetaResourceWithRawResponse,
+ BetaResourceWithStreamingResponse,
+ AsyncBetaResourceWithStreamingResponse,
+)
from .chat import (
ChatResource,
AsyncChatResource,
@@ -80,14 +94,6 @@
ShieldsResourceWithStreamingResponse,
AsyncShieldsResourceWithStreamingResponse,
)
-from .datasets import (
- DatasetsResource,
- AsyncDatasetsResource,
- DatasetsResourceWithRawResponse,
- AsyncDatasetsResourceWithRawResponse,
- DatasetsResourceWithStreamingResponse,
- AsyncDatasetsResourceWithStreamingResponse,
-)
from .providers import (
ProvidersResource,
AsyncProvidersResource,
@@ -104,14 +110,6 @@
ResponsesResourceWithStreamingResponse,
AsyncResponsesResourceWithStreamingResponse,
)
-from .telemetry import (
- TelemetryResource,
- AsyncTelemetryResource,
- TelemetryResourceWithRawResponse,
- AsyncTelemetryResourceWithRawResponse,
- TelemetryResourceWithStreamingResponse,
- AsyncTelemetryResourceWithStreamingResponse,
-)
from .vector_io import (
VectorIoResource,
AsyncVectorIoResource,
@@ -120,14 +118,6 @@
VectorIoResourceWithStreamingResponse,
AsyncVectorIoResourceWithStreamingResponse,
)
-from .benchmarks import (
- BenchmarksResource,
- AsyncBenchmarksResource,
- BenchmarksResourceWithRawResponse,
- AsyncBenchmarksResourceWithRawResponse,
- BenchmarksResourceWithStreamingResponse,
- AsyncBenchmarksResourceWithStreamingResponse,
-)
from .embeddings import (
EmbeddingsResource,
AsyncEmbeddingsResource,
@@ -232,12 +222,6 @@
"AsyncConversationsResourceWithRawResponse",
"ConversationsResourceWithStreamingResponse",
"AsyncConversationsResourceWithStreamingResponse",
- "DatasetsResource",
- "AsyncDatasetsResource",
- "DatasetsResourceWithRawResponse",
- "AsyncDatasetsResourceWithRawResponse",
- "DatasetsResourceWithStreamingResponse",
- "AsyncDatasetsResourceWithStreamingResponse",
"InspectResource",
"AsyncInspectResource",
"InspectResourceWithRawResponse",
@@ -316,12 +300,6 @@
"AsyncSyntheticDataGenerationResourceWithRawResponse",
"SyntheticDataGenerationResourceWithStreamingResponse",
"AsyncSyntheticDataGenerationResourceWithStreamingResponse",
- "TelemetryResource",
- "AsyncTelemetryResource",
- "TelemetryResourceWithRawResponse",
- "AsyncTelemetryResourceWithRawResponse",
- "TelemetryResourceWithStreamingResponse",
- "AsyncTelemetryResourceWithStreamingResponse",
"ScoringResource",
"AsyncScoringResource",
"ScoringResourceWithRawResponse",
@@ -334,12 +312,6 @@
"AsyncScoringFunctionsResourceWithRawResponse",
"ScoringFunctionsResourceWithStreamingResponse",
"AsyncScoringFunctionsResourceWithStreamingResponse",
- "BenchmarksResource",
- "AsyncBenchmarksResource",
- "BenchmarksResourceWithRawResponse",
- "AsyncBenchmarksResourceWithRawResponse",
- "BenchmarksResourceWithStreamingResponse",
- "AsyncBenchmarksResourceWithStreamingResponse",
"FilesResource",
"AsyncFilesResource",
"FilesResourceWithRawResponse",
@@ -352,4 +324,10 @@
"AsyncAlphaResourceWithRawResponse",
"AlphaResourceWithStreamingResponse",
"AsyncAlphaResourceWithStreamingResponse",
+ "BetaResource",
+ "AsyncBetaResource",
+ "BetaResourceWithRawResponse",
+ "AsyncBetaResourceWithRawResponse",
+ "BetaResourceWithStreamingResponse",
+ "AsyncBetaResourceWithStreamingResponse",
]
diff --git a/src/llama_stack_client/resources/alpha/__init__.py b/src/llama_stack_client/resources/alpha/__init__.py
index c3c4b0d9..ae13bed1 100644
--- a/src/llama_stack_client/resources/alpha/__init__.py
+++ b/src/llama_stack_client/resources/alpha/__init__.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from .eval import (
@@ -32,6 +38,14 @@
InferenceResourceWithStreamingResponse,
AsyncInferenceResourceWithStreamingResponse,
)
+from .benchmarks import (
+ BenchmarksResource,
+ AsyncBenchmarksResource,
+ BenchmarksResourceWithRawResponse,
+ AsyncBenchmarksResourceWithRawResponse,
+ BenchmarksResourceWithStreamingResponse,
+ AsyncBenchmarksResourceWithStreamingResponse,
+)
from .post_training import (
PostTrainingResource,
AsyncPostTrainingResource,
@@ -54,6 +68,12 @@
"AsyncPostTrainingResourceWithRawResponse",
"PostTrainingResourceWithStreamingResponse",
"AsyncPostTrainingResourceWithStreamingResponse",
+ "BenchmarksResource",
+ "AsyncBenchmarksResource",
+ "BenchmarksResourceWithRawResponse",
+ "AsyncBenchmarksResourceWithRawResponse",
+ "BenchmarksResourceWithStreamingResponse",
+ "AsyncBenchmarksResourceWithStreamingResponse",
"EvalResource",
"AsyncEvalResource",
"EvalResourceWithRawResponse",
diff --git a/src/llama_stack_client/resources/alpha/agents/__init__.py b/src/llama_stack_client/resources/alpha/agents/__init__.py
index 17f0098f..6502dfa1 100644
--- a/src/llama_stack_client/resources/alpha/agents/__init__.py
+++ b/src/llama_stack_client/resources/alpha/agents/__init__.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from .turn import (
diff --git a/src/llama_stack_client/resources/alpha/agents/agents.py b/src/llama_stack_client/resources/alpha/agents/agents.py
index 0e81cce7..ac5f58e4 100644
--- a/src/llama_stack_client/resources/alpha/agents/agents.py
+++ b/src/llama_stack_client/resources/alpha/agents/agents.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/resources/alpha/agents/session.py b/src/llama_stack_client/resources/alpha/agents/session.py
index 2e980add..ae2b5af6 100644
--- a/src/llama_stack_client/resources/alpha/agents/session.py
+++ b/src/llama_stack_client/resources/alpha/agents/session.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/resources/alpha/agents/steps.py b/src/llama_stack_client/resources/alpha/agents/steps.py
index 838822d0..83624bef 100644
--- a/src/llama_stack_client/resources/alpha/agents/steps.py
+++ b/src/llama_stack_client/resources/alpha/agents/steps.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/resources/alpha/agents/turn.py b/src/llama_stack_client/resources/alpha/agents/turn.py
index ffe766b6..85e3d1e5 100644
--- a/src/llama_stack_client/resources/alpha/agents/turn.py
+++ b/src/llama_stack_client/resources/alpha/agents/turn.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/resources/alpha/alpha.py b/src/llama_stack_client/resources/alpha/alpha.py
index 77d4115b..9ba65570 100644
--- a/src/llama_stack_client/resources/alpha/alpha.py
+++ b/src/llama_stack_client/resources/alpha/alpha.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
@@ -19,6 +25,14 @@
InferenceResourceWithStreamingResponse,
AsyncInferenceResourceWithStreamingResponse,
)
+from .benchmarks import (
+ BenchmarksResource,
+ AsyncBenchmarksResource,
+ BenchmarksResourceWithRawResponse,
+ AsyncBenchmarksResourceWithRawResponse,
+ BenchmarksResourceWithStreamingResponse,
+ AsyncBenchmarksResourceWithStreamingResponse,
+)
from ..._resource import SyncAPIResource, AsyncAPIResource
from .agents.agents import (
AgentsResource,
@@ -49,6 +63,10 @@ def inference(self) -> InferenceResource:
def post_training(self) -> PostTrainingResource:
return PostTrainingResource(self._client)
+ @cached_property
+ def benchmarks(self) -> BenchmarksResource:
+ return BenchmarksResource(self._client)
+
@cached_property
def eval(self) -> EvalResource:
return EvalResource(self._client)
@@ -86,6 +104,10 @@ def inference(self) -> AsyncInferenceResource:
def post_training(self) -> AsyncPostTrainingResource:
return AsyncPostTrainingResource(self._client)
+ @cached_property
+ def benchmarks(self) -> AsyncBenchmarksResource:
+ return AsyncBenchmarksResource(self._client)
+
@cached_property
def eval(self) -> AsyncEvalResource:
return AsyncEvalResource(self._client)
@@ -126,6 +148,10 @@ def inference(self) -> InferenceResourceWithRawResponse:
def post_training(self) -> PostTrainingResourceWithRawResponse:
return PostTrainingResourceWithRawResponse(self._alpha.post_training)
+ @cached_property
+ def benchmarks(self) -> BenchmarksResourceWithRawResponse:
+ return BenchmarksResourceWithRawResponse(self._alpha.benchmarks)
+
@cached_property
def eval(self) -> EvalResourceWithRawResponse:
return EvalResourceWithRawResponse(self._alpha.eval)
@@ -147,6 +173,10 @@ def inference(self) -> AsyncInferenceResourceWithRawResponse:
def post_training(self) -> AsyncPostTrainingResourceWithRawResponse:
return AsyncPostTrainingResourceWithRawResponse(self._alpha.post_training)
+ @cached_property
+ def benchmarks(self) -> AsyncBenchmarksResourceWithRawResponse:
+ return AsyncBenchmarksResourceWithRawResponse(self._alpha.benchmarks)
+
@cached_property
def eval(self) -> AsyncEvalResourceWithRawResponse:
return AsyncEvalResourceWithRawResponse(self._alpha.eval)
@@ -168,6 +198,10 @@ def inference(self) -> InferenceResourceWithStreamingResponse:
def post_training(self) -> PostTrainingResourceWithStreamingResponse:
return PostTrainingResourceWithStreamingResponse(self._alpha.post_training)
+ @cached_property
+ def benchmarks(self) -> BenchmarksResourceWithStreamingResponse:
+ return BenchmarksResourceWithStreamingResponse(self._alpha.benchmarks)
+
@cached_property
def eval(self) -> EvalResourceWithStreamingResponse:
return EvalResourceWithStreamingResponse(self._alpha.eval)
@@ -189,6 +223,10 @@ def inference(self) -> AsyncInferenceResourceWithStreamingResponse:
def post_training(self) -> AsyncPostTrainingResourceWithStreamingResponse:
return AsyncPostTrainingResourceWithStreamingResponse(self._alpha.post_training)
+ @cached_property
+ def benchmarks(self) -> AsyncBenchmarksResourceWithStreamingResponse:
+ return AsyncBenchmarksResourceWithStreamingResponse(self._alpha.benchmarks)
+
@cached_property
def eval(self) -> AsyncEvalResourceWithStreamingResponse:
return AsyncEvalResourceWithStreamingResponse(self._alpha.eval)
diff --git a/src/llama_stack_client/resources/benchmarks.py b/src/llama_stack_client/resources/alpha/benchmarks.py
similarity index 94%
rename from src/llama_stack_client/resources/benchmarks.py
rename to src/llama_stack_client/resources/alpha/benchmarks.py
index 3d33bdcf..dc74cc85 100644
--- a/src/llama_stack_client/resources/benchmarks.py
+++ b/src/llama_stack_client/resources/alpha/benchmarks.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
@@ -6,21 +12,21 @@
import httpx
-from ..types import benchmark_register_params
-from .._types import Body, Omit, Query, Headers, NoneType, NotGiven, SequenceNotStr, omit, not_given
-from .._utils import maybe_transform, async_maybe_transform
-from .._compat import cached_property
-from .._resource import SyncAPIResource, AsyncAPIResource
-from .._response import (
+from ..._types import Body, Omit, Query, Headers, NoneType, NotGiven, SequenceNotStr, omit, not_given
+from ..._utils import maybe_transform, async_maybe_transform
+from ..._compat import cached_property
+from ..._resource import SyncAPIResource, AsyncAPIResource
+from ..._response import (
to_raw_response_wrapper,
to_streamed_response_wrapper,
async_to_raw_response_wrapper,
async_to_streamed_response_wrapper,
)
-from .._wrappers import DataWrapper
-from .._base_client import make_request_options
-from ..types.benchmark import Benchmark
-from ..types.benchmark_list_response import BenchmarkListResponse
+from ..._wrappers import DataWrapper
+from ...types.alpha import benchmark_register_params
+from ..._base_client import make_request_options
+from ...types.alpha.benchmark import Benchmark
+from ...types.alpha.benchmark_list_response import BenchmarkListResponse
__all__ = ["BenchmarksResource", "AsyncBenchmarksResource"]
diff --git a/src/llama_stack_client/resources/alpha/eval/__init__.py b/src/llama_stack_client/resources/alpha/eval/__init__.py
index f6473395..3aa93594 100644
--- a/src/llama_stack_client/resources/alpha/eval/__init__.py
+++ b/src/llama_stack_client/resources/alpha/eval/__init__.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from .eval import (
diff --git a/src/llama_stack_client/resources/alpha/eval/eval.py b/src/llama_stack_client/resources/alpha/eval/eval.py
index b5347c0b..89101510 100644
--- a/src/llama_stack_client/resources/alpha/eval/eval.py
+++ b/src/llama_stack_client/resources/alpha/eval/eval.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/resources/alpha/eval/jobs.py b/src/llama_stack_client/resources/alpha/eval/jobs.py
index 8f0fa026..94eed41e 100644
--- a/src/llama_stack_client/resources/alpha/eval/jobs.py
+++ b/src/llama_stack_client/resources/alpha/eval/jobs.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/resources/alpha/inference.py b/src/llama_stack_client/resources/alpha/inference.py
index ca259357..9db21d26 100644
--- a/src/llama_stack_client/resources/alpha/inference.py
+++ b/src/llama_stack_client/resources/alpha/inference.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/resources/alpha/post_training/__init__.py b/src/llama_stack_client/resources/alpha/post_training/__init__.py
index e1fa2361..81a6a807 100644
--- a/src/llama_stack_client/resources/alpha/post_training/__init__.py
+++ b/src/llama_stack_client/resources/alpha/post_training/__init__.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from .job import (
diff --git a/src/llama_stack_client/resources/alpha/post_training/job.py b/src/llama_stack_client/resources/alpha/post_training/job.py
index d9b7173e..8e09f335 100644
--- a/src/llama_stack_client/resources/alpha/post_training/job.py
+++ b/src/llama_stack_client/resources/alpha/post_training/job.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/resources/alpha/post_training/post_training.py b/src/llama_stack_client/resources/alpha/post_training/post_training.py
index a26c813a..9b1fe87a 100644
--- a/src/llama_stack_client/resources/alpha/post_training/post_training.py
+++ b/src/llama_stack_client/resources/alpha/post_training/post_training.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/resources/beta/__init__.py b/src/llama_stack_client/resources/beta/__init__.py
new file mode 100644
index 00000000..6fd69c43
--- /dev/null
+++ b/src/llama_stack_client/resources/beta/__init__.py
@@ -0,0 +1,33 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from .beta import (
+ BetaResource,
+ AsyncBetaResource,
+ BetaResourceWithRawResponse,
+ AsyncBetaResourceWithRawResponse,
+ BetaResourceWithStreamingResponse,
+ AsyncBetaResourceWithStreamingResponse,
+)
+from .datasets import (
+ DatasetsResource,
+ AsyncDatasetsResource,
+ DatasetsResourceWithRawResponse,
+ AsyncDatasetsResourceWithRawResponse,
+ DatasetsResourceWithStreamingResponse,
+ AsyncDatasetsResourceWithStreamingResponse,
+)
+
+__all__ = [
+ "DatasetsResource",
+ "AsyncDatasetsResource",
+ "DatasetsResourceWithRawResponse",
+ "AsyncDatasetsResourceWithRawResponse",
+ "DatasetsResourceWithStreamingResponse",
+ "AsyncDatasetsResourceWithStreamingResponse",
+ "BetaResource",
+ "AsyncBetaResource",
+ "BetaResourceWithRawResponse",
+ "AsyncBetaResourceWithRawResponse",
+ "BetaResourceWithStreamingResponse",
+ "AsyncBetaResourceWithStreamingResponse",
+]
diff --git a/src/llama_stack_client/resources/beta/beta.py b/src/llama_stack_client/resources/beta/beta.py
new file mode 100644
index 00000000..7bf1c711
--- /dev/null
+++ b/src/llama_stack_client/resources/beta/beta.py
@@ -0,0 +1,102 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from __future__ import annotations
+
+from .datasets import (
+ DatasetsResource,
+ AsyncDatasetsResource,
+ DatasetsResourceWithRawResponse,
+ AsyncDatasetsResourceWithRawResponse,
+ DatasetsResourceWithStreamingResponse,
+ AsyncDatasetsResourceWithStreamingResponse,
+)
+from ..._compat import cached_property
+from ..._resource import SyncAPIResource, AsyncAPIResource
+
+__all__ = ["BetaResource", "AsyncBetaResource"]
+
+
+class BetaResource(SyncAPIResource):
+ @cached_property
+ def datasets(self) -> DatasetsResource:
+ return DatasetsResource(self._client)
+
+ @cached_property
+ def with_raw_response(self) -> BetaResourceWithRawResponse:
+ """
+ This property can be used as a prefix for any HTTP method call to return
+ the raw response object instead of the parsed content.
+
+ For more information, see https://www.github.com/llamastack/llama-stack-client-python#accessing-raw-response-data-eg-headers
+ """
+ return BetaResourceWithRawResponse(self)
+
+ @cached_property
+ def with_streaming_response(self) -> BetaResourceWithStreamingResponse:
+ """
+ An alternative to `.with_raw_response` that doesn't eagerly read the response body.
+
+ For more information, see https://www.github.com/llamastack/llama-stack-client-python#with_streaming_response
+ """
+ return BetaResourceWithStreamingResponse(self)
+
+
+class AsyncBetaResource(AsyncAPIResource):
+ @cached_property
+ def datasets(self) -> AsyncDatasetsResource:
+ return AsyncDatasetsResource(self._client)
+
+ @cached_property
+ def with_raw_response(self) -> AsyncBetaResourceWithRawResponse:
+ """
+ This property can be used as a prefix for any HTTP method call to return
+ the raw response object instead of the parsed content.
+
+ For more information, see https://www.github.com/llamastack/llama-stack-client-python#accessing-raw-response-data-eg-headers
+ """
+ return AsyncBetaResourceWithRawResponse(self)
+
+ @cached_property
+ def with_streaming_response(self) -> AsyncBetaResourceWithStreamingResponse:
+ """
+ An alternative to `.with_raw_response` that doesn't eagerly read the response body.
+
+ For more information, see https://www.github.com/llamastack/llama-stack-client-python#with_streaming_response
+ """
+ return AsyncBetaResourceWithStreamingResponse(self)
+
+
+class BetaResourceWithRawResponse:
+ def __init__(self, beta: BetaResource) -> None:
+ self._beta = beta
+
+ @cached_property
+ def datasets(self) -> DatasetsResourceWithRawResponse:
+ return DatasetsResourceWithRawResponse(self._beta.datasets)
+
+
+class AsyncBetaResourceWithRawResponse:
+ def __init__(self, beta: AsyncBetaResource) -> None:
+ self._beta = beta
+
+ @cached_property
+ def datasets(self) -> AsyncDatasetsResourceWithRawResponse:
+ return AsyncDatasetsResourceWithRawResponse(self._beta.datasets)
+
+
+class BetaResourceWithStreamingResponse:
+ def __init__(self, beta: BetaResource) -> None:
+ self._beta = beta
+
+ @cached_property
+ def datasets(self) -> DatasetsResourceWithStreamingResponse:
+ return DatasetsResourceWithStreamingResponse(self._beta.datasets)
+
+
+class AsyncBetaResourceWithStreamingResponse:
+ def __init__(self, beta: AsyncBetaResource) -> None:
+ self._beta = beta
+
+ @cached_property
+ def datasets(self) -> AsyncDatasetsResourceWithStreamingResponse:
+ return AsyncDatasetsResourceWithStreamingResponse(self._beta.datasets)
diff --git a/src/llama_stack_client/resources/datasets.py b/src/llama_stack_client/resources/beta/datasets.py
similarity index 96%
rename from src/llama_stack_client/resources/datasets.py
rename to src/llama_stack_client/resources/beta/datasets.py
index 5824287c..03321e48 100644
--- a/src/llama_stack_client/resources/datasets.py
+++ b/src/llama_stack_client/resources/beta/datasets.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
@@ -7,23 +13,23 @@
import httpx
-from ..types import dataset_iterrows_params, dataset_register_params, dataset_appendrows_params
-from .._types import Body, Omit, Query, Headers, NoneType, NotGiven, omit, not_given
-from .._utils import maybe_transform, async_maybe_transform
-from .._compat import cached_property
-from .._resource import SyncAPIResource, AsyncAPIResource
-from .._response import (
+from ..._types import Body, Omit, Query, Headers, NoneType, NotGiven, omit, not_given
+from ..._utils import maybe_transform, async_maybe_transform
+from ..._compat import cached_property
+from ..._resource import SyncAPIResource, AsyncAPIResource
+from ..._response import (
to_raw_response_wrapper,
to_streamed_response_wrapper,
async_to_raw_response_wrapper,
async_to_streamed_response_wrapper,
)
-from .._wrappers import DataWrapper
-from .._base_client import make_request_options
-from ..types.dataset_list_response import DatasetListResponse
-from ..types.dataset_iterrows_response import DatasetIterrowsResponse
-from ..types.dataset_register_response import DatasetRegisterResponse
-from ..types.dataset_retrieve_response import DatasetRetrieveResponse
+from ..._wrappers import DataWrapper
+from ...types.beta import dataset_iterrows_params, dataset_register_params, dataset_appendrows_params
+from ..._base_client import make_request_options
+from ...types.beta.dataset_list_response import DatasetListResponse
+from ...types.beta.dataset_iterrows_response import DatasetIterrowsResponse
+from ...types.beta.dataset_register_response import DatasetRegisterResponse
+from ...types.beta.dataset_retrieve_response import DatasetRetrieveResponse
__all__ = ["DatasetsResource", "AsyncDatasetsResource"]
diff --git a/src/llama_stack_client/resources/chat/__init__.py b/src/llama_stack_client/resources/chat/__init__.py
index ec960eb4..718065af 100644
--- a/src/llama_stack_client/resources/chat/__init__.py
+++ b/src/llama_stack_client/resources/chat/__init__.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from .chat import (
diff --git a/src/llama_stack_client/resources/chat/chat.py b/src/llama_stack_client/resources/chat/chat.py
index 3e3715c1..17f8fdb8 100644
--- a/src/llama_stack_client/resources/chat/chat.py
+++ b/src/llama_stack_client/resources/chat/chat.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/resources/chat/completions.py b/src/llama_stack_client/resources/chat/completions.py
index bfcb0e73..5986a4d8 100644
--- a/src/llama_stack_client/resources/chat/completions.py
+++ b/src/llama_stack_client/resources/chat/completions.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/resources/completions.py b/src/llama_stack_client/resources/completions.py
index a4a148d9..1f7ddb60 100644
--- a/src/llama_stack_client/resources/completions.py
+++ b/src/llama_stack_client/resources/completions.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/resources/conversations/__init__.py b/src/llama_stack_client/resources/conversations/__init__.py
index 2dc61926..efa576e3 100644
--- a/src/llama_stack_client/resources/conversations/__init__.py
+++ b/src/llama_stack_client/resources/conversations/__init__.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from .items import (
diff --git a/src/llama_stack_client/resources/conversations/conversations.py b/src/llama_stack_client/resources/conversations/conversations.py
index 14be8591..66dfe7d3 100644
--- a/src/llama_stack_client/resources/conversations/conversations.py
+++ b/src/llama_stack_client/resources/conversations/conversations.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
@@ -68,7 +74,8 @@ def create(
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> ConversationObject:
- """
+ """Create a conversation.
+
Create a conversation.
Args:
@@ -110,7 +117,8 @@ def retrieve(
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> ConversationObject:
- """
+ """Retrieve a conversation.
+
Get a conversation with the given ID.
Args:
@@ -144,7 +152,8 @@ def update(
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> ConversationObject:
- """
+ """Update a conversation.
+
Update a conversation's metadata with the given ID.
Args:
@@ -180,7 +189,8 @@ def delete(
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> ConversationDeleteResponse:
- """
+ """Delete a conversation.
+
Delete a conversation with the given ID.
Args:
@@ -239,7 +249,8 @@ async def create(
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> ConversationObject:
- """
+ """Create a conversation.
+
Create a conversation.
Args:
@@ -281,7 +292,8 @@ async def retrieve(
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> ConversationObject:
- """
+ """Retrieve a conversation.
+
Get a conversation with the given ID.
Args:
@@ -315,7 +327,8 @@ async def update(
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> ConversationObject:
- """
+ """Update a conversation.
+
Update a conversation's metadata with the given ID.
Args:
@@ -353,7 +366,8 @@ async def delete(
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> ConversationDeleteResponse:
- """
+ """Delete a conversation.
+
Delete a conversation with the given ID.
Args:
diff --git a/src/llama_stack_client/resources/conversations/items.py b/src/llama_stack_client/resources/conversations/items.py
index 76d246f1..4850adf8 100644
--- a/src/llama_stack_client/resources/conversations/items.py
+++ b/src/llama_stack_client/resources/conversations/items.py
@@ -1,13 +1,19 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
-from typing import Any, List, Union, Iterable, cast
+from typing import Any, List, Iterable, cast
from typing_extensions import Literal
import httpx
-from ..._types import Body, Query, Headers, NotGiven, not_given
+from ..._types import Body, Omit, Query, Headers, NotGiven, omit, not_given
from ..._utils import maybe_transform, async_maybe_transform
from ..._compat import cached_property
from ..._resource import SyncAPIResource, AsyncAPIResource
@@ -17,7 +23,8 @@
async_to_raw_response_wrapper,
async_to_streamed_response_wrapper,
)
-from ..._base_client import make_request_options
+from ...pagination import SyncOpenAICursorPage, AsyncOpenAICursorPage
+from ..._base_client import AsyncPaginator, make_request_options
from ...types.conversations import item_list_params, item_create_params
from ...types.conversations.item_get_response import ItemGetResponse
from ...types.conversations.item_list_response import ItemListResponse
@@ -58,7 +65,8 @@ def create(
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> ItemCreateResponse:
- """
+ """Create items.
+
Create items in the conversation.
Args:
@@ -87,30 +95,30 @@ def list(
self,
conversation_id: str,
*,
- after: Union[str, object],
- include: Union[
- List[
- Literal[
- "code_interpreter_call.outputs",
- "computer_call_output.output.image_url",
- "file_search_call.results",
- "message.input_image.image_url",
- "message.output_text.logprobs",
- "reasoning.encrypted_content",
- ]
- ],
- object,
- ],
- limit: Union[int, object],
- order: Union[Literal["asc", "desc"], object],
+ after: str | Omit = omit,
+ include: List[
+ Literal[
+ "web_search_call.action.sources",
+ "code_interpreter_call.outputs",
+ "computer_call_output.output.image_url",
+ "file_search_call.results",
+ "message.input_image.image_url",
+ "message.output_text.logprobs",
+ "reasoning.encrypted_content",
+ ]
+ ]
+ | Omit = omit,
+ limit: int | Omit = omit,
+ order: Literal["asc", "desc"] | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = not_given,
- ) -> ItemListResponse:
- """
+ ) -> SyncOpenAICursorPage[ItemListResponse]:
+ """List items.
+
List items in the conversation.
Args:
@@ -132,8 +140,9 @@ def list(
"""
if not conversation_id:
raise ValueError(f"Expected a non-empty value for `conversation_id` but received {conversation_id!r}")
- return self._get(
+ return self._get_api_list(
f"/v1/conversations/{conversation_id}/items",
+ page=SyncOpenAICursorPage[ItemListResponse],
options=make_request_options(
extra_headers=extra_headers,
extra_query=extra_query,
@@ -149,7 +158,7 @@ def list(
item_list_params.ItemListParams,
),
),
- cast_to=ItemListResponse,
+ model=cast(Any, ItemListResponse), # Union types cannot be passed in as arguments in the type system
)
def get(
@@ -164,7 +173,8 @@ def get(
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> ItemGetResponse:
- """
+ """Retrieve an item.
+
Retrieve a conversation item.
Args:
@@ -224,7 +234,8 @@ async def create(
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> ItemCreateResponse:
- """
+ """Create items.
+
Create items in the conversation.
Args:
@@ -249,34 +260,34 @@ async def create(
cast_to=ItemCreateResponse,
)
- async def list(
+ def list(
self,
conversation_id: str,
*,
- after: Union[str, object],
- include: Union[
- List[
- Literal[
- "code_interpreter_call.outputs",
- "computer_call_output.output.image_url",
- "file_search_call.results",
- "message.input_image.image_url",
- "message.output_text.logprobs",
- "reasoning.encrypted_content",
- ]
- ],
- object,
- ],
- limit: Union[int, object],
- order: Union[Literal["asc", "desc"], object],
+ after: str | Omit = omit,
+ include: List[
+ Literal[
+ "web_search_call.action.sources",
+ "code_interpreter_call.outputs",
+ "computer_call_output.output.image_url",
+ "file_search_call.results",
+ "message.input_image.image_url",
+ "message.output_text.logprobs",
+ "reasoning.encrypted_content",
+ ]
+ ]
+ | Omit = omit,
+ limit: int | Omit = omit,
+ order: Literal["asc", "desc"] | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = not_given,
- ) -> ItemListResponse:
- """
+ ) -> AsyncPaginator[ItemListResponse, AsyncOpenAICursorPage[ItemListResponse]]:
+ """List items.
+
List items in the conversation.
Args:
@@ -298,14 +309,15 @@ async def list(
"""
if not conversation_id:
raise ValueError(f"Expected a non-empty value for `conversation_id` but received {conversation_id!r}")
- return await self._get(
+ return self._get_api_list(
f"/v1/conversations/{conversation_id}/items",
+ page=AsyncOpenAICursorPage[ItemListResponse],
options=make_request_options(
extra_headers=extra_headers,
extra_query=extra_query,
extra_body=extra_body,
timeout=timeout,
- query=await async_maybe_transform(
+ query=maybe_transform(
{
"after": after,
"include": include,
@@ -315,7 +327,7 @@ async def list(
item_list_params.ItemListParams,
),
),
- cast_to=ItemListResponse,
+ model=cast(Any, ItemListResponse), # Union types cannot be passed in as arguments in the type system
)
async def get(
@@ -330,7 +342,8 @@ async def get(
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> ItemGetResponse:
- """
+ """Retrieve an item.
+
Retrieve a conversation item.
Args:
diff --git a/src/llama_stack_client/resources/embeddings.py b/src/llama_stack_client/resources/embeddings.py
index 512695f6..b1cbd018 100644
--- a/src/llama_stack_client/resources/embeddings.py
+++ b/src/llama_stack_client/resources/embeddings.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/resources/files.py b/src/llama_stack_client/resources/files.py
index 64071d91..5916dd3b 100644
--- a/src/llama_stack_client/resources/files.py
+++ b/src/llama_stack_client/resources/files.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/resources/inspect.py b/src/llama_stack_client/resources/inspect.py
index 45c94e8b..eaa4b9f7 100644
--- a/src/llama_stack_client/resources/inspect.py
+++ b/src/llama_stack_client/resources/inspect.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/resources/models/__init__.py b/src/llama_stack_client/resources/models/__init__.py
index fc06a000..9c1eb52b 100644
--- a/src/llama_stack_client/resources/models/__init__.py
+++ b/src/llama_stack_client/resources/models/__init__.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from .models import (
diff --git a/src/llama_stack_client/resources/models/models.py b/src/llama_stack_client/resources/models/models.py
index 376b6f33..99ebccdd 100644
--- a/src/llama_stack_client/resources/models/models.py
+++ b/src/llama_stack_client/resources/models/models.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
@@ -120,7 +126,7 @@ def register(
*,
model_id: str,
metadata: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | Omit = omit,
- model_type: Literal["llm", "embedding"] | Omit = omit,
+ model_type: Literal["llm", "embedding", "rerank"] | Omit = omit,
provider_id: str | Omit = omit,
provider_model_id: str | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
@@ -293,7 +299,7 @@ async def register(
*,
model_id: str,
metadata: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | Omit = omit,
- model_type: Literal["llm", "embedding"] | Omit = omit,
+ model_type: Literal["llm", "embedding", "rerank"] | Omit = omit,
provider_id: str | Omit = omit,
provider_model_id: str | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
diff --git a/src/llama_stack_client/resources/models/openai.py b/src/llama_stack_client/resources/models/openai.py
index ab4b4038..c5ff1738 100644
--- a/src/llama_stack_client/resources/models/openai.py
+++ b/src/llama_stack_client/resources/models/openai.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/resources/moderations.py b/src/llama_stack_client/resources/moderations.py
index 199a8d0a..420641ac 100644
--- a/src/llama_stack_client/resources/moderations.py
+++ b/src/llama_stack_client/resources/moderations.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/resources/providers.py b/src/llama_stack_client/resources/providers.py
index f8fea984..19222338 100644
--- a/src/llama_stack_client/resources/providers.py
+++ b/src/llama_stack_client/resources/providers.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/resources/responses/__init__.py b/src/llama_stack_client/resources/responses/__init__.py
index 230ef765..d1e33c97 100644
--- a/src/llama_stack_client/resources/responses/__init__.py
+++ b/src/llama_stack_client/resources/responses/__init__.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from .responses import (
diff --git a/src/llama_stack_client/resources/responses/input_items.py b/src/llama_stack_client/resources/responses/input_items.py
index d6643a3b..6e802d13 100644
--- a/src/llama_stack_client/resources/responses/input_items.py
+++ b/src/llama_stack_client/resources/responses/input_items.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/resources/responses/responses.py b/src/llama_stack_client/resources/responses/responses.py
index caa60802..6bc29a62 100644
--- a/src/llama_stack_client/resources/responses/responses.py
+++ b/src/llama_stack_client/resources/responses/responses.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/resources/routes.py b/src/llama_stack_client/resources/routes.py
index fc584215..0797d00f 100644
--- a/src/llama_stack_client/resources/routes.py
+++ b/src/llama_stack_client/resources/routes.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/resources/safety.py b/src/llama_stack_client/resources/safety.py
index 38aaf19e..77972161 100644
--- a/src/llama_stack_client/resources/safety.py
+++ b/src/llama_stack_client/resources/safety.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/resources/scoring.py b/src/llama_stack_client/resources/scoring.py
index dddf8002..171cabaf 100644
--- a/src/llama_stack_client/resources/scoring.py
+++ b/src/llama_stack_client/resources/scoring.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/resources/scoring_functions.py b/src/llama_stack_client/resources/scoring_functions.py
index ea3d8a6d..c172c8b2 100644
--- a/src/llama_stack_client/resources/scoring_functions.py
+++ b/src/llama_stack_client/resources/scoring_functions.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/resources/shields.py b/src/llama_stack_client/resources/shields.py
index a7893939..e9512c36 100644
--- a/src/llama_stack_client/resources/shields.py
+++ b/src/llama_stack_client/resources/shields.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/resources/synthetic_data_generation.py b/src/llama_stack_client/resources/synthetic_data_generation.py
index 55eddcad..c8c3d431 100644
--- a/src/llama_stack_client/resources/synthetic_data_generation.py
+++ b/src/llama_stack_client/resources/synthetic_data_generation.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/resources/telemetry.py b/src/llama_stack_client/resources/telemetry.py
deleted file mode 100644
index 972b8fdf..00000000
--- a/src/llama_stack_client/resources/telemetry.py
+++ /dev/null
@@ -1,865 +0,0 @@
-# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-
-from __future__ import annotations
-
-from typing import Type, Iterable, cast
-from typing_extensions import Literal
-
-import httpx
-
-from ..types import (
- telemetry_query_spans_params,
- telemetry_query_traces_params,
- telemetry_get_span_tree_params,
- telemetry_query_metrics_params,
- telemetry_save_spans_to_dataset_params,
-)
-from .._types import Body, Omit, Query, Headers, NoneType, NotGiven, SequenceNotStr, omit, not_given
-from .._utils import maybe_transform, async_maybe_transform
-from .._compat import cached_property
-from .._resource import SyncAPIResource, AsyncAPIResource
-from .._response import (
- to_raw_response_wrapper,
- to_streamed_response_wrapper,
- async_to_raw_response_wrapper,
- async_to_streamed_response_wrapper,
-)
-from .._wrappers import DataWrapper
-from ..types.trace import Trace
-from .._base_client import make_request_options
-from ..types.query_condition_param import QueryConditionParam
-from ..types.telemetry_get_span_response import TelemetryGetSpanResponse
-from ..types.telemetry_query_spans_response import TelemetryQuerySpansResponse
-from ..types.telemetry_query_traces_response import TelemetryQueryTracesResponse
-from ..types.telemetry_get_span_tree_response import TelemetryGetSpanTreeResponse
-from ..types.telemetry_query_metrics_response import TelemetryQueryMetricsResponse
-
-__all__ = ["TelemetryResource", "AsyncTelemetryResource"]
-
-
-class TelemetryResource(SyncAPIResource):
- @cached_property
- def with_raw_response(self) -> TelemetryResourceWithRawResponse:
- """
- This property can be used as a prefix for any HTTP method call to return
- the raw response object instead of the parsed content.
-
- For more information, see https://www.github.com/llamastack/llama-stack-client-python#accessing-raw-response-data-eg-headers
- """
- return TelemetryResourceWithRawResponse(self)
-
- @cached_property
- def with_streaming_response(self) -> TelemetryResourceWithStreamingResponse:
- """
- An alternative to `.with_raw_response` that doesn't eagerly read the response body.
-
- For more information, see https://www.github.com/llamastack/llama-stack-client-python#with_streaming_response
- """
- return TelemetryResourceWithStreamingResponse(self)
-
- def get_span(
- self,
- span_id: str,
- *,
- trace_id: str,
- # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
- # The extra values given here take precedence over values defined on the client or passed to this method.
- extra_headers: Headers | None = None,
- extra_query: Query | None = None,
- extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = not_given,
- ) -> TelemetryGetSpanResponse:
- """
- Get a span by its ID.
-
- Args:
- extra_headers: Send extra headers
-
- extra_query: Add additional query parameters to the request
-
- extra_body: Add additional JSON properties to the request
-
- timeout: Override the client-level default timeout for this request, in seconds
- """
- if not trace_id:
- raise ValueError(f"Expected a non-empty value for `trace_id` but received {trace_id!r}")
- if not span_id:
- raise ValueError(f"Expected a non-empty value for `span_id` but received {span_id!r}")
- return self._get(
- f"/v1alpha/telemetry/traces/{trace_id}/spans/{span_id}",
- options=make_request_options(
- extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
- ),
- cast_to=TelemetryGetSpanResponse,
- )
-
- def get_span_tree(
- self,
- span_id: str,
- *,
- attributes_to_return: SequenceNotStr[str] | Omit = omit,
- max_depth: int | Omit = omit,
- # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
- # The extra values given here take precedence over values defined on the client or passed to this method.
- extra_headers: Headers | None = None,
- extra_query: Query | None = None,
- extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = not_given,
- ) -> TelemetryGetSpanTreeResponse:
- """
- Get a span tree by its ID.
-
- Args:
- attributes_to_return: The attributes to return in the tree.
-
- max_depth: The maximum depth of the tree.
-
- extra_headers: Send extra headers
-
- extra_query: Add additional query parameters to the request
-
- extra_body: Add additional JSON properties to the request
-
- timeout: Override the client-level default timeout for this request, in seconds
- """
- if not span_id:
- raise ValueError(f"Expected a non-empty value for `span_id` but received {span_id!r}")
- return self._post(
- f"/v1alpha/telemetry/spans/{span_id}/tree",
- body=maybe_transform(
- {
- "attributes_to_return": attributes_to_return,
- "max_depth": max_depth,
- },
- telemetry_get_span_tree_params.TelemetryGetSpanTreeParams,
- ),
- options=make_request_options(
- extra_headers=extra_headers,
- extra_query=extra_query,
- extra_body=extra_body,
- timeout=timeout,
- post_parser=DataWrapper[TelemetryGetSpanTreeResponse]._unwrapper,
- ),
- cast_to=cast(Type[TelemetryGetSpanTreeResponse], DataWrapper[TelemetryGetSpanTreeResponse]),
- )
-
- def get_trace(
- self,
- trace_id: str,
- *,
- # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
- # The extra values given here take precedence over values defined on the client or passed to this method.
- extra_headers: Headers | None = None,
- extra_query: Query | None = None,
- extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = not_given,
- ) -> Trace:
- """
- Get a trace by its ID.
-
- Args:
- extra_headers: Send extra headers
-
- extra_query: Add additional query parameters to the request
-
- extra_body: Add additional JSON properties to the request
-
- timeout: Override the client-level default timeout for this request, in seconds
- """
- if not trace_id:
- raise ValueError(f"Expected a non-empty value for `trace_id` but received {trace_id!r}")
- return self._get(
- f"/v1alpha/telemetry/traces/{trace_id}",
- options=make_request_options(
- extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
- ),
- cast_to=Trace,
- )
-
- def query_metrics(
- self,
- metric_name: str,
- *,
- query_type: Literal["range", "instant"],
- start_time: int,
- end_time: int | Omit = omit,
- granularity: str | Omit = omit,
- label_matchers: Iterable[telemetry_query_metrics_params.LabelMatcher] | Omit = omit,
- # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
- # The extra values given here take precedence over values defined on the client or passed to this method.
- extra_headers: Headers | None = None,
- extra_query: Query | None = None,
- extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = not_given,
- ) -> TelemetryQueryMetricsResponse:
- """
- Query metrics.
-
- Args:
- query_type: The type of query to perform.
-
- start_time: The start time of the metric to query.
-
- end_time: The end time of the metric to query.
-
- granularity: The granularity of the metric to query.
-
- label_matchers: The label matchers to apply to the metric.
-
- extra_headers: Send extra headers
-
- extra_query: Add additional query parameters to the request
-
- extra_body: Add additional JSON properties to the request
-
- timeout: Override the client-level default timeout for this request, in seconds
- """
- if not metric_name:
- raise ValueError(f"Expected a non-empty value for `metric_name` but received {metric_name!r}")
- return self._post(
- f"/v1alpha/telemetry/metrics/{metric_name}",
- body=maybe_transform(
- {
- "query_type": query_type,
- "start_time": start_time,
- "end_time": end_time,
- "granularity": granularity,
- "label_matchers": label_matchers,
- },
- telemetry_query_metrics_params.TelemetryQueryMetricsParams,
- ),
- options=make_request_options(
- extra_headers=extra_headers,
- extra_query=extra_query,
- extra_body=extra_body,
- timeout=timeout,
- post_parser=DataWrapper[TelemetryQueryMetricsResponse]._unwrapper,
- ),
- cast_to=cast(Type[TelemetryQueryMetricsResponse], DataWrapper[TelemetryQueryMetricsResponse]),
- )
-
- def query_spans(
- self,
- *,
- attribute_filters: Iterable[QueryConditionParam],
- attributes_to_return: SequenceNotStr[str],
- max_depth: int | Omit = omit,
- # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
- # The extra values given here take precedence over values defined on the client or passed to this method.
- extra_headers: Headers | None = None,
- extra_query: Query | None = None,
- extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = not_given,
- ) -> TelemetryQuerySpansResponse:
- """
- Query spans.
-
- Args:
- attribute_filters: The attribute filters to apply to the spans.
-
- attributes_to_return: The attributes to return in the spans.
-
- max_depth: The maximum depth of the tree.
-
- extra_headers: Send extra headers
-
- extra_query: Add additional query parameters to the request
-
- extra_body: Add additional JSON properties to the request
-
- timeout: Override the client-level default timeout for this request, in seconds
- """
- return self._post(
- "/v1alpha/telemetry/spans",
- body=maybe_transform(
- {
- "attribute_filters": attribute_filters,
- "attributes_to_return": attributes_to_return,
- "max_depth": max_depth,
- },
- telemetry_query_spans_params.TelemetryQuerySpansParams,
- ),
- options=make_request_options(
- extra_headers=extra_headers,
- extra_query=extra_query,
- extra_body=extra_body,
- timeout=timeout,
- post_parser=DataWrapper[TelemetryQuerySpansResponse]._unwrapper,
- ),
- cast_to=cast(Type[TelemetryQuerySpansResponse], DataWrapper[TelemetryQuerySpansResponse]),
- )
-
- def query_traces(
- self,
- *,
- attribute_filters: Iterable[QueryConditionParam] | Omit = omit,
- limit: int | Omit = omit,
- offset: int | Omit = omit,
- order_by: SequenceNotStr[str] | Omit = omit,
- # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
- # The extra values given here take precedence over values defined on the client or passed to this method.
- extra_headers: Headers | None = None,
- extra_query: Query | None = None,
- extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = not_given,
- ) -> TelemetryQueryTracesResponse:
- """
- Query traces.
-
- Args:
- attribute_filters: The attribute filters to apply to the traces.
-
- limit: The limit of traces to return.
-
- offset: The offset of the traces to return.
-
- order_by: The order by of the traces to return.
-
- extra_headers: Send extra headers
-
- extra_query: Add additional query parameters to the request
-
- extra_body: Add additional JSON properties to the request
-
- timeout: Override the client-level default timeout for this request, in seconds
- """
- return self._post(
- "/v1alpha/telemetry/traces",
- body=maybe_transform(
- {
- "attribute_filters": attribute_filters,
- "limit": limit,
- "offset": offset,
- "order_by": order_by,
- },
- telemetry_query_traces_params.TelemetryQueryTracesParams,
- ),
- options=make_request_options(
- extra_headers=extra_headers,
- extra_query=extra_query,
- extra_body=extra_body,
- timeout=timeout,
- post_parser=DataWrapper[TelemetryQueryTracesResponse]._unwrapper,
- ),
- cast_to=cast(Type[TelemetryQueryTracesResponse], DataWrapper[TelemetryQueryTracesResponse]),
- )
-
- def save_spans_to_dataset(
- self,
- *,
- attribute_filters: Iterable[QueryConditionParam],
- attributes_to_save: SequenceNotStr[str],
- dataset_id: str,
- max_depth: int | Omit = omit,
- # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
- # The extra values given here take precedence over values defined on the client or passed to this method.
- extra_headers: Headers | None = None,
- extra_query: Query | None = None,
- extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = not_given,
- ) -> None:
- """
- Save spans to a dataset.
-
- Args:
- attribute_filters: The attribute filters to apply to the spans.
-
- attributes_to_save: The attributes to save to the dataset.
-
- dataset_id: The ID of the dataset to save the spans to.
-
- max_depth: The maximum depth of the tree.
-
- extra_headers: Send extra headers
-
- extra_query: Add additional query parameters to the request
-
- extra_body: Add additional JSON properties to the request
-
- timeout: Override the client-level default timeout for this request, in seconds
- """
- extra_headers = {"Accept": "*/*", **(extra_headers or {})}
- return self._post(
- "/v1alpha/telemetry/spans/export",
- body=maybe_transform(
- {
- "attribute_filters": attribute_filters,
- "attributes_to_save": attributes_to_save,
- "dataset_id": dataset_id,
- "max_depth": max_depth,
- },
- telemetry_save_spans_to_dataset_params.TelemetrySaveSpansToDatasetParams,
- ),
- options=make_request_options(
- extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
- ),
- cast_to=NoneType,
- )
-
-
-class AsyncTelemetryResource(AsyncAPIResource):
- @cached_property
- def with_raw_response(self) -> AsyncTelemetryResourceWithRawResponse:
- """
- This property can be used as a prefix for any HTTP method call to return
- the raw response object instead of the parsed content.
-
- For more information, see https://www.github.com/llamastack/llama-stack-client-python#accessing-raw-response-data-eg-headers
- """
- return AsyncTelemetryResourceWithRawResponse(self)
-
- @cached_property
- def with_streaming_response(self) -> AsyncTelemetryResourceWithStreamingResponse:
- """
- An alternative to `.with_raw_response` that doesn't eagerly read the response body.
-
- For more information, see https://www.github.com/llamastack/llama-stack-client-python#with_streaming_response
- """
- return AsyncTelemetryResourceWithStreamingResponse(self)
-
- async def get_span(
- self,
- span_id: str,
- *,
- trace_id: str,
- # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
- # The extra values given here take precedence over values defined on the client or passed to this method.
- extra_headers: Headers | None = None,
- extra_query: Query | None = None,
- extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = not_given,
- ) -> TelemetryGetSpanResponse:
- """
- Get a span by its ID.
-
- Args:
- extra_headers: Send extra headers
-
- extra_query: Add additional query parameters to the request
-
- extra_body: Add additional JSON properties to the request
-
- timeout: Override the client-level default timeout for this request, in seconds
- """
- if not trace_id:
- raise ValueError(f"Expected a non-empty value for `trace_id` but received {trace_id!r}")
- if not span_id:
- raise ValueError(f"Expected a non-empty value for `span_id` but received {span_id!r}")
- return await self._get(
- f"/v1alpha/telemetry/traces/{trace_id}/spans/{span_id}",
- options=make_request_options(
- extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
- ),
- cast_to=TelemetryGetSpanResponse,
- )
-
- async def get_span_tree(
- self,
- span_id: str,
- *,
- attributes_to_return: SequenceNotStr[str] | Omit = omit,
- max_depth: int | Omit = omit,
- # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
- # The extra values given here take precedence over values defined on the client or passed to this method.
- extra_headers: Headers | None = None,
- extra_query: Query | None = None,
- extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = not_given,
- ) -> TelemetryGetSpanTreeResponse:
- """
- Get a span tree by its ID.
-
- Args:
- attributes_to_return: The attributes to return in the tree.
-
- max_depth: The maximum depth of the tree.
-
- extra_headers: Send extra headers
-
- extra_query: Add additional query parameters to the request
-
- extra_body: Add additional JSON properties to the request
-
- timeout: Override the client-level default timeout for this request, in seconds
- """
- if not span_id:
- raise ValueError(f"Expected a non-empty value for `span_id` but received {span_id!r}")
- return await self._post(
- f"/v1alpha/telemetry/spans/{span_id}/tree",
- body=await async_maybe_transform(
- {
- "attributes_to_return": attributes_to_return,
- "max_depth": max_depth,
- },
- telemetry_get_span_tree_params.TelemetryGetSpanTreeParams,
- ),
- options=make_request_options(
- extra_headers=extra_headers,
- extra_query=extra_query,
- extra_body=extra_body,
- timeout=timeout,
- post_parser=DataWrapper[TelemetryGetSpanTreeResponse]._unwrapper,
- ),
- cast_to=cast(Type[TelemetryGetSpanTreeResponse], DataWrapper[TelemetryGetSpanTreeResponse]),
- )
-
- async def get_trace(
- self,
- trace_id: str,
- *,
- # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
- # The extra values given here take precedence over values defined on the client or passed to this method.
- extra_headers: Headers | None = None,
- extra_query: Query | None = None,
- extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = not_given,
- ) -> Trace:
- """
- Get a trace by its ID.
-
- Args:
- extra_headers: Send extra headers
-
- extra_query: Add additional query parameters to the request
-
- extra_body: Add additional JSON properties to the request
-
- timeout: Override the client-level default timeout for this request, in seconds
- """
- if not trace_id:
- raise ValueError(f"Expected a non-empty value for `trace_id` but received {trace_id!r}")
- return await self._get(
- f"/v1alpha/telemetry/traces/{trace_id}",
- options=make_request_options(
- extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
- ),
- cast_to=Trace,
- )
-
- async def query_metrics(
- self,
- metric_name: str,
- *,
- query_type: Literal["range", "instant"],
- start_time: int,
- end_time: int | Omit = omit,
- granularity: str | Omit = omit,
- label_matchers: Iterable[telemetry_query_metrics_params.LabelMatcher] | Omit = omit,
- # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
- # The extra values given here take precedence over values defined on the client or passed to this method.
- extra_headers: Headers | None = None,
- extra_query: Query | None = None,
- extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = not_given,
- ) -> TelemetryQueryMetricsResponse:
- """
- Query metrics.
-
- Args:
- query_type: The type of query to perform.
-
- start_time: The start time of the metric to query.
-
- end_time: The end time of the metric to query.
-
- granularity: The granularity of the metric to query.
-
- label_matchers: The label matchers to apply to the metric.
-
- extra_headers: Send extra headers
-
- extra_query: Add additional query parameters to the request
-
- extra_body: Add additional JSON properties to the request
-
- timeout: Override the client-level default timeout for this request, in seconds
- """
- if not metric_name:
- raise ValueError(f"Expected a non-empty value for `metric_name` but received {metric_name!r}")
- return await self._post(
- f"/v1alpha/telemetry/metrics/{metric_name}",
- body=await async_maybe_transform(
- {
- "query_type": query_type,
- "start_time": start_time,
- "end_time": end_time,
- "granularity": granularity,
- "label_matchers": label_matchers,
- },
- telemetry_query_metrics_params.TelemetryQueryMetricsParams,
- ),
- options=make_request_options(
- extra_headers=extra_headers,
- extra_query=extra_query,
- extra_body=extra_body,
- timeout=timeout,
- post_parser=DataWrapper[TelemetryQueryMetricsResponse]._unwrapper,
- ),
- cast_to=cast(Type[TelemetryQueryMetricsResponse], DataWrapper[TelemetryQueryMetricsResponse]),
- )
-
- async def query_spans(
- self,
- *,
- attribute_filters: Iterable[QueryConditionParam],
- attributes_to_return: SequenceNotStr[str],
- max_depth: int | Omit = omit,
- # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
- # The extra values given here take precedence over values defined on the client or passed to this method.
- extra_headers: Headers | None = None,
- extra_query: Query | None = None,
- extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = not_given,
- ) -> TelemetryQuerySpansResponse:
- """
- Query spans.
-
- Args:
- attribute_filters: The attribute filters to apply to the spans.
-
- attributes_to_return: The attributes to return in the spans.
-
- max_depth: The maximum depth of the tree.
-
- extra_headers: Send extra headers
-
- extra_query: Add additional query parameters to the request
-
- extra_body: Add additional JSON properties to the request
-
- timeout: Override the client-level default timeout for this request, in seconds
- """
- return await self._post(
- "/v1alpha/telemetry/spans",
- body=await async_maybe_transform(
- {
- "attribute_filters": attribute_filters,
- "attributes_to_return": attributes_to_return,
- "max_depth": max_depth,
- },
- telemetry_query_spans_params.TelemetryQuerySpansParams,
- ),
- options=make_request_options(
- extra_headers=extra_headers,
- extra_query=extra_query,
- extra_body=extra_body,
- timeout=timeout,
- post_parser=DataWrapper[TelemetryQuerySpansResponse]._unwrapper,
- ),
- cast_to=cast(Type[TelemetryQuerySpansResponse], DataWrapper[TelemetryQuerySpansResponse]),
- )
-
- async def query_traces(
- self,
- *,
- attribute_filters: Iterable[QueryConditionParam] | Omit = omit,
- limit: int | Omit = omit,
- offset: int | Omit = omit,
- order_by: SequenceNotStr[str] | Omit = omit,
- # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
- # The extra values given here take precedence over values defined on the client or passed to this method.
- extra_headers: Headers | None = None,
- extra_query: Query | None = None,
- extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = not_given,
- ) -> TelemetryQueryTracesResponse:
- """
- Query traces.
-
- Args:
- attribute_filters: The attribute filters to apply to the traces.
-
- limit: The limit of traces to return.
-
- offset: The offset of the traces to return.
-
- order_by: The order by of the traces to return.
-
- extra_headers: Send extra headers
-
- extra_query: Add additional query parameters to the request
-
- extra_body: Add additional JSON properties to the request
-
- timeout: Override the client-level default timeout for this request, in seconds
- """
- return await self._post(
- "/v1alpha/telemetry/traces",
- body=await async_maybe_transform(
- {
- "attribute_filters": attribute_filters,
- "limit": limit,
- "offset": offset,
- "order_by": order_by,
- },
- telemetry_query_traces_params.TelemetryQueryTracesParams,
- ),
- options=make_request_options(
- extra_headers=extra_headers,
- extra_query=extra_query,
- extra_body=extra_body,
- timeout=timeout,
- post_parser=DataWrapper[TelemetryQueryTracesResponse]._unwrapper,
- ),
- cast_to=cast(Type[TelemetryQueryTracesResponse], DataWrapper[TelemetryQueryTracesResponse]),
- )
-
- async def save_spans_to_dataset(
- self,
- *,
- attribute_filters: Iterable[QueryConditionParam],
- attributes_to_save: SequenceNotStr[str],
- dataset_id: str,
- max_depth: int | Omit = omit,
- # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
- # The extra values given here take precedence over values defined on the client or passed to this method.
- extra_headers: Headers | None = None,
- extra_query: Query | None = None,
- extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = not_given,
- ) -> None:
- """
- Save spans to a dataset.
-
- Args:
- attribute_filters: The attribute filters to apply to the spans.
-
- attributes_to_save: The attributes to save to the dataset.
-
- dataset_id: The ID of the dataset to save the spans to.
-
- max_depth: The maximum depth of the tree.
-
- extra_headers: Send extra headers
-
- extra_query: Add additional query parameters to the request
-
- extra_body: Add additional JSON properties to the request
-
- timeout: Override the client-level default timeout for this request, in seconds
- """
- extra_headers = {"Accept": "*/*", **(extra_headers or {})}
- return await self._post(
- "/v1alpha/telemetry/spans/export",
- body=await async_maybe_transform(
- {
- "attribute_filters": attribute_filters,
- "attributes_to_save": attributes_to_save,
- "dataset_id": dataset_id,
- "max_depth": max_depth,
- },
- telemetry_save_spans_to_dataset_params.TelemetrySaveSpansToDatasetParams,
- ),
- options=make_request_options(
- extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
- ),
- cast_to=NoneType,
- )
-
-
-class TelemetryResourceWithRawResponse:
- def __init__(self, telemetry: TelemetryResource) -> None:
- self._telemetry = telemetry
-
- self.get_span = to_raw_response_wrapper(
- telemetry.get_span,
- )
- self.get_span_tree = to_raw_response_wrapper(
- telemetry.get_span_tree,
- )
- self.get_trace = to_raw_response_wrapper(
- telemetry.get_trace,
- )
- self.query_metrics = to_raw_response_wrapper(
- telemetry.query_metrics,
- )
- self.query_spans = to_raw_response_wrapper(
- telemetry.query_spans,
- )
- self.query_traces = to_raw_response_wrapper(
- telemetry.query_traces,
- )
- self.save_spans_to_dataset = to_raw_response_wrapper(
- telemetry.save_spans_to_dataset,
- )
-
-
-class AsyncTelemetryResourceWithRawResponse:
- def __init__(self, telemetry: AsyncTelemetryResource) -> None:
- self._telemetry = telemetry
-
- self.get_span = async_to_raw_response_wrapper(
- telemetry.get_span,
- )
- self.get_span_tree = async_to_raw_response_wrapper(
- telemetry.get_span_tree,
- )
- self.get_trace = async_to_raw_response_wrapper(
- telemetry.get_trace,
- )
- self.query_metrics = async_to_raw_response_wrapper(
- telemetry.query_metrics,
- )
- self.query_spans = async_to_raw_response_wrapper(
- telemetry.query_spans,
- )
- self.query_traces = async_to_raw_response_wrapper(
- telemetry.query_traces,
- )
- self.save_spans_to_dataset = async_to_raw_response_wrapper(
- telemetry.save_spans_to_dataset,
- )
-
-
-class TelemetryResourceWithStreamingResponse:
- def __init__(self, telemetry: TelemetryResource) -> None:
- self._telemetry = telemetry
-
- self.get_span = to_streamed_response_wrapper(
- telemetry.get_span,
- )
- self.get_span_tree = to_streamed_response_wrapper(
- telemetry.get_span_tree,
- )
- self.get_trace = to_streamed_response_wrapper(
- telemetry.get_trace,
- )
- self.query_metrics = to_streamed_response_wrapper(
- telemetry.query_metrics,
- )
- self.query_spans = to_streamed_response_wrapper(
- telemetry.query_spans,
- )
- self.query_traces = to_streamed_response_wrapper(
- telemetry.query_traces,
- )
- self.save_spans_to_dataset = to_streamed_response_wrapper(
- telemetry.save_spans_to_dataset,
- )
-
-
-class AsyncTelemetryResourceWithStreamingResponse:
- def __init__(self, telemetry: AsyncTelemetryResource) -> None:
- self._telemetry = telemetry
-
- self.get_span = async_to_streamed_response_wrapper(
- telemetry.get_span,
- )
- self.get_span_tree = async_to_streamed_response_wrapper(
- telemetry.get_span_tree,
- )
- self.get_trace = async_to_streamed_response_wrapper(
- telemetry.get_trace,
- )
- self.query_metrics = async_to_streamed_response_wrapper(
- telemetry.query_metrics,
- )
- self.query_spans = async_to_streamed_response_wrapper(
- telemetry.query_spans,
- )
- self.query_traces = async_to_streamed_response_wrapper(
- telemetry.query_traces,
- )
- self.save_spans_to_dataset = async_to_streamed_response_wrapper(
- telemetry.save_spans_to_dataset,
- )
diff --git a/src/llama_stack_client/resources/tool_runtime/__init__.py b/src/llama_stack_client/resources/tool_runtime/__init__.py
index 2ed86a39..6c4f54ac 100644
--- a/src/llama_stack_client/resources/tool_runtime/__init__.py
+++ b/src/llama_stack_client/resources/tool_runtime/__init__.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from .rag_tool import (
diff --git a/src/llama_stack_client/resources/tool_runtime/rag_tool.py b/src/llama_stack_client/resources/tool_runtime/rag_tool.py
index 5b3bd4d3..af4a7d64 100644
--- a/src/llama_stack_client/resources/tool_runtime/rag_tool.py
+++ b/src/llama_stack_client/resources/tool_runtime/rag_tool.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/resources/tool_runtime/tool_runtime.py b/src/llama_stack_client/resources/tool_runtime/tool_runtime.py
index db90cb22..a586ef79 100644
--- a/src/llama_stack_client/resources/tool_runtime/tool_runtime.py
+++ b/src/llama_stack_client/resources/tool_runtime/tool_runtime.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/resources/toolgroups.py b/src/llama_stack_client/resources/toolgroups.py
index 31551d20..8839ee93 100644
--- a/src/llama_stack_client/resources/toolgroups.py
+++ b/src/llama_stack_client/resources/toolgroups.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/resources/tools.py b/src/llama_stack_client/resources/tools.py
index adbf4402..e1a159eb 100644
--- a/src/llama_stack_client/resources/tools.py
+++ b/src/llama_stack_client/resources/tools.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/resources/vector_io.py b/src/llama_stack_client/resources/vector_io.py
index f9647342..2659c139 100644
--- a/src/llama_stack_client/resources/vector_io.py
+++ b/src/llama_stack_client/resources/vector_io.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/resources/vector_stores/__init__.py b/src/llama_stack_client/resources/vector_stores/__init__.py
index d83a42b6..edd010ed 100644
--- a/src/llama_stack_client/resources/vector_stores/__init__.py
+++ b/src/llama_stack_client/resources/vector_stores/__init__.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from .files import (
diff --git a/src/llama_stack_client/resources/vector_stores/file_batches.py b/src/llama_stack_client/resources/vector_stores/file_batches.py
index 9d1ecf30..adf80ccc 100644
--- a/src/llama_stack_client/resources/vector_stores/file_batches.py
+++ b/src/llama_stack_client/resources/vector_stores/file_batches.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/resources/vector_stores/files.py b/src/llama_stack_client/resources/vector_stores/files.py
index f9a1ef31..365a87e6 100644
--- a/src/llama_stack_client/resources/vector_stores/files.py
+++ b/src/llama_stack_client/resources/vector_stores/files.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/resources/vector_stores/vector_stores.py b/src/llama_stack_client/resources/vector_stores/vector_stores.py
index 5c6e00f7..2462972f 100644
--- a/src/llama_stack_client/resources/vector_stores/vector_stores.py
+++ b/src/llama_stack_client/resources/vector_stores/vector_stores.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/types/__init__.py b/src/llama_stack_client/types/__init__.py
index 5660e0e2..173a1e03 100644
--- a/src/llama_stack_client/types/__init__.py
+++ b/src/llama_stack_client/types/__init__.py
@@ -1,10 +1,15 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
from .file import File as File
from .model import Model as Model
-from .trace import Trace as Trace
from .shared import (
Message as Message,
Document as Document,
@@ -26,7 +31,6 @@
)
from .shield import Shield as Shield
from .tool_def import ToolDef as ToolDef
-from .benchmark import Benchmark as Benchmark
from .route_info import RouteInfo as RouteInfo
from .scoring_fn import ScoringFn as ScoringFn
from .tool_group import ToolGroup as ToolGroup
@@ -38,7 +42,6 @@
from .create_response import CreateResponse as CreateResponse
from .response_object import ResponseObject as ResponseObject
from .file_list_params import FileListParams as FileListParams
-from .span_with_status import SpanWithStatus as SpanWithStatus
from .tool_list_params import ToolListParams as ToolListParams
from .scoring_fn_params import ScoringFnParams as ScoringFnParams
from .file_create_params import FileCreateParams as FileCreateParams
@@ -51,17 +54,13 @@
from .delete_file_response import DeleteFileResponse as DeleteFileResponse
from .list_models_response import ListModelsResponse as ListModelsResponse
from .list_routes_response import ListRoutesResponse as ListRoutesResponse
-from .query_spans_response import QuerySpansResponse as QuerySpansResponse
from .response_list_params import ResponseListParams as ResponseListParams
from .scoring_score_params import ScoringScoreParams as ScoringScoreParams
from .shield_list_response import ShieldListResponse as ShieldListResponse
from .chat_completion_chunk import ChatCompletionChunk as ChatCompletionChunk
-from .dataset_list_response import DatasetListResponse as DatasetListResponse
from .list_shields_response import ListShieldsResponse as ListShieldsResponse
from .model_register_params import ModelRegisterParams as ModelRegisterParams
from .query_chunks_response import QueryChunksResponse as QueryChunksResponse
-from .query_condition_param import QueryConditionParam as QueryConditionParam
-from .list_datasets_response import ListDatasetsResponse as ListDatasetsResponse
from .provider_list_response import ProviderListResponse as ProviderListResponse
from .response_create_params import ResponseCreateParams as ResponseCreateParams
from .response_list_response import ResponseListResponse as ResponseListResponse
@@ -70,25 +69,16 @@
from .shield_register_params import ShieldRegisterParams as ShieldRegisterParams
from .tool_invocation_result import ToolInvocationResult as ToolInvocationResult
from .vector_io_query_params import VectorIoQueryParams as VectorIoQueryParams
-from .benchmark_list_response import BenchmarkListResponse as BenchmarkListResponse
-from .dataset_iterrows_params import DatasetIterrowsParams as DatasetIterrowsParams
-from .dataset_register_params import DatasetRegisterParams as DatasetRegisterParams
from .embedding_create_params import EmbeddingCreateParams as EmbeddingCreateParams
from .list_providers_response import ListProvidersResponse as ListProvidersResponse
from .scoring_fn_params_param import ScoringFnParamsParam as ScoringFnParamsParam
from .toolgroup_list_response import ToolgroupListResponse as ToolgroupListResponse
from .vector_io_insert_params import VectorIoInsertParams as VectorIoInsertParams
from .completion_create_params import CompletionCreateParams as CompletionCreateParams
-from .list_benchmarks_response import ListBenchmarksResponse as ListBenchmarksResponse
from .moderation_create_params import ModerationCreateParams as ModerationCreateParams
from .response_delete_response import ResponseDeleteResponse as ResponseDeleteResponse
from .safety_run_shield_params import SafetyRunShieldParams as SafetyRunShieldParams
from .vector_store_list_params import VectorStoreListParams as VectorStoreListParams
-from .benchmark_register_params import BenchmarkRegisterParams as BenchmarkRegisterParams
-from .dataset_appendrows_params import DatasetAppendrowsParams as DatasetAppendrowsParams
-from .dataset_iterrows_response import DatasetIterrowsResponse as DatasetIterrowsResponse
-from .dataset_register_response import DatasetRegisterResponse as DatasetRegisterResponse
-from .dataset_retrieve_response import DatasetRetrieveResponse as DatasetRetrieveResponse
from .list_tool_groups_response import ListToolGroupsResponse as ListToolGroupsResponse
from .toolgroup_register_params import ToolgroupRegisterParams as ToolgroupRegisterParams
from .completion_create_response import CompletionCreateResponse as CompletionCreateResponse
@@ -100,29 +90,17 @@
from .vector_store_search_params import VectorStoreSearchParams as VectorStoreSearchParams
from .vector_store_update_params import VectorStoreUpdateParams as VectorStoreUpdateParams
from .list_vector_stores_response import ListVectorStoresResponse as ListVectorStoresResponse
-from .telemetry_get_span_response import TelemetryGetSpanResponse as TelemetryGetSpanResponse
from .conversation_delete_response import ConversationDeleteResponse as ConversationDeleteResponse
from .scoring_score_batch_response import ScoringScoreBatchResponse as ScoringScoreBatchResponse
-from .telemetry_query_spans_params import TelemetryQuerySpansParams as TelemetryQuerySpansParams
from .vector_store_delete_response import VectorStoreDeleteResponse as VectorStoreDeleteResponse
from .vector_store_search_response import VectorStoreSearchResponse as VectorStoreSearchResponse
-from .telemetry_query_traces_params import TelemetryQueryTracesParams as TelemetryQueryTracesParams
from .scoring_function_list_response import ScoringFunctionListResponse as ScoringFunctionListResponse
-from .telemetry_get_span_tree_params import TelemetryGetSpanTreeParams as TelemetryGetSpanTreeParams
-from .telemetry_query_metrics_params import TelemetryQueryMetricsParams as TelemetryQueryMetricsParams
-from .telemetry_query_spans_response import TelemetryQuerySpansResponse as TelemetryQuerySpansResponse
from .tool_runtime_list_tools_params import ToolRuntimeListToolsParams as ToolRuntimeListToolsParams
from .list_scoring_functions_response import ListScoringFunctionsResponse as ListScoringFunctionsResponse
-from .telemetry_query_traces_response import TelemetryQueryTracesResponse as TelemetryQueryTracesResponse
from .tool_runtime_invoke_tool_params import ToolRuntimeInvokeToolParams as ToolRuntimeInvokeToolParams
from .scoring_function_register_params import ScoringFunctionRegisterParams as ScoringFunctionRegisterParams
-from .telemetry_get_span_tree_response import TelemetryGetSpanTreeResponse as TelemetryGetSpanTreeResponse
-from .telemetry_query_metrics_response import TelemetryQueryMetricsResponse as TelemetryQueryMetricsResponse
from .tool_runtime_list_tools_response import ToolRuntimeListToolsResponse as ToolRuntimeListToolsResponse
from .synthetic_data_generation_response import SyntheticDataGenerationResponse as SyntheticDataGenerationResponse
-from .telemetry_save_spans_to_dataset_params import (
- TelemetrySaveSpansToDatasetParams as TelemetrySaveSpansToDatasetParams,
-)
from .synthetic_data_generation_generate_params import (
SyntheticDataGenerationGenerateParams as SyntheticDataGenerationGenerateParams,
)
diff --git a/src/llama_stack_client/types/alpha/__init__.py b/src/llama_stack_client/types/alpha/__init__.py
index 9651e73a..61e02a4e 100644
--- a/src/llama_stack_client/types/alpha/__init__.py
+++ b/src/llama_stack_client/types/alpha/__init__.py
@@ -1,8 +1,15 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
from .job import Job as Job
+from .benchmark import Benchmark as Benchmark
from .tool_response import ToolResponse as ToolResponse
from .inference_step import InferenceStep as InferenceStep
from .shield_call_step import ShieldCallStep as ShieldCallStep
@@ -19,7 +26,10 @@
from .algorithm_config_param import AlgorithmConfigParam as AlgorithmConfigParam
from .benchmark_config_param import BenchmarkConfigParam as BenchmarkConfigParam
from .agent_retrieve_response import AgentRetrieveResponse as AgentRetrieveResponse
+from .benchmark_list_response import BenchmarkListResponse as BenchmarkListResponse
from .inference_rerank_params import InferenceRerankParams as InferenceRerankParams
+from .list_benchmarks_response import ListBenchmarksResponse as ListBenchmarksResponse
+from .benchmark_register_params import BenchmarkRegisterParams as BenchmarkRegisterParams
from .eval_evaluate_rows_params import EvalEvaluateRowsParams as EvalEvaluateRowsParams
from .inference_rerank_response import InferenceRerankResponse as InferenceRerankResponse
from .eval_run_eval_alpha_params import EvalRunEvalAlphaParams as EvalRunEvalAlphaParams
diff --git a/src/llama_stack_client/types/alpha/agent_create_params.py b/src/llama_stack_client/types/alpha/agent_create_params.py
index 368704b2..9c420379 100644
--- a/src/llama_stack_client/types/alpha/agent_create_params.py
+++ b/src/llama_stack_client/types/alpha/agent_create_params.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/types/alpha/agent_create_response.py b/src/llama_stack_client/types/alpha/agent_create_response.py
index 9b155198..70e7d98b 100644
--- a/src/llama_stack_client/types/alpha/agent_create_response.py
+++ b/src/llama_stack_client/types/alpha/agent_create_response.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from ..._models import BaseModel
diff --git a/src/llama_stack_client/types/alpha/agent_list_params.py b/src/llama_stack_client/types/alpha/agent_list_params.py
index 15da545b..0b50ef24 100644
--- a/src/llama_stack_client/types/alpha/agent_list_params.py
+++ b/src/llama_stack_client/types/alpha/agent_list_params.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/types/alpha/agent_list_response.py b/src/llama_stack_client/types/alpha/agent_list_response.py
index 69de5001..212a4a9f 100644
--- a/src/llama_stack_client/types/alpha/agent_list_response.py
+++ b/src/llama_stack_client/types/alpha/agent_list_response.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import Dict, List, Union, Optional
diff --git a/src/llama_stack_client/types/alpha/agent_retrieve_response.py b/src/llama_stack_client/types/alpha/agent_retrieve_response.py
index 87d79b7b..bcf40e21 100644
--- a/src/llama_stack_client/types/alpha/agent_retrieve_response.py
+++ b/src/llama_stack_client/types/alpha/agent_retrieve_response.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from datetime import datetime
diff --git a/src/llama_stack_client/types/alpha/agents/__init__.py b/src/llama_stack_client/types/alpha/agents/__init__.py
index 3a144840..f28e38e4 100644
--- a/src/llama_stack_client/types/alpha/agents/__init__.py
+++ b/src/llama_stack_client/types/alpha/agents/__init__.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/types/alpha/agents/agent_turn_response_stream_chunk.py b/src/llama_stack_client/types/alpha/agents/agent_turn_response_stream_chunk.py
index c45bf756..5a518938 100644
--- a/src/llama_stack_client/types/alpha/agents/agent_turn_response_stream_chunk.py
+++ b/src/llama_stack_client/types/alpha/agents/agent_turn_response_stream_chunk.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from ...._models import BaseModel
diff --git a/src/llama_stack_client/types/alpha/agents/session.py b/src/llama_stack_client/types/alpha/agents/session.py
index 9b60853a..c2b3571d 100644
--- a/src/llama_stack_client/types/alpha/agents/session.py
+++ b/src/llama_stack_client/types/alpha/agents/session.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import List
diff --git a/src/llama_stack_client/types/alpha/agents/session_create_params.py b/src/llama_stack_client/types/alpha/agents/session_create_params.py
index 5f421ae9..e8fb03fa 100644
--- a/src/llama_stack_client/types/alpha/agents/session_create_params.py
+++ b/src/llama_stack_client/types/alpha/agents/session_create_params.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/types/alpha/agents/session_create_response.py b/src/llama_stack_client/types/alpha/agents/session_create_response.py
index 7d30c61a..dd8b1eba 100644
--- a/src/llama_stack_client/types/alpha/agents/session_create_response.py
+++ b/src/llama_stack_client/types/alpha/agents/session_create_response.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from ...._models import BaseModel
diff --git a/src/llama_stack_client/types/alpha/agents/session_list_params.py b/src/llama_stack_client/types/alpha/agents/session_list_params.py
index 0644d1ae..0ff7609b 100644
--- a/src/llama_stack_client/types/alpha/agents/session_list_params.py
+++ b/src/llama_stack_client/types/alpha/agents/session_list_params.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/types/alpha/agents/session_list_response.py b/src/llama_stack_client/types/alpha/agents/session_list_response.py
index 23a51baf..ad686bd3 100644
--- a/src/llama_stack_client/types/alpha/agents/session_list_response.py
+++ b/src/llama_stack_client/types/alpha/agents/session_list_response.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import Dict, List, Union, Optional
diff --git a/src/llama_stack_client/types/alpha/agents/session_retrieve_params.py b/src/llama_stack_client/types/alpha/agents/session_retrieve_params.py
index 116190cc..27bc0761 100644
--- a/src/llama_stack_client/types/alpha/agents/session_retrieve_params.py
+++ b/src/llama_stack_client/types/alpha/agents/session_retrieve_params.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/types/alpha/agents/step_retrieve_response.py b/src/llama_stack_client/types/alpha/agents/step_retrieve_response.py
index 55b64355..300c6ffb 100644
--- a/src/llama_stack_client/types/alpha/agents/step_retrieve_response.py
+++ b/src/llama_stack_client/types/alpha/agents/step_retrieve_response.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import Union
diff --git a/src/llama_stack_client/types/alpha/agents/turn.py b/src/llama_stack_client/types/alpha/agents/turn.py
index 74ef22aa..51ec9ddf 100644
--- a/src/llama_stack_client/types/alpha/agents/turn.py
+++ b/src/llama_stack_client/types/alpha/agents/turn.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import List, Union, Optional
diff --git a/src/llama_stack_client/types/alpha/agents/turn_create_params.py b/src/llama_stack_client/types/alpha/agents/turn_create_params.py
index 7225959a..79ee42be 100644
--- a/src/llama_stack_client/types/alpha/agents/turn_create_params.py
+++ b/src/llama_stack_client/types/alpha/agents/turn_create_params.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/types/alpha/agents/turn_response_event.py b/src/llama_stack_client/types/alpha/agents/turn_response_event.py
index c162135d..3088e623 100644
--- a/src/llama_stack_client/types/alpha/agents/turn_response_event.py
+++ b/src/llama_stack_client/types/alpha/agents/turn_response_event.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import Dict, List, Union, Optional
diff --git a/src/llama_stack_client/types/alpha/agents/turn_resume_params.py b/src/llama_stack_client/types/alpha/agents/turn_resume_params.py
index 554e3578..23fda973 100644
--- a/src/llama_stack_client/types/alpha/agents/turn_resume_params.py
+++ b/src/llama_stack_client/types/alpha/agents/turn_resume_params.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/types/alpha/algorithm_config_param.py b/src/llama_stack_client/types/alpha/algorithm_config_param.py
index d6da8130..bf3b7d0b 100644
--- a/src/llama_stack_client/types/alpha/algorithm_config_param.py
+++ b/src/llama_stack_client/types/alpha/algorithm_config_param.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/types/benchmark.py b/src/llama_stack_client/types/alpha/benchmark.py
similarity index 75%
rename from src/llama_stack_client/types/benchmark.py
rename to src/llama_stack_client/types/alpha/benchmark.py
index eb6dde75..b70c8f28 100644
--- a/src/llama_stack_client/types/benchmark.py
+++ b/src/llama_stack_client/types/alpha/benchmark.py
@@ -1,9 +1,15 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import Dict, List, Union, Optional
from typing_extensions import Literal
-from .._models import BaseModel
+from ..._models import BaseModel
__all__ = ["Benchmark"]
diff --git a/src/llama_stack_client/types/alpha/benchmark_config_param.py b/src/llama_stack_client/types/alpha/benchmark_config_param.py
index 4a3ea512..e32cd187 100644
--- a/src/llama_stack_client/types/alpha/benchmark_config_param.py
+++ b/src/llama_stack_client/types/alpha/benchmark_config_param.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/types/benchmark_list_response.py b/src/llama_stack_client/types/alpha/benchmark_list_response.py
similarity index 57%
rename from src/llama_stack_client/types/benchmark_list_response.py
rename to src/llama_stack_client/types/alpha/benchmark_list_response.py
index b2e8ad2b..56d7d8ba 100644
--- a/src/llama_stack_client/types/benchmark_list_response.py
+++ b/src/llama_stack_client/types/alpha/benchmark_list_response.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import List
diff --git a/src/llama_stack_client/types/benchmark_register_params.py b/src/llama_stack_client/types/alpha/benchmark_register_params.py
similarity index 79%
rename from src/llama_stack_client/types/benchmark_register_params.py
rename to src/llama_stack_client/types/alpha/benchmark_register_params.py
index 322e2da8..84be3786 100644
--- a/src/llama_stack_client/types/benchmark_register_params.py
+++ b/src/llama_stack_client/types/alpha/benchmark_register_params.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
@@ -5,7 +11,7 @@
from typing import Dict, Union, Iterable
from typing_extensions import Required, TypedDict
-from .._types import SequenceNotStr
+from ..._types import SequenceNotStr
__all__ = ["BenchmarkRegisterParams"]
diff --git a/src/llama_stack_client/types/alpha/eval/__init__.py b/src/llama_stack_client/types/alpha/eval/__init__.py
index f8ee8b14..d14ed874 100644
--- a/src/llama_stack_client/types/alpha/eval/__init__.py
+++ b/src/llama_stack_client/types/alpha/eval/__init__.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/types/alpha/eval_evaluate_rows_alpha_params.py b/src/llama_stack_client/types/alpha/eval_evaluate_rows_alpha_params.py
index 0422e224..36036ff9 100644
--- a/src/llama_stack_client/types/alpha/eval_evaluate_rows_alpha_params.py
+++ b/src/llama_stack_client/types/alpha/eval_evaluate_rows_alpha_params.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/types/alpha/eval_evaluate_rows_params.py b/src/llama_stack_client/types/alpha/eval_evaluate_rows_params.py
index 4ff9bd5b..3aba96a2 100644
--- a/src/llama_stack_client/types/alpha/eval_evaluate_rows_params.py
+++ b/src/llama_stack_client/types/alpha/eval_evaluate_rows_params.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/types/alpha/eval_run_eval_alpha_params.py b/src/llama_stack_client/types/alpha/eval_run_eval_alpha_params.py
index e07393b3..760f9dc6 100644
--- a/src/llama_stack_client/types/alpha/eval_run_eval_alpha_params.py
+++ b/src/llama_stack_client/types/alpha/eval_run_eval_alpha_params.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/types/alpha/eval_run_eval_params.py b/src/llama_stack_client/types/alpha/eval_run_eval_params.py
index 33596fc2..bb166ba3 100644
--- a/src/llama_stack_client/types/alpha/eval_run_eval_params.py
+++ b/src/llama_stack_client/types/alpha/eval_run_eval_params.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/types/alpha/evaluate_response.py b/src/llama_stack_client/types/alpha/evaluate_response.py
index 4cd2e0f7..69d310ef 100644
--- a/src/llama_stack_client/types/alpha/evaluate_response.py
+++ b/src/llama_stack_client/types/alpha/evaluate_response.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import Dict, List, Union
diff --git a/src/llama_stack_client/types/alpha/inference_rerank_params.py b/src/llama_stack_client/types/alpha/inference_rerank_params.py
index 4c506240..6502c3d4 100644
--- a/src/llama_stack_client/types/alpha/inference_rerank_params.py
+++ b/src/llama_stack_client/types/alpha/inference_rerank_params.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/types/alpha/inference_rerank_response.py b/src/llama_stack_client/types/alpha/inference_rerank_response.py
index 391f8a3b..f2cd133c 100644
--- a/src/llama_stack_client/types/alpha/inference_rerank_response.py
+++ b/src/llama_stack_client/types/alpha/inference_rerank_response.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import List
diff --git a/src/llama_stack_client/types/alpha/inference_step.py b/src/llama_stack_client/types/alpha/inference_step.py
index a7e446d1..a4dfa054 100644
--- a/src/llama_stack_client/types/alpha/inference_step.py
+++ b/src/llama_stack_client/types/alpha/inference_step.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import Optional
diff --git a/src/llama_stack_client/types/alpha/job.py b/src/llama_stack_client/types/alpha/job.py
index 23506692..696eba85 100644
--- a/src/llama_stack_client/types/alpha/job.py
+++ b/src/llama_stack_client/types/alpha/job.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing_extensions import Literal
diff --git a/src/llama_stack_client/types/list_benchmarks_response.py b/src/llama_stack_client/types/alpha/list_benchmarks_response.py
similarity index 52%
rename from src/llama_stack_client/types/list_benchmarks_response.py
rename to src/llama_stack_client/types/alpha/list_benchmarks_response.py
index f265f130..8ea3b963 100644
--- a/src/llama_stack_client/types/list_benchmarks_response.py
+++ b/src/llama_stack_client/types/alpha/list_benchmarks_response.py
@@ -1,6 +1,12 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-from .._models import BaseModel
+from ..._models import BaseModel
from .benchmark_list_response import BenchmarkListResponse
__all__ = ["ListBenchmarksResponse"]
diff --git a/src/llama_stack_client/types/alpha/list_post_training_jobs_response.py b/src/llama_stack_client/types/alpha/list_post_training_jobs_response.py
index 7af3bd96..6c87bcd7 100644
--- a/src/llama_stack_client/types/alpha/list_post_training_jobs_response.py
+++ b/src/llama_stack_client/types/alpha/list_post_training_jobs_response.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from ..._models import BaseModel
diff --git a/src/llama_stack_client/types/alpha/memory_retrieval_step.py b/src/llama_stack_client/types/alpha/memory_retrieval_step.py
index 3d44dee0..727c0ec0 100644
--- a/src/llama_stack_client/types/alpha/memory_retrieval_step.py
+++ b/src/llama_stack_client/types/alpha/memory_retrieval_step.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import Optional
diff --git a/src/llama_stack_client/types/alpha/post_training/__init__.py b/src/llama_stack_client/types/alpha/post_training/__init__.py
index d5472d43..8b609eaa 100644
--- a/src/llama_stack_client/types/alpha/post_training/__init__.py
+++ b/src/llama_stack_client/types/alpha/post_training/__init__.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/types/alpha/post_training/job_artifacts_params.py b/src/llama_stack_client/types/alpha/post_training/job_artifacts_params.py
index 851ebf5f..e18e76e0 100644
--- a/src/llama_stack_client/types/alpha/post_training/job_artifacts_params.py
+++ b/src/llama_stack_client/types/alpha/post_training/job_artifacts_params.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/types/alpha/post_training/job_artifacts_response.py b/src/llama_stack_client/types/alpha/post_training/job_artifacts_response.py
index 74edff26..508ba75d 100644
--- a/src/llama_stack_client/types/alpha/post_training/job_artifacts_response.py
+++ b/src/llama_stack_client/types/alpha/post_training/job_artifacts_response.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import List, Optional
diff --git a/src/llama_stack_client/types/alpha/post_training/job_cancel_params.py b/src/llama_stack_client/types/alpha/post_training/job_cancel_params.py
index 3a976e87..fc1f9a32 100644
--- a/src/llama_stack_client/types/alpha/post_training/job_cancel_params.py
+++ b/src/llama_stack_client/types/alpha/post_training/job_cancel_params.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/types/alpha/post_training/job_list_response.py b/src/llama_stack_client/types/alpha/post_training/job_list_response.py
index 33bd89f1..95b5d7c5 100644
--- a/src/llama_stack_client/types/alpha/post_training/job_list_response.py
+++ b/src/llama_stack_client/types/alpha/post_training/job_list_response.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import List
diff --git a/src/llama_stack_client/types/alpha/post_training/job_status_params.py b/src/llama_stack_client/types/alpha/post_training/job_status_params.py
index d5e040e0..5b832347 100644
--- a/src/llama_stack_client/types/alpha/post_training/job_status_params.py
+++ b/src/llama_stack_client/types/alpha/post_training/job_status_params.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/types/alpha/post_training/job_status_response.py b/src/llama_stack_client/types/alpha/post_training/job_status_response.py
index 1ccc9ca2..cfe9d54f 100644
--- a/src/llama_stack_client/types/alpha/post_training/job_status_response.py
+++ b/src/llama_stack_client/types/alpha/post_training/job_status_response.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import Dict, List, Union, Optional
diff --git a/src/llama_stack_client/types/alpha/post_training_job.py b/src/llama_stack_client/types/alpha/post_training_job.py
index 7d9417db..5d3a5391 100644
--- a/src/llama_stack_client/types/alpha/post_training_job.py
+++ b/src/llama_stack_client/types/alpha/post_training_job.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from ..._models import BaseModel
diff --git a/src/llama_stack_client/types/alpha/post_training_preference_optimize_params.py b/src/llama_stack_client/types/alpha/post_training_preference_optimize_params.py
index 2dcd294d..35c9e023 100644
--- a/src/llama_stack_client/types/alpha/post_training_preference_optimize_params.py
+++ b/src/llama_stack_client/types/alpha/post_training_preference_optimize_params.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/types/alpha/post_training_supervised_fine_tune_params.py b/src/llama_stack_client/types/alpha/post_training_supervised_fine_tune_params.py
index c23796f0..dfdc68e8 100644
--- a/src/llama_stack_client/types/alpha/post_training_supervised_fine_tune_params.py
+++ b/src/llama_stack_client/types/alpha/post_training_supervised_fine_tune_params.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/types/alpha/shield_call_step.py b/src/llama_stack_client/types/alpha/shield_call_step.py
index 80176555..f332a4d5 100644
--- a/src/llama_stack_client/types/alpha/shield_call_step.py
+++ b/src/llama_stack_client/types/alpha/shield_call_step.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import Optional
diff --git a/src/llama_stack_client/types/alpha/tool_execution_step.py b/src/llama_stack_client/types/alpha/tool_execution_step.py
index 1761e889..04259318 100644
--- a/src/llama_stack_client/types/alpha/tool_execution_step.py
+++ b/src/llama_stack_client/types/alpha/tool_execution_step.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import List, Optional
diff --git a/src/llama_stack_client/types/alpha/tool_response.py b/src/llama_stack_client/types/alpha/tool_response.py
index fb749f75..250ae9de 100644
--- a/src/llama_stack_client/types/alpha/tool_response.py
+++ b/src/llama_stack_client/types/alpha/tool_response.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import Dict, List, Union, Optional
diff --git a/src/llama_stack_client/types/alpha/tool_response_param.py b/src/llama_stack_client/types/alpha/tool_response_param.py
index e833211f..9d745da5 100644
--- a/src/llama_stack_client/types/alpha/tool_response_param.py
+++ b/src/llama_stack_client/types/alpha/tool_response_param.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/types/beta/__init__.py b/src/llama_stack_client/types/beta/__init__.py
new file mode 100644
index 00000000..aab8d1b8
--- /dev/null
+++ b/src/llama_stack_client/types/beta/__init__.py
@@ -0,0 +1,12 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from __future__ import annotations
+
+from .dataset_list_response import DatasetListResponse as DatasetListResponse
+from .list_datasets_response import ListDatasetsResponse as ListDatasetsResponse
+from .dataset_iterrows_params import DatasetIterrowsParams as DatasetIterrowsParams
+from .dataset_register_params import DatasetRegisterParams as DatasetRegisterParams
+from .dataset_appendrows_params import DatasetAppendrowsParams as DatasetAppendrowsParams
+from .dataset_iterrows_response import DatasetIterrowsResponse as DatasetIterrowsResponse
+from .dataset_register_response import DatasetRegisterResponse as DatasetRegisterResponse
+from .dataset_retrieve_response import DatasetRetrieveResponse as DatasetRetrieveResponse
diff --git a/src/llama_stack_client/types/dataset_appendrows_params.py b/src/llama_stack_client/types/beta/dataset_appendrows_params.py
similarity index 69%
rename from src/llama_stack_client/types/dataset_appendrows_params.py
rename to src/llama_stack_client/types/beta/dataset_appendrows_params.py
index 2e96e124..b929d790 100644
--- a/src/llama_stack_client/types/dataset_appendrows_params.py
+++ b/src/llama_stack_client/types/beta/dataset_appendrows_params.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/types/dataset_iterrows_params.py b/src/llama_stack_client/types/beta/dataset_iterrows_params.py
similarity index 66%
rename from src/llama_stack_client/types/dataset_iterrows_params.py
rename to src/llama_stack_client/types/beta/dataset_iterrows_params.py
index 99065312..262e0e3f 100644
--- a/src/llama_stack_client/types/dataset_iterrows_params.py
+++ b/src/llama_stack_client/types/beta/dataset_iterrows_params.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/types/dataset_iterrows_response.py b/src/llama_stack_client/types/beta/dataset_iterrows_response.py
similarity index 68%
rename from src/llama_stack_client/types/dataset_iterrows_response.py
rename to src/llama_stack_client/types/beta/dataset_iterrows_response.py
index 8681b018..5b23d46d 100644
--- a/src/llama_stack_client/types/dataset_iterrows_response.py
+++ b/src/llama_stack_client/types/beta/dataset_iterrows_response.py
@@ -1,8 +1,14 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import Dict, List, Union, Optional
-from .._models import BaseModel
+from ..._models import BaseModel
__all__ = ["DatasetIterrowsResponse"]
diff --git a/src/llama_stack_client/types/dataset_list_response.py b/src/llama_stack_client/types/beta/dataset_list_response.py
similarity index 87%
rename from src/llama_stack_client/types/dataset_list_response.py
rename to src/llama_stack_client/types/beta/dataset_list_response.py
index 7080e589..7e6c1141 100644
--- a/src/llama_stack_client/types/dataset_list_response.py
+++ b/src/llama_stack_client/types/beta/dataset_list_response.py
@@ -1,10 +1,16 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import Dict, List, Union, Optional
from typing_extensions import Literal, Annotated, TypeAlias
-from .._utils import PropertyInfo
-from .._models import BaseModel
+from ..._utils import PropertyInfo
+from ..._models import BaseModel
__all__ = [
"DatasetListResponse",
diff --git a/src/llama_stack_client/types/dataset_register_params.py b/src/llama_stack_client/types/beta/dataset_register_params.py
similarity index 93%
rename from src/llama_stack_client/types/dataset_register_params.py
rename to src/llama_stack_client/types/beta/dataset_register_params.py
index 6fd5db3f..75803a8a 100644
--- a/src/llama_stack_client/types/dataset_register_params.py
+++ b/src/llama_stack_client/types/beta/dataset_register_params.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/types/dataset_register_response.py b/src/llama_stack_client/types/beta/dataset_register_response.py
similarity index 85%
rename from src/llama_stack_client/types/dataset_register_response.py
rename to src/llama_stack_client/types/beta/dataset_register_response.py
index 8da590b8..e9bb82d2 100644
--- a/src/llama_stack_client/types/dataset_register_response.py
+++ b/src/llama_stack_client/types/beta/dataset_register_response.py
@@ -1,10 +1,16 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import Dict, List, Union, Optional
from typing_extensions import Literal, Annotated, TypeAlias
-from .._utils import PropertyInfo
-from .._models import BaseModel
+from ..._utils import PropertyInfo
+from ..._models import BaseModel
__all__ = ["DatasetRegisterResponse", "Source", "SourceUriDataSource", "SourceRowsDataSource"]
diff --git a/src/llama_stack_client/types/dataset_retrieve_response.py b/src/llama_stack_client/types/beta/dataset_retrieve_response.py
similarity index 85%
rename from src/llama_stack_client/types/dataset_retrieve_response.py
rename to src/llama_stack_client/types/beta/dataset_retrieve_response.py
index 6cda0a42..3358288d 100644
--- a/src/llama_stack_client/types/dataset_retrieve_response.py
+++ b/src/llama_stack_client/types/beta/dataset_retrieve_response.py
@@ -1,10 +1,16 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import Dict, List, Union, Optional
from typing_extensions import Literal, Annotated, TypeAlias
-from .._utils import PropertyInfo
-from .._models import BaseModel
+from ..._utils import PropertyInfo
+from ..._models import BaseModel
__all__ = ["DatasetRetrieveResponse", "Source", "SourceUriDataSource", "SourceRowsDataSource"]
diff --git a/src/llama_stack_client/types/list_datasets_response.py b/src/llama_stack_client/types/beta/list_datasets_response.py
similarity index 54%
rename from src/llama_stack_client/types/list_datasets_response.py
rename to src/llama_stack_client/types/beta/list_datasets_response.py
index 21c4b72a..4f71ae16 100644
--- a/src/llama_stack_client/types/list_datasets_response.py
+++ b/src/llama_stack_client/types/beta/list_datasets_response.py
@@ -1,6 +1,12 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-from .._models import BaseModel
+from ..._models import BaseModel
from .dataset_list_response import DatasetListResponse
__all__ = ["ListDatasetsResponse"]
diff --git a/src/llama_stack_client/types/chat/__init__.py b/src/llama_stack_client/types/chat/__init__.py
index 27720e7f..43f0751f 100644
--- a/src/llama_stack_client/types/chat/__init__.py
+++ b/src/llama_stack_client/types/chat/__init__.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/types/chat/completion_create_params.py b/src/llama_stack_client/types/chat/completion_create_params.py
index 1b930dfe..fb188ac1 100644
--- a/src/llama_stack_client/types/chat/completion_create_params.py
+++ b/src/llama_stack_client/types/chat/completion_create_params.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/types/chat/completion_create_response.py b/src/llama_stack_client/types/chat/completion_create_response.py
index 8e723db3..1aa5dc17 100644
--- a/src/llama_stack_client/types/chat/completion_create_response.py
+++ b/src/llama_stack_client/types/chat/completion_create_response.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import List, Union, Optional
diff --git a/src/llama_stack_client/types/chat/completion_list_params.py b/src/llama_stack_client/types/chat/completion_list_params.py
index 5fb77c2c..71f77f90 100644
--- a/src/llama_stack_client/types/chat/completion_list_params.py
+++ b/src/llama_stack_client/types/chat/completion_list_params.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/types/chat/completion_list_response.py b/src/llama_stack_client/types/chat/completion_list_response.py
index 6be72a0a..059c89c0 100644
--- a/src/llama_stack_client/types/chat/completion_list_response.py
+++ b/src/llama_stack_client/types/chat/completion_list_response.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import List, Union, Optional
diff --git a/src/llama_stack_client/types/chat/completion_retrieve_response.py b/src/llama_stack_client/types/chat/completion_retrieve_response.py
index e71e1175..19a03d7f 100644
--- a/src/llama_stack_client/types/chat/completion_retrieve_response.py
+++ b/src/llama_stack_client/types/chat/completion_retrieve_response.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import List, Union, Optional
diff --git a/src/llama_stack_client/types/chat_completion_chunk.py b/src/llama_stack_client/types/chat_completion_chunk.py
index 7a39e616..866601ca 100644
--- a/src/llama_stack_client/types/chat_completion_chunk.py
+++ b/src/llama_stack_client/types/chat_completion_chunk.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import List, Optional
diff --git a/src/llama_stack_client/types/completion_create_params.py b/src/llama_stack_client/types/completion_create_params.py
index 52acd356..0a651fb6 100644
--- a/src/llama_stack_client/types/completion_create_params.py
+++ b/src/llama_stack_client/types/completion_create_params.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/types/completion_create_response.py b/src/llama_stack_client/types/completion_create_response.py
index 0c43e68a..fa88f12a 100644
--- a/src/llama_stack_client/types/completion_create_response.py
+++ b/src/llama_stack_client/types/completion_create_response.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import List, Optional
diff --git a/src/llama_stack_client/types/conversation_create_params.py b/src/llama_stack_client/types/conversation_create_params.py
index 30a27324..c51245dd 100644
--- a/src/llama_stack_client/types/conversation_create_params.py
+++ b/src/llama_stack_client/types/conversation_create_params.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
@@ -15,15 +21,20 @@
"ItemOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentText",
"ItemOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentImage",
"ItemOpenAIResponseMessageContentUnionMember2",
- "ItemOpenAIResponseMessageContentUnionMember2Annotation",
- "ItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFileCitation",
- "ItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationCitation",
- "ItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationContainerFileCitation",
- "ItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFilePath",
- "ItemOpenAIResponseOutputMessageFunctionToolCall",
+ "ItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputText",
+ "ItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotation",
+ "ItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFileCitation",
+ "ItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationCitation",
+ "ItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationContainerFileCitation",
+ "ItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFilePath",
+ "ItemOpenAIResponseMessageContentUnionMember2OpenAIResponseContentPartRefusal",
+ "ItemOpenAIResponseOutputMessageWebSearchToolCall",
"ItemOpenAIResponseOutputMessageFileSearchToolCall",
"ItemOpenAIResponseOutputMessageFileSearchToolCallResult",
- "ItemOpenAIResponseOutputMessageWebSearchToolCall",
+ "ItemOpenAIResponseOutputMessageFunctionToolCall",
+ "ItemOpenAIResponseInputFunctionToolCallOutput",
+ "ItemOpenAIResponseMcpApprovalRequest",
+ "ItemOpenAIResponseMcpApprovalResponse",
"ItemOpenAIResponseOutputMessageMcpCall",
"ItemOpenAIResponseOutputMessageMcpListTools",
"ItemOpenAIResponseOutputMessageMcpListToolsTool",
@@ -63,7 +74,7 @@ class ItemOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageCont
]
-class ItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFileCitation(
+class ItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFileCitation(
TypedDict, total=False
):
file_id: Required[str]
@@ -79,7 +90,9 @@ class ItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnota
"""Annotation type identifier, always "file_citation" """
-class ItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationCitation(TypedDict, total=False):
+class ItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationCitation(
+ TypedDict, total=False
+):
end_index: Required[int]
"""End position of the citation span in the content"""
@@ -96,7 +109,7 @@ class ItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnota
"""URL of the referenced web resource"""
-class ItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationContainerFileCitation(
+class ItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationContainerFileCitation(
TypedDict, total=False
):
container_id: Required[str]
@@ -112,7 +125,9 @@ class ItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnota
type: Required[Literal["container_file_citation"]]
-class ItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFilePath(TypedDict, total=False):
+class ItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFilePath(
+ TypedDict, total=False
+):
file_id: Required[str]
index: Required[int]
@@ -120,22 +135,38 @@ class ItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnota
type: Required[Literal["file_path"]]
-ItemOpenAIResponseMessageContentUnionMember2Annotation: TypeAlias = Union[
- ItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFileCitation,
- ItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationCitation,
- ItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationContainerFileCitation,
- ItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFilePath,
+ItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotation: TypeAlias = Union[
+ ItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFileCitation,
+ ItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationCitation,
+ ItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationContainerFileCitation,
+ ItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFilePath,
]
-class ItemOpenAIResponseMessageContentUnionMember2(TypedDict, total=False):
- annotations: Required[Iterable[ItemOpenAIResponseMessageContentUnionMember2Annotation]]
+class ItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputText(TypedDict, total=False):
+ annotations: Required[
+ Iterable[ItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotation]
+ ]
text: Required[str]
type: Required[Literal["output_text"]]
+class ItemOpenAIResponseMessageContentUnionMember2OpenAIResponseContentPartRefusal(TypedDict, total=False):
+ refusal: Required[str]
+ """Refusal text supplied by the model"""
+
+ type: Required[Literal["refusal"]]
+ """Content part type identifier, always "refusal" """
+
+
+ItemOpenAIResponseMessageContentUnionMember2: TypeAlias = Union[
+ ItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputText,
+ ItemOpenAIResponseMessageContentUnionMember2OpenAIResponseContentPartRefusal,
+]
+
+
class ItemOpenAIResponseMessage(TypedDict, total=False):
content: Required[
Union[
@@ -154,24 +185,15 @@ class ItemOpenAIResponseMessage(TypedDict, total=False):
status: str
-class ItemOpenAIResponseOutputMessageFunctionToolCall(TypedDict, total=False):
- arguments: Required[str]
- """JSON string containing the function arguments"""
-
- call_id: Required[str]
- """Unique identifier for the function call"""
-
- name: Required[str]
- """Name of the function being called"""
-
- type: Required[Literal["function_call"]]
- """Tool call type identifier, always "function_call" """
+class ItemOpenAIResponseOutputMessageWebSearchToolCall(TypedDict, total=False):
+ id: Required[str]
+ """Unique identifier for this tool call"""
- id: str
- """(Optional) Additional identifier for the tool call"""
+ status: Required[str]
+ """Current status of the web search operation"""
- status: str
- """(Optional) Current status of the function call execution"""
+ type: Required[Literal["web_search_call"]]
+ """Tool call type identifier, always "web_search_call" """
class ItemOpenAIResponseOutputMessageFileSearchToolCallResult(TypedDict, total=False):
@@ -208,15 +230,60 @@ class ItemOpenAIResponseOutputMessageFileSearchToolCall(TypedDict, total=False):
"""(Optional) Search results returned by the file search operation"""
-class ItemOpenAIResponseOutputMessageWebSearchToolCall(TypedDict, total=False):
+class ItemOpenAIResponseOutputMessageFunctionToolCall(TypedDict, total=False):
+ arguments: Required[str]
+ """JSON string containing the function arguments"""
+
+ call_id: Required[str]
+ """Unique identifier for the function call"""
+
+ name: Required[str]
+ """Name of the function being called"""
+
+ type: Required[Literal["function_call"]]
+ """Tool call type identifier, always "function_call" """
+
+ id: str
+ """(Optional) Additional identifier for the tool call"""
+
+ status: str
+ """(Optional) Current status of the function call execution"""
+
+
+class ItemOpenAIResponseInputFunctionToolCallOutput(TypedDict, total=False):
+ call_id: Required[str]
+
+ output: Required[str]
+
+ type: Required[Literal["function_call_output"]]
+
+ id: str
+
+ status: str
+
+
+class ItemOpenAIResponseMcpApprovalRequest(TypedDict, total=False):
id: Required[str]
- """Unique identifier for this tool call"""
- status: Required[str]
- """Current status of the web search operation"""
+ arguments: Required[str]
- type: Required[Literal["web_search_call"]]
- """Tool call type identifier, always "web_search_call" """
+ name: Required[str]
+
+ server_label: Required[str]
+
+ type: Required[Literal["mcp_approval_request"]]
+
+
+class ItemOpenAIResponseMcpApprovalResponse(TypedDict, total=False):
+ approval_request_id: Required[str]
+
+ approve: Required[bool]
+
+ type: Required[Literal["mcp_approval_response"]]
+
+ id: str
+
+ reason: str
class ItemOpenAIResponseOutputMessageMcpCall(TypedDict, total=False):
@@ -269,9 +336,12 @@ class ItemOpenAIResponseOutputMessageMcpListTools(TypedDict, total=False):
Item: TypeAlias = Union[
ItemOpenAIResponseMessage,
- ItemOpenAIResponseOutputMessageFunctionToolCall,
- ItemOpenAIResponseOutputMessageFileSearchToolCall,
ItemOpenAIResponseOutputMessageWebSearchToolCall,
+ ItemOpenAIResponseOutputMessageFileSearchToolCall,
+ ItemOpenAIResponseOutputMessageFunctionToolCall,
+ ItemOpenAIResponseInputFunctionToolCallOutput,
+ ItemOpenAIResponseMcpApprovalRequest,
+ ItemOpenAIResponseMcpApprovalResponse,
ItemOpenAIResponseOutputMessageMcpCall,
ItemOpenAIResponseOutputMessageMcpListTools,
]
diff --git a/src/llama_stack_client/types/conversation_delete_response.py b/src/llama_stack_client/types/conversation_delete_response.py
index 2b675a69..1deb8eaa 100644
--- a/src/llama_stack_client/types/conversation_delete_response.py
+++ b/src/llama_stack_client/types/conversation_delete_response.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from .._models import BaseModel
diff --git a/src/llama_stack_client/types/conversation_object.py b/src/llama_stack_client/types/conversation_object.py
index 617be127..e1167f9c 100644
--- a/src/llama_stack_client/types/conversation_object.py
+++ b/src/llama_stack_client/types/conversation_object.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
import builtins
diff --git a/src/llama_stack_client/types/conversation_update_params.py b/src/llama_stack_client/types/conversation_update_params.py
index 3b34c327..d59a9d34 100644
--- a/src/llama_stack_client/types/conversation_update_params.py
+++ b/src/llama_stack_client/types/conversation_update_params.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/types/conversations/__init__.py b/src/llama_stack_client/types/conversations/__init__.py
index 88543e32..4c6632e4 100644
--- a/src/llama_stack_client/types/conversations/__init__.py
+++ b/src/llama_stack_client/types/conversations/__init__.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/types/conversations/item_create_params.py b/src/llama_stack_client/types/conversations/item_create_params.py
index 43176c98..8df31144 100644
--- a/src/llama_stack_client/types/conversations/item_create_params.py
+++ b/src/llama_stack_client/types/conversations/item_create_params.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
@@ -15,15 +21,20 @@
"ItemOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentText",
"ItemOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentImage",
"ItemOpenAIResponseMessageContentUnionMember2",
- "ItemOpenAIResponseMessageContentUnionMember2Annotation",
- "ItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFileCitation",
- "ItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationCitation",
- "ItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationContainerFileCitation",
- "ItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFilePath",
- "ItemOpenAIResponseOutputMessageFunctionToolCall",
+ "ItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputText",
+ "ItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotation",
+ "ItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFileCitation",
+ "ItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationCitation",
+ "ItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationContainerFileCitation",
+ "ItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFilePath",
+ "ItemOpenAIResponseMessageContentUnionMember2OpenAIResponseContentPartRefusal",
+ "ItemOpenAIResponseOutputMessageWebSearchToolCall",
"ItemOpenAIResponseOutputMessageFileSearchToolCall",
"ItemOpenAIResponseOutputMessageFileSearchToolCallResult",
- "ItemOpenAIResponseOutputMessageWebSearchToolCall",
+ "ItemOpenAIResponseOutputMessageFunctionToolCall",
+ "ItemOpenAIResponseInputFunctionToolCallOutput",
+ "ItemOpenAIResponseMcpApprovalRequest",
+ "ItemOpenAIResponseMcpApprovalResponse",
"ItemOpenAIResponseOutputMessageMcpCall",
"ItemOpenAIResponseOutputMessageMcpListTools",
"ItemOpenAIResponseOutputMessageMcpListToolsTool",
@@ -60,7 +71,7 @@ class ItemOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageCont
]
-class ItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFileCitation(
+class ItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFileCitation(
TypedDict, total=False
):
file_id: Required[str]
@@ -76,7 +87,9 @@ class ItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnota
"""Annotation type identifier, always "file_citation" """
-class ItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationCitation(TypedDict, total=False):
+class ItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationCitation(
+ TypedDict, total=False
+):
end_index: Required[int]
"""End position of the citation span in the content"""
@@ -93,7 +106,7 @@ class ItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnota
"""URL of the referenced web resource"""
-class ItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationContainerFileCitation(
+class ItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationContainerFileCitation(
TypedDict, total=False
):
container_id: Required[str]
@@ -109,7 +122,9 @@ class ItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnota
type: Required[Literal["container_file_citation"]]
-class ItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFilePath(TypedDict, total=False):
+class ItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFilePath(
+ TypedDict, total=False
+):
file_id: Required[str]
index: Required[int]
@@ -117,22 +132,38 @@ class ItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnota
type: Required[Literal["file_path"]]
-ItemOpenAIResponseMessageContentUnionMember2Annotation: TypeAlias = Union[
- ItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFileCitation,
- ItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationCitation,
- ItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationContainerFileCitation,
- ItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFilePath,
+ItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotation: TypeAlias = Union[
+ ItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFileCitation,
+ ItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationCitation,
+ ItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationContainerFileCitation,
+ ItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFilePath,
]
-class ItemOpenAIResponseMessageContentUnionMember2(TypedDict, total=False):
- annotations: Required[Iterable[ItemOpenAIResponseMessageContentUnionMember2Annotation]]
+class ItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputText(TypedDict, total=False):
+ annotations: Required[
+ Iterable[ItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotation]
+ ]
text: Required[str]
type: Required[Literal["output_text"]]
+class ItemOpenAIResponseMessageContentUnionMember2OpenAIResponseContentPartRefusal(TypedDict, total=False):
+ refusal: Required[str]
+ """Refusal text supplied by the model"""
+
+ type: Required[Literal["refusal"]]
+ """Content part type identifier, always "refusal" """
+
+
+ItemOpenAIResponseMessageContentUnionMember2: TypeAlias = Union[
+ ItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputText,
+ ItemOpenAIResponseMessageContentUnionMember2OpenAIResponseContentPartRefusal,
+]
+
+
class ItemOpenAIResponseMessage(TypedDict, total=False):
content: Required[
Union[
@@ -151,24 +182,15 @@ class ItemOpenAIResponseMessage(TypedDict, total=False):
status: str
-class ItemOpenAIResponseOutputMessageFunctionToolCall(TypedDict, total=False):
- arguments: Required[str]
- """JSON string containing the function arguments"""
-
- call_id: Required[str]
- """Unique identifier for the function call"""
-
- name: Required[str]
- """Name of the function being called"""
-
- type: Required[Literal["function_call"]]
- """Tool call type identifier, always "function_call" """
+class ItemOpenAIResponseOutputMessageWebSearchToolCall(TypedDict, total=False):
+ id: Required[str]
+ """Unique identifier for this tool call"""
- id: str
- """(Optional) Additional identifier for the tool call"""
+ status: Required[str]
+ """Current status of the web search operation"""
- status: str
- """(Optional) Current status of the function call execution"""
+ type: Required[Literal["web_search_call"]]
+ """Tool call type identifier, always "web_search_call" """
class ItemOpenAIResponseOutputMessageFileSearchToolCallResult(TypedDict, total=False):
@@ -205,15 +227,60 @@ class ItemOpenAIResponseOutputMessageFileSearchToolCall(TypedDict, total=False):
"""(Optional) Search results returned by the file search operation"""
-class ItemOpenAIResponseOutputMessageWebSearchToolCall(TypedDict, total=False):
+class ItemOpenAIResponseOutputMessageFunctionToolCall(TypedDict, total=False):
+ arguments: Required[str]
+ """JSON string containing the function arguments"""
+
+ call_id: Required[str]
+ """Unique identifier for the function call"""
+
+ name: Required[str]
+ """Name of the function being called"""
+
+ type: Required[Literal["function_call"]]
+ """Tool call type identifier, always "function_call" """
+
+ id: str
+ """(Optional) Additional identifier for the tool call"""
+
+ status: str
+ """(Optional) Current status of the function call execution"""
+
+
+class ItemOpenAIResponseInputFunctionToolCallOutput(TypedDict, total=False):
+ call_id: Required[str]
+
+ output: Required[str]
+
+ type: Required[Literal["function_call_output"]]
+
+ id: str
+
+ status: str
+
+
+class ItemOpenAIResponseMcpApprovalRequest(TypedDict, total=False):
id: Required[str]
- """Unique identifier for this tool call"""
- status: Required[str]
- """Current status of the web search operation"""
+ arguments: Required[str]
- type: Required[Literal["web_search_call"]]
- """Tool call type identifier, always "web_search_call" """
+ name: Required[str]
+
+ server_label: Required[str]
+
+ type: Required[Literal["mcp_approval_request"]]
+
+
+class ItemOpenAIResponseMcpApprovalResponse(TypedDict, total=False):
+ approval_request_id: Required[str]
+
+ approve: Required[bool]
+
+ type: Required[Literal["mcp_approval_response"]]
+
+ id: str
+
+ reason: str
class ItemOpenAIResponseOutputMessageMcpCall(TypedDict, total=False):
@@ -266,9 +333,12 @@ class ItemOpenAIResponseOutputMessageMcpListTools(TypedDict, total=False):
Item: TypeAlias = Union[
ItemOpenAIResponseMessage,
- ItemOpenAIResponseOutputMessageFunctionToolCall,
- ItemOpenAIResponseOutputMessageFileSearchToolCall,
ItemOpenAIResponseOutputMessageWebSearchToolCall,
+ ItemOpenAIResponseOutputMessageFileSearchToolCall,
+ ItemOpenAIResponseOutputMessageFunctionToolCall,
+ ItemOpenAIResponseInputFunctionToolCallOutput,
+ ItemOpenAIResponseMcpApprovalRequest,
+ ItemOpenAIResponseMcpApprovalResponse,
ItemOpenAIResponseOutputMessageMcpCall,
ItemOpenAIResponseOutputMessageMcpListTools,
]
diff --git a/src/llama_stack_client/types/conversations/item_create_response.py b/src/llama_stack_client/types/conversations/item_create_response.py
index d998dda5..c382e2b9 100644
--- a/src/llama_stack_client/types/conversations/item_create_response.py
+++ b/src/llama_stack_client/types/conversations/item_create_response.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import Dict, List, Union, Optional
@@ -14,15 +20,20 @@
"DataOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentText",
"DataOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentImage",
"DataOpenAIResponseMessageContentUnionMember2",
- "DataOpenAIResponseMessageContentUnionMember2Annotation",
- "DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFileCitation",
- "DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationCitation",
- "DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationContainerFileCitation",
- "DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFilePath",
- "DataOpenAIResponseOutputMessageFunctionToolCall",
+ "DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputText",
+ "DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotation",
+ "DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFileCitation",
+ "DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationCitation",
+ "DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationContainerFileCitation",
+ "DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFilePath",
+ "DataOpenAIResponseMessageContentUnionMember2OpenAIResponseContentPartRefusal",
+ "DataOpenAIResponseOutputMessageWebSearchToolCall",
"DataOpenAIResponseOutputMessageFileSearchToolCall",
"DataOpenAIResponseOutputMessageFileSearchToolCallResult",
- "DataOpenAIResponseOutputMessageWebSearchToolCall",
+ "DataOpenAIResponseOutputMessageFunctionToolCall",
+ "DataOpenAIResponseInputFunctionToolCallOutput",
+ "DataOpenAIResponseMcpApprovalRequest",
+ "DataOpenAIResponseMcpApprovalResponse",
"DataOpenAIResponseOutputMessageMcpCall",
"DataOpenAIResponseOutputMessageMcpListTools",
"DataOpenAIResponseOutputMessageMcpListToolsTool",
@@ -57,7 +68,9 @@ class DataOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageCont
]
-class DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFileCitation(BaseModel):
+class DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFileCitation(
+ BaseModel
+):
file_id: str
"""Unique identifier of the referenced file"""
@@ -71,7 +84,9 @@ class DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnota
"""Annotation type identifier, always "file_citation" """
-class DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationCitation(BaseModel):
+class DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationCitation(
+ BaseModel
+):
end_index: int
"""End position of the citation span in the content"""
@@ -88,7 +103,9 @@ class DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnota
"""URL of the referenced web resource"""
-class DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationContainerFileCitation(BaseModel):
+class DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationContainerFileCitation(
+ BaseModel
+):
container_id: str
end_index: int
@@ -102,7 +119,9 @@ class DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnota
type: Literal["container_file_citation"]
-class DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFilePath(BaseModel):
+class DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFilePath(
+ BaseModel
+):
file_id: str
index: int
@@ -110,25 +129,44 @@ class DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnota
type: Literal["file_path"]
-DataOpenAIResponseMessageContentUnionMember2Annotation: TypeAlias = Annotated[
+DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotation: TypeAlias = Annotated[
Union[
- DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFileCitation,
- DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationCitation,
- DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationContainerFileCitation,
- DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFilePath,
+ DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFileCitation,
+ DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationCitation,
+ DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationContainerFileCitation,
+ DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFilePath,
],
PropertyInfo(discriminator="type"),
]
-class DataOpenAIResponseMessageContentUnionMember2(BaseModel):
- annotations: List[DataOpenAIResponseMessageContentUnionMember2Annotation]
+class DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputText(BaseModel):
+ annotations: List[
+ DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotation
+ ]
text: str
type: Literal["output_text"]
+class DataOpenAIResponseMessageContentUnionMember2OpenAIResponseContentPartRefusal(BaseModel):
+ refusal: str
+ """Refusal text supplied by the model"""
+
+ type: Literal["refusal"]
+ """Content part type identifier, always "refusal" """
+
+
+DataOpenAIResponseMessageContentUnionMember2: TypeAlias = Annotated[
+ Union[
+ DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputText,
+ DataOpenAIResponseMessageContentUnionMember2OpenAIResponseContentPartRefusal,
+ ],
+ PropertyInfo(discriminator="type"),
+]
+
+
class DataOpenAIResponseMessage(BaseModel):
content: Union[
str, List[DataOpenAIResponseMessageContentUnionMember1], List[DataOpenAIResponseMessageContentUnionMember2]
@@ -143,24 +181,15 @@ class DataOpenAIResponseMessage(BaseModel):
status: Optional[str] = None
-class DataOpenAIResponseOutputMessageFunctionToolCall(BaseModel):
- arguments: str
- """JSON string containing the function arguments"""
-
- call_id: str
- """Unique identifier for the function call"""
-
- name: str
- """Name of the function being called"""
-
- type: Literal["function_call"]
- """Tool call type identifier, always "function_call" """
+class DataOpenAIResponseOutputMessageWebSearchToolCall(BaseModel):
+ id: str
+ """Unique identifier for this tool call"""
- id: Optional[str] = None
- """(Optional) Additional identifier for the tool call"""
+ status: str
+ """Current status of the web search operation"""
- status: Optional[str] = None
- """(Optional) Current status of the function call execution"""
+ type: Literal["web_search_call"]
+ """Tool call type identifier, always "web_search_call" """
class DataOpenAIResponseOutputMessageFileSearchToolCallResult(BaseModel):
@@ -197,15 +226,60 @@ class DataOpenAIResponseOutputMessageFileSearchToolCall(BaseModel):
"""(Optional) Search results returned by the file search operation"""
-class DataOpenAIResponseOutputMessageWebSearchToolCall(BaseModel):
+class DataOpenAIResponseOutputMessageFunctionToolCall(BaseModel):
+ arguments: str
+ """JSON string containing the function arguments"""
+
+ call_id: str
+ """Unique identifier for the function call"""
+
+ name: str
+ """Name of the function being called"""
+
+ type: Literal["function_call"]
+ """Tool call type identifier, always "function_call" """
+
+ id: Optional[str] = None
+ """(Optional) Additional identifier for the tool call"""
+
+ status: Optional[str] = None
+ """(Optional) Current status of the function call execution"""
+
+
+class DataOpenAIResponseInputFunctionToolCallOutput(BaseModel):
+ call_id: str
+
+ output: str
+
+ type: Literal["function_call_output"]
+
+ id: Optional[str] = None
+
+ status: Optional[str] = None
+
+
+class DataOpenAIResponseMcpApprovalRequest(BaseModel):
id: str
- """Unique identifier for this tool call"""
- status: str
- """Current status of the web search operation"""
+ arguments: str
- type: Literal["web_search_call"]
- """Tool call type identifier, always "web_search_call" """
+ name: str
+
+ server_label: str
+
+ type: Literal["mcp_approval_request"]
+
+
+class DataOpenAIResponseMcpApprovalResponse(BaseModel):
+ approval_request_id: str
+
+ approve: bool
+
+ type: Literal["mcp_approval_response"]
+
+ id: Optional[str] = None
+
+ reason: Optional[str] = None
class DataOpenAIResponseOutputMessageMcpCall(BaseModel):
@@ -259,9 +333,12 @@ class DataOpenAIResponseOutputMessageMcpListTools(BaseModel):
Data: TypeAlias = Annotated[
Union[
DataOpenAIResponseMessage,
- DataOpenAIResponseOutputMessageFunctionToolCall,
- DataOpenAIResponseOutputMessageFileSearchToolCall,
DataOpenAIResponseOutputMessageWebSearchToolCall,
+ DataOpenAIResponseOutputMessageFileSearchToolCall,
+ DataOpenAIResponseOutputMessageFunctionToolCall,
+ DataOpenAIResponseInputFunctionToolCallOutput,
+ DataOpenAIResponseMcpApprovalRequest,
+ DataOpenAIResponseMcpApprovalResponse,
DataOpenAIResponseOutputMessageMcpCall,
DataOpenAIResponseOutputMessageMcpListTools,
],
diff --git a/src/llama_stack_client/types/conversations/item_get_response.py b/src/llama_stack_client/types/conversations/item_get_response.py
index 2bcb6c20..9f8d4bda 100644
--- a/src/llama_stack_client/types/conversations/item_get_response.py
+++ b/src/llama_stack_client/types/conversations/item_get_response.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import Dict, List, Union, Optional
@@ -13,15 +19,20 @@
"OpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentText",
"OpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentImage",
"OpenAIResponseMessageContentUnionMember2",
- "OpenAIResponseMessageContentUnionMember2Annotation",
- "OpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFileCitation",
- "OpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationCitation",
- "OpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationContainerFileCitation",
- "OpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFilePath",
- "OpenAIResponseOutputMessageFunctionToolCall",
+ "OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputText",
+ "OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotation",
+ "OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFileCitation",
+ "OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationCitation",
+ "OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationContainerFileCitation",
+ "OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFilePath",
+ "OpenAIResponseMessageContentUnionMember2OpenAIResponseContentPartRefusal",
+ "OpenAIResponseOutputMessageWebSearchToolCall",
"OpenAIResponseOutputMessageFileSearchToolCall",
"OpenAIResponseOutputMessageFileSearchToolCallResult",
- "OpenAIResponseOutputMessageWebSearchToolCall",
+ "OpenAIResponseOutputMessageFunctionToolCall",
+ "OpenAIResponseInputFunctionToolCallOutput",
+ "OpenAIResponseMcpApprovalRequest",
+ "OpenAIResponseMcpApprovalResponse",
"OpenAIResponseOutputMessageMcpCall",
"OpenAIResponseOutputMessageMcpListTools",
"OpenAIResponseOutputMessageMcpListToolsTool",
@@ -56,7 +67,9 @@ class OpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentI
]
-class OpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFileCitation(BaseModel):
+class OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFileCitation(
+ BaseModel
+):
file_id: str
"""Unique identifier of the referenced file"""
@@ -70,7 +83,9 @@ class OpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotation
"""Annotation type identifier, always "file_citation" """
-class OpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationCitation(BaseModel):
+class OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationCitation(
+ BaseModel
+):
end_index: int
"""End position of the citation span in the content"""
@@ -87,7 +102,9 @@ class OpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotation
"""URL of the referenced web resource"""
-class OpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationContainerFileCitation(BaseModel):
+class OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationContainerFileCitation(
+ BaseModel
+):
container_id: str
end_index: int
@@ -101,7 +118,9 @@ class OpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotation
type: Literal["container_file_citation"]
-class OpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFilePath(BaseModel):
+class OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFilePath(
+ BaseModel
+):
file_id: str
index: int
@@ -109,25 +128,42 @@ class OpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotation
type: Literal["file_path"]
-OpenAIResponseMessageContentUnionMember2Annotation: TypeAlias = Annotated[
+OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotation: TypeAlias = Annotated[
Union[
- OpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFileCitation,
- OpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationCitation,
- OpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationContainerFileCitation,
- OpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFilePath,
+ OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFileCitation,
+ OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationCitation,
+ OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationContainerFileCitation,
+ OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFilePath,
],
PropertyInfo(discriminator="type"),
]
-class OpenAIResponseMessageContentUnionMember2(BaseModel):
- annotations: List[OpenAIResponseMessageContentUnionMember2Annotation]
+class OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputText(BaseModel):
+ annotations: List[OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotation]
text: str
type: Literal["output_text"]
+class OpenAIResponseMessageContentUnionMember2OpenAIResponseContentPartRefusal(BaseModel):
+ refusal: str
+ """Refusal text supplied by the model"""
+
+ type: Literal["refusal"]
+ """Content part type identifier, always "refusal" """
+
+
+OpenAIResponseMessageContentUnionMember2: TypeAlias = Annotated[
+ Union[
+ OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputText,
+ OpenAIResponseMessageContentUnionMember2OpenAIResponseContentPartRefusal,
+ ],
+ PropertyInfo(discriminator="type"),
+]
+
+
class OpenAIResponseMessage(BaseModel):
content: Union[str, List[OpenAIResponseMessageContentUnionMember1], List[OpenAIResponseMessageContentUnionMember2]]
@@ -140,24 +176,15 @@ class OpenAIResponseMessage(BaseModel):
status: Optional[str] = None
-class OpenAIResponseOutputMessageFunctionToolCall(BaseModel):
- arguments: str
- """JSON string containing the function arguments"""
-
- call_id: str
- """Unique identifier for the function call"""
-
- name: str
- """Name of the function being called"""
-
- type: Literal["function_call"]
- """Tool call type identifier, always "function_call" """
+class OpenAIResponseOutputMessageWebSearchToolCall(BaseModel):
+ id: str
+ """Unique identifier for this tool call"""
- id: Optional[str] = None
- """(Optional) Additional identifier for the tool call"""
+ status: str
+ """Current status of the web search operation"""
- status: Optional[str] = None
- """(Optional) Current status of the function call execution"""
+ type: Literal["web_search_call"]
+ """Tool call type identifier, always "web_search_call" """
class OpenAIResponseOutputMessageFileSearchToolCallResult(BaseModel):
@@ -194,15 +221,60 @@ class OpenAIResponseOutputMessageFileSearchToolCall(BaseModel):
"""(Optional) Search results returned by the file search operation"""
-class OpenAIResponseOutputMessageWebSearchToolCall(BaseModel):
+class OpenAIResponseOutputMessageFunctionToolCall(BaseModel):
+ arguments: str
+ """JSON string containing the function arguments"""
+
+ call_id: str
+ """Unique identifier for the function call"""
+
+ name: str
+ """Name of the function being called"""
+
+ type: Literal["function_call"]
+ """Tool call type identifier, always "function_call" """
+
+ id: Optional[str] = None
+ """(Optional) Additional identifier for the tool call"""
+
+ status: Optional[str] = None
+ """(Optional) Current status of the function call execution"""
+
+
+class OpenAIResponseInputFunctionToolCallOutput(BaseModel):
+ call_id: str
+
+ output: str
+
+ type: Literal["function_call_output"]
+
+ id: Optional[str] = None
+
+ status: Optional[str] = None
+
+
+class OpenAIResponseMcpApprovalRequest(BaseModel):
id: str
- """Unique identifier for this tool call"""
- status: str
- """Current status of the web search operation"""
+ arguments: str
- type: Literal["web_search_call"]
- """Tool call type identifier, always "web_search_call" """
+ name: str
+
+ server_label: str
+
+ type: Literal["mcp_approval_request"]
+
+
+class OpenAIResponseMcpApprovalResponse(BaseModel):
+ approval_request_id: str
+
+ approve: bool
+
+ type: Literal["mcp_approval_response"]
+
+ id: Optional[str] = None
+
+ reason: Optional[str] = None
class OpenAIResponseOutputMessageMcpCall(BaseModel):
@@ -256,9 +328,12 @@ class OpenAIResponseOutputMessageMcpListTools(BaseModel):
ItemGetResponse: TypeAlias = Annotated[
Union[
OpenAIResponseMessage,
- OpenAIResponseOutputMessageFunctionToolCall,
- OpenAIResponseOutputMessageFileSearchToolCall,
OpenAIResponseOutputMessageWebSearchToolCall,
+ OpenAIResponseOutputMessageFileSearchToolCall,
+ OpenAIResponseOutputMessageFunctionToolCall,
+ OpenAIResponseInputFunctionToolCallOutput,
+ OpenAIResponseMcpApprovalRequest,
+ OpenAIResponseMcpApprovalResponse,
OpenAIResponseOutputMessageMcpCall,
OpenAIResponseOutputMessageMcpListTools,
],
diff --git a/src/llama_stack_client/types/conversations/item_list_params.py b/src/llama_stack_client/types/conversations/item_list_params.py
index e90c51a0..768a91b7 100644
--- a/src/llama_stack_client/types/conversations/item_list_params.py
+++ b/src/llama_stack_client/types/conversations/item_list_params.py
@@ -1,36 +1,38 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
-from typing import List, Union
-from typing_extensions import Literal, Required, TypedDict
+from typing import List
+from typing_extensions import Literal, TypedDict
__all__ = ["ItemListParams"]
class ItemListParams(TypedDict, total=False):
- after: Required[Union[str, object]]
+ after: str
"""An item ID to list items after, used in pagination."""
- include: Required[
- Union[
- List[
- Literal[
- "code_interpreter_call.outputs",
- "computer_call_output.output.image_url",
- "file_search_call.results",
- "message.input_image.image_url",
- "message.output_text.logprobs",
- "reasoning.encrypted_content",
- ]
- ],
- object,
+ include: List[
+ Literal[
+ "web_search_call.action.sources",
+ "code_interpreter_call.outputs",
+ "computer_call_output.output.image_url",
+ "file_search_call.results",
+ "message.input_image.image_url",
+ "message.output_text.logprobs",
+ "reasoning.encrypted_content",
]
]
"""Specify additional output data to include in the response."""
- limit: Required[Union[int, object]]
+ limit: int
"""A limit on the number of objects to be returned (1-100, default 20)."""
- order: Required[Union[Literal["asc", "desc"], object]]
+ order: Literal["asc", "desc"]
"""The order to return items in (asc or desc, default desc)."""
diff --git a/src/llama_stack_client/types/conversations/item_list_response.py b/src/llama_stack_client/types/conversations/item_list_response.py
index c0c3e06f..b95f56fb 100644
--- a/src/llama_stack_client/types/conversations/item_list_response.py
+++ b/src/llama_stack_client/types/conversations/item_list_response.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import Dict, List, Union, Optional
@@ -8,28 +14,32 @@
__all__ = [
"ItemListResponse",
- "Data",
- "DataOpenAIResponseMessage",
- "DataOpenAIResponseMessageContentUnionMember1",
- "DataOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentText",
- "DataOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentImage",
- "DataOpenAIResponseMessageContentUnionMember2",
- "DataOpenAIResponseMessageContentUnionMember2Annotation",
- "DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFileCitation",
- "DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationCitation",
- "DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationContainerFileCitation",
- "DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFilePath",
- "DataOpenAIResponseOutputMessageFunctionToolCall",
- "DataOpenAIResponseOutputMessageFileSearchToolCall",
- "DataOpenAIResponseOutputMessageFileSearchToolCallResult",
- "DataOpenAIResponseOutputMessageWebSearchToolCall",
- "DataOpenAIResponseOutputMessageMcpCall",
- "DataOpenAIResponseOutputMessageMcpListTools",
- "DataOpenAIResponseOutputMessageMcpListToolsTool",
+ "OpenAIResponseMessage",
+ "OpenAIResponseMessageContentUnionMember1",
+ "OpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentText",
+ "OpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentImage",
+ "OpenAIResponseMessageContentUnionMember2",
+ "OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputText",
+ "OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotation",
+ "OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFileCitation",
+ "OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationCitation",
+ "OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationContainerFileCitation",
+ "OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFilePath",
+ "OpenAIResponseMessageContentUnionMember2OpenAIResponseContentPartRefusal",
+ "OpenAIResponseOutputMessageWebSearchToolCall",
+ "OpenAIResponseOutputMessageFileSearchToolCall",
+ "OpenAIResponseOutputMessageFileSearchToolCallResult",
+ "OpenAIResponseOutputMessageFunctionToolCall",
+ "OpenAIResponseInputFunctionToolCallOutput",
+ "OpenAIResponseMcpApprovalRequest",
+ "OpenAIResponseMcpApprovalResponse",
+ "OpenAIResponseOutputMessageMcpCall",
+ "OpenAIResponseOutputMessageMcpListTools",
+ "OpenAIResponseOutputMessageMcpListToolsTool",
]
-class DataOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentText(BaseModel):
+class OpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentText(BaseModel):
text: str
"""The text content of the input message"""
@@ -37,7 +47,7 @@ class DataOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageCont
"""Content type identifier, always "input_text" """
-class DataOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentImage(BaseModel):
+class OpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentImage(BaseModel):
detail: Literal["low", "high", "auto"]
"""Level of detail for image processing, can be "low", "high", or "auto" """
@@ -48,16 +58,18 @@ class DataOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageCont
"""(Optional) URL of the image content"""
-DataOpenAIResponseMessageContentUnionMember1: TypeAlias = Annotated[
+OpenAIResponseMessageContentUnionMember1: TypeAlias = Annotated[
Union[
- DataOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentText,
- DataOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentImage,
+ OpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentText,
+ OpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentImage,
],
PropertyInfo(discriminator="type"),
]
-class DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFileCitation(BaseModel):
+class OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFileCitation(
+ BaseModel
+):
file_id: str
"""Unique identifier of the referenced file"""
@@ -71,7 +83,9 @@ class DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnota
"""Annotation type identifier, always "file_citation" """
-class DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationCitation(BaseModel):
+class OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationCitation(
+ BaseModel
+):
end_index: int
"""End position of the citation span in the content"""
@@ -88,7 +102,9 @@ class DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnota
"""URL of the referenced web resource"""
-class DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationContainerFileCitation(BaseModel):
+class OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationContainerFileCitation(
+ BaseModel
+):
container_id: str
end_index: int
@@ -102,7 +118,9 @@ class DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnota
type: Literal["container_file_citation"]
-class DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFilePath(BaseModel):
+class OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFilePath(
+ BaseModel
+):
file_id: str
index: int
@@ -110,29 +128,44 @@ class DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnota
type: Literal["file_path"]
-DataOpenAIResponseMessageContentUnionMember2Annotation: TypeAlias = Annotated[
+OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotation: TypeAlias = Annotated[
Union[
- DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFileCitation,
- DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationCitation,
- DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationContainerFileCitation,
- DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFilePath,
+ OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFileCitation,
+ OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationCitation,
+ OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationContainerFileCitation,
+ OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFilePath,
],
PropertyInfo(discriminator="type"),
]
-class DataOpenAIResponseMessageContentUnionMember2(BaseModel):
- annotations: List[DataOpenAIResponseMessageContentUnionMember2Annotation]
+class OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputText(BaseModel):
+ annotations: List[OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotation]
text: str
type: Literal["output_text"]
-class DataOpenAIResponseMessage(BaseModel):
- content: Union[
- str, List[DataOpenAIResponseMessageContentUnionMember1], List[DataOpenAIResponseMessageContentUnionMember2]
- ]
+class OpenAIResponseMessageContentUnionMember2OpenAIResponseContentPartRefusal(BaseModel):
+ refusal: str
+ """Refusal text supplied by the model"""
+
+ type: Literal["refusal"]
+ """Content part type identifier, always "refusal" """
+
+
+OpenAIResponseMessageContentUnionMember2: TypeAlias = Annotated[
+ Union[
+ OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputText,
+ OpenAIResponseMessageContentUnionMember2OpenAIResponseContentPartRefusal,
+ ],
+ PropertyInfo(discriminator="type"),
+]
+
+
+class OpenAIResponseMessage(BaseModel):
+ content: Union[str, List[OpenAIResponseMessageContentUnionMember1], List[OpenAIResponseMessageContentUnionMember2]]
role: Literal["system", "developer", "user", "assistant"]
@@ -143,27 +176,18 @@ class DataOpenAIResponseMessage(BaseModel):
status: Optional[str] = None
-class DataOpenAIResponseOutputMessageFunctionToolCall(BaseModel):
- arguments: str
- """JSON string containing the function arguments"""
-
- call_id: str
- """Unique identifier for the function call"""
-
- name: str
- """Name of the function being called"""
-
- type: Literal["function_call"]
- """Tool call type identifier, always "function_call" """
+class OpenAIResponseOutputMessageWebSearchToolCall(BaseModel):
+ id: str
+ """Unique identifier for this tool call"""
- id: Optional[str] = None
- """(Optional) Additional identifier for the tool call"""
+ status: str
+ """Current status of the web search operation"""
- status: Optional[str] = None
- """(Optional) Current status of the function call execution"""
+ type: Literal["web_search_call"]
+ """Tool call type identifier, always "web_search_call" """
-class DataOpenAIResponseOutputMessageFileSearchToolCallResult(BaseModel):
+class OpenAIResponseOutputMessageFileSearchToolCallResult(BaseModel):
attributes: Dict[str, Union[bool, float, str, List[object], object, None]]
"""(Optional) Key-value attributes associated with the file"""
@@ -180,7 +204,7 @@ class DataOpenAIResponseOutputMessageFileSearchToolCallResult(BaseModel):
"""Text content of the search result"""
-class DataOpenAIResponseOutputMessageFileSearchToolCall(BaseModel):
+class OpenAIResponseOutputMessageFileSearchToolCall(BaseModel):
id: str
"""Unique identifier for this tool call"""
@@ -193,22 +217,67 @@ class DataOpenAIResponseOutputMessageFileSearchToolCall(BaseModel):
type: Literal["file_search_call"]
"""Tool call type identifier, always "file_search_call" """
- results: Optional[List[DataOpenAIResponseOutputMessageFileSearchToolCallResult]] = None
+ results: Optional[List[OpenAIResponseOutputMessageFileSearchToolCallResult]] = None
"""(Optional) Search results returned by the file search operation"""
-class DataOpenAIResponseOutputMessageWebSearchToolCall(BaseModel):
+class OpenAIResponseOutputMessageFunctionToolCall(BaseModel):
+ arguments: str
+ """JSON string containing the function arguments"""
+
+ call_id: str
+ """Unique identifier for the function call"""
+
+ name: str
+ """Name of the function being called"""
+
+ type: Literal["function_call"]
+ """Tool call type identifier, always "function_call" """
+
+ id: Optional[str] = None
+ """(Optional) Additional identifier for the tool call"""
+
+ status: Optional[str] = None
+ """(Optional) Current status of the function call execution"""
+
+
+class OpenAIResponseInputFunctionToolCallOutput(BaseModel):
+ call_id: str
+
+ output: str
+
+ type: Literal["function_call_output"]
+
+ id: Optional[str] = None
+
+ status: Optional[str] = None
+
+
+class OpenAIResponseMcpApprovalRequest(BaseModel):
id: str
- """Unique identifier for this tool call"""
- status: str
- """Current status of the web search operation"""
+ arguments: str
- type: Literal["web_search_call"]
- """Tool call type identifier, always "web_search_call" """
+ name: str
+
+ server_label: str
+
+ type: Literal["mcp_approval_request"]
+
+
+class OpenAIResponseMcpApprovalResponse(BaseModel):
+ approval_request_id: str
+
+ approve: bool
+ type: Literal["mcp_approval_response"]
-class DataOpenAIResponseOutputMessageMcpCall(BaseModel):
+ id: Optional[str] = None
+
+ reason: Optional[str] = None
+
+
+class OpenAIResponseOutputMessageMcpCall(BaseModel):
id: str
"""Unique identifier for this MCP call"""
@@ -231,7 +300,7 @@ class DataOpenAIResponseOutputMessageMcpCall(BaseModel):
"""(Optional) Output result from the successful MCP call"""
-class DataOpenAIResponseOutputMessageMcpListToolsTool(BaseModel):
+class OpenAIResponseOutputMessageMcpListToolsTool(BaseModel):
input_schema: Dict[str, Union[bool, float, str, List[object], object, None]]
"""JSON schema defining the tool's input parameters"""
@@ -242,40 +311,31 @@ class DataOpenAIResponseOutputMessageMcpListToolsTool(BaseModel):
"""(Optional) Description of what the tool does"""
-class DataOpenAIResponseOutputMessageMcpListTools(BaseModel):
+class OpenAIResponseOutputMessageMcpListTools(BaseModel):
id: str
"""Unique identifier for this MCP list tools operation"""
server_label: str
"""Label identifying the MCP server providing the tools"""
- tools: List[DataOpenAIResponseOutputMessageMcpListToolsTool]
+ tools: List[OpenAIResponseOutputMessageMcpListToolsTool]
"""List of available tools provided by the MCP server"""
type: Literal["mcp_list_tools"]
"""Tool call type identifier, always "mcp_list_tools" """
-Data: TypeAlias = Annotated[
+ItemListResponse: TypeAlias = Annotated[
Union[
- DataOpenAIResponseMessage,
- DataOpenAIResponseOutputMessageFunctionToolCall,
- DataOpenAIResponseOutputMessageFileSearchToolCall,
- DataOpenAIResponseOutputMessageWebSearchToolCall,
- DataOpenAIResponseOutputMessageMcpCall,
- DataOpenAIResponseOutputMessageMcpListTools,
+ OpenAIResponseMessage,
+ OpenAIResponseOutputMessageWebSearchToolCall,
+ OpenAIResponseOutputMessageFileSearchToolCall,
+ OpenAIResponseOutputMessageFunctionToolCall,
+ OpenAIResponseInputFunctionToolCallOutput,
+ OpenAIResponseMcpApprovalRequest,
+ OpenAIResponseMcpApprovalResponse,
+ OpenAIResponseOutputMessageMcpCall,
+ OpenAIResponseOutputMessageMcpListTools,
],
PropertyInfo(discriminator="type"),
]
-
-
-class ItemListResponse(BaseModel):
- data: List[Data]
-
- has_more: bool
-
- object: str
-
- first_id: Optional[str] = None
-
- last_id: Optional[str] = None
diff --git a/src/llama_stack_client/types/create_embeddings_response.py b/src/llama_stack_client/types/create_embeddings_response.py
index b5d04f1b..b4cd091a 100644
--- a/src/llama_stack_client/types/create_embeddings_response.py
+++ b/src/llama_stack_client/types/create_embeddings_response.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import List, Union
diff --git a/src/llama_stack_client/types/create_response.py b/src/llama_stack_client/types/create_response.py
index fbb519f0..e26f1cf1 100644
--- a/src/llama_stack_client/types/create_response.py
+++ b/src/llama_stack_client/types/create_response.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import Dict, List, Union, Optional
diff --git a/src/llama_stack_client/types/delete_file_response.py b/src/llama_stack_client/types/delete_file_response.py
index 2188556f..281b2a8f 100644
--- a/src/llama_stack_client/types/delete_file_response.py
+++ b/src/llama_stack_client/types/delete_file_response.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing_extensions import Literal
diff --git a/src/llama_stack_client/types/embedding_create_params.py b/src/llama_stack_client/types/embedding_create_params.py
index 2710ba55..12ce2799 100644
--- a/src/llama_stack_client/types/embedding_create_params.py
+++ b/src/llama_stack_client/types/embedding_create_params.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/types/file.py b/src/llama_stack_client/types/file.py
index b87d04ef..12769adb 100644
--- a/src/llama_stack_client/types/file.py
+++ b/src/llama_stack_client/types/file.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing_extensions import Literal
diff --git a/src/llama_stack_client/types/file_create_params.py b/src/llama_stack_client/types/file_create_params.py
index 2be39a7a..de0e641c 100644
--- a/src/llama_stack_client/types/file_create_params.py
+++ b/src/llama_stack_client/types/file_create_params.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/types/file_list_params.py b/src/llama_stack_client/types/file_list_params.py
index 2b504e53..704bbde9 100644
--- a/src/llama_stack_client/types/file_list_params.py
+++ b/src/llama_stack_client/types/file_list_params.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/types/health_info.py b/src/llama_stack_client/types/health_info.py
index 86410ed0..032d5c81 100644
--- a/src/llama_stack_client/types/health_info.py
+++ b/src/llama_stack_client/types/health_info.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing_extensions import Literal
diff --git a/src/llama_stack_client/types/list_files_response.py b/src/llama_stack_client/types/list_files_response.py
index fb42f298..7d2c166f 100644
--- a/src/llama_stack_client/types/list_files_response.py
+++ b/src/llama_stack_client/types/list_files_response.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import List
diff --git a/src/llama_stack_client/types/list_models_response.py b/src/llama_stack_client/types/list_models_response.py
index a36896b8..8efab86c 100644
--- a/src/llama_stack_client/types/list_models_response.py
+++ b/src/llama_stack_client/types/list_models_response.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from .._models import BaseModel
diff --git a/src/llama_stack_client/types/list_providers_response.py b/src/llama_stack_client/types/list_providers_response.py
index c75b6880..d3d0efb8 100644
--- a/src/llama_stack_client/types/list_providers_response.py
+++ b/src/llama_stack_client/types/list_providers_response.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from .._models import BaseModel
diff --git a/src/llama_stack_client/types/list_routes_response.py b/src/llama_stack_client/types/list_routes_response.py
index d038fe6d..8c690969 100644
--- a/src/llama_stack_client/types/list_routes_response.py
+++ b/src/llama_stack_client/types/list_routes_response.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from .._models import BaseModel
diff --git a/src/llama_stack_client/types/list_scoring_functions_response.py b/src/llama_stack_client/types/list_scoring_functions_response.py
index 2c044ba1..7e513afa 100644
--- a/src/llama_stack_client/types/list_scoring_functions_response.py
+++ b/src/llama_stack_client/types/list_scoring_functions_response.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from .._models import BaseModel
diff --git a/src/llama_stack_client/types/list_shields_response.py b/src/llama_stack_client/types/list_shields_response.py
index fabbc9da..4cbd9a61 100644
--- a/src/llama_stack_client/types/list_shields_response.py
+++ b/src/llama_stack_client/types/list_shields_response.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from .._models import BaseModel
diff --git a/src/llama_stack_client/types/list_tool_groups_response.py b/src/llama_stack_client/types/list_tool_groups_response.py
index 94c5c145..dfef579f 100644
--- a/src/llama_stack_client/types/list_tool_groups_response.py
+++ b/src/llama_stack_client/types/list_tool_groups_response.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from .._models import BaseModel
diff --git a/src/llama_stack_client/types/list_vector_stores_response.py b/src/llama_stack_client/types/list_vector_stores_response.py
index d4960217..925e0265 100644
--- a/src/llama_stack_client/types/list_vector_stores_response.py
+++ b/src/llama_stack_client/types/list_vector_stores_response.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import List, Optional
diff --git a/src/llama_stack_client/types/model.py b/src/llama_stack_client/types/model.py
index 5651667d..5807b0ed 100644
--- a/src/llama_stack_client/types/model.py
+++ b/src/llama_stack_client/types/model.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import Dict, List, Union, Optional
@@ -17,7 +23,7 @@ class Model(BaseModel):
metadata: Dict[str, Union[bool, float, str, List[object], object, None]]
"""Any additional metadata for this model"""
- api_model_type: Literal["llm", "embedding"] = FieldInfo(alias="model_type")
+ api_model_type: Literal["llm", "embedding", "rerank"] = FieldInfo(alias="model_type")
"""The type of model (LLM or embedding model)"""
provider_id: str
diff --git a/src/llama_stack_client/types/model_list_response.py b/src/llama_stack_client/types/model_list_response.py
index 905cdb0f..b53ae421 100644
--- a/src/llama_stack_client/types/model_list_response.py
+++ b/src/llama_stack_client/types/model_list_response.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import List
diff --git a/src/llama_stack_client/types/model_register_params.py b/src/llama_stack_client/types/model_register_params.py
index fb1d9fb6..7fdda65d 100644
--- a/src/llama_stack_client/types/model_register_params.py
+++ b/src/llama_stack_client/types/model_register_params.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
@@ -15,7 +21,7 @@ class ModelRegisterParams(TypedDict, total=False):
metadata: Dict[str, Union[bool, float, str, Iterable[object], object, None]]
"""Any additional metadata for this model."""
- model_type: Literal["llm", "embedding"]
+ model_type: Literal["llm", "embedding", "rerank"]
"""The type of model to register."""
provider_id: str
diff --git a/src/llama_stack_client/types/models/__init__.py b/src/llama_stack_client/types/models/__init__.py
index f8ee8b14..d14ed874 100644
--- a/src/llama_stack_client/types/models/__init__.py
+++ b/src/llama_stack_client/types/models/__init__.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/types/moderation_create_params.py b/src/llama_stack_client/types/moderation_create_params.py
index 99b4228e..ac0933cf 100644
--- a/src/llama_stack_client/types/moderation_create_params.py
+++ b/src/llama_stack_client/types/moderation_create_params.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/types/provider_info.py b/src/llama_stack_client/types/provider_info.py
index 6b8a1ec6..9e95d211 100644
--- a/src/llama_stack_client/types/provider_info.py
+++ b/src/llama_stack_client/types/provider_info.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import Dict, List, Union
diff --git a/src/llama_stack_client/types/provider_list_response.py b/src/llama_stack_client/types/provider_list_response.py
index cdbc96f7..56f36ce5 100644
--- a/src/llama_stack_client/types/provider_list_response.py
+++ b/src/llama_stack_client/types/provider_list_response.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import List
diff --git a/src/llama_stack_client/types/query_chunks_response.py b/src/llama_stack_client/types/query_chunks_response.py
index 6a06b3f2..ab5fdb61 100644
--- a/src/llama_stack_client/types/query_chunks_response.py
+++ b/src/llama_stack_client/types/query_chunks_response.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import Dict, List, Union, Optional
diff --git a/src/llama_stack_client/types/query_condition_param.py b/src/llama_stack_client/types/query_condition_param.py
deleted file mode 100644
index 59def1b4..00000000
--- a/src/llama_stack_client/types/query_condition_param.py
+++ /dev/null
@@ -1,19 +0,0 @@
-# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-
-from __future__ import annotations
-
-from typing import Union, Iterable
-from typing_extensions import Literal, Required, TypedDict
-
-__all__ = ["QueryConditionParam"]
-
-
-class QueryConditionParam(TypedDict, total=False):
- key: Required[str]
- """The attribute key to filter on"""
-
- op: Required[Literal["eq", "ne", "gt", "lt"]]
- """The comparison operator to apply"""
-
- value: Required[Union[bool, float, str, Iterable[object], object, None]]
- """The value to compare against"""
diff --git a/src/llama_stack_client/types/query_spans_response.py b/src/llama_stack_client/types/query_spans_response.py
deleted file mode 100644
index a20c9b92..00000000
--- a/src/llama_stack_client/types/query_spans_response.py
+++ /dev/null
@@ -1,11 +0,0 @@
-# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-
-from .._models import BaseModel
-from .telemetry_query_spans_response import TelemetryQuerySpansResponse
-
-__all__ = ["QuerySpansResponse"]
-
-
-class QuerySpansResponse(BaseModel):
- data: TelemetryQuerySpansResponse
- """List of spans matching the query criteria"""
diff --git a/src/llama_stack_client/types/response_create_params.py b/src/llama_stack_client/types/response_create_params.py
index 219ba515..87c8ab68 100644
--- a/src/llama_stack_client/types/response_create_params.py
+++ b/src/llama_stack_client/types/response_create_params.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
@@ -17,16 +23,21 @@
"InputUnionMember1OpenAIResponseInputFunctionToolCallOutput",
"InputUnionMember1OpenAIResponseMcpApprovalRequest",
"InputUnionMember1OpenAIResponseMcpApprovalResponse",
+ "InputUnionMember1OpenAIResponseOutputMessageMcpCall",
+ "InputUnionMember1OpenAIResponseOutputMessageMcpListTools",
+ "InputUnionMember1OpenAIResponseOutputMessageMcpListToolsTool",
"InputUnionMember1OpenAIResponseMessage",
"InputUnionMember1OpenAIResponseMessageContentUnionMember1",
"InputUnionMember1OpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentText",
"InputUnionMember1OpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentImage",
"InputUnionMember1OpenAIResponseMessageContentUnionMember2",
- "InputUnionMember1OpenAIResponseMessageContentUnionMember2Annotation",
- "InputUnionMember1OpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFileCitation",
- "InputUnionMember1OpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationCitation",
- "InputUnionMember1OpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationContainerFileCitation",
- "InputUnionMember1OpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFilePath",
+ "InputUnionMember1OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputText",
+ "InputUnionMember1OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotation",
+ "InputUnionMember1OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFileCitation",
+ "InputUnionMember1OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationCitation",
+ "InputUnionMember1OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationContainerFileCitation",
+ "InputUnionMember1OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFilePath",
+ "InputUnionMember1OpenAIResponseMessageContentUnionMember2OpenAIResponseContentPartRefusal",
"Text",
"TextFormat",
"Tool",
@@ -183,6 +194,54 @@ class InputUnionMember1OpenAIResponseMcpApprovalResponse(TypedDict, total=False)
reason: str
+class InputUnionMember1OpenAIResponseOutputMessageMcpCall(TypedDict, total=False):
+ id: Required[str]
+ """Unique identifier for this MCP call"""
+
+ arguments: Required[str]
+ """JSON string containing the MCP call arguments"""
+
+ name: Required[str]
+ """Name of the MCP method being called"""
+
+ server_label: Required[str]
+ """Label identifying the MCP server handling the call"""
+
+ type: Required[Literal["mcp_call"]]
+ """Tool call type identifier, always "mcp_call" """
+
+ error: str
+ """(Optional) Error message if the MCP call failed"""
+
+ output: str
+ """(Optional) Output result from the successful MCP call"""
+
+
+class InputUnionMember1OpenAIResponseOutputMessageMcpListToolsTool(TypedDict, total=False):
+ input_schema: Required[Dict[str, Union[bool, float, str, Iterable[object], object, None]]]
+ """JSON schema defining the tool's input parameters"""
+
+ name: Required[str]
+ """Name of the tool"""
+
+ description: str
+ """(Optional) Description of what the tool does"""
+
+
+class InputUnionMember1OpenAIResponseOutputMessageMcpListTools(TypedDict, total=False):
+ id: Required[str]
+ """Unique identifier for this MCP list tools operation"""
+
+ server_label: Required[str]
+ """Label identifying the MCP server providing the tools"""
+
+ tools: Required[Iterable[InputUnionMember1OpenAIResponseOutputMessageMcpListToolsTool]]
+ """List of available tools provided by the MCP server"""
+
+ type: Required[Literal["mcp_list_tools"]]
+ """Tool call type identifier, always "mcp_list_tools" """
+
+
class InputUnionMember1OpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentText(
TypedDict, total=False
):
@@ -212,7 +271,7 @@ class InputUnionMember1OpenAIResponseMessageContentUnionMember1OpenAIResponseInp
]
-class InputUnionMember1OpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFileCitation(
+class InputUnionMember1OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFileCitation(
TypedDict, total=False
):
file_id: Required[str]
@@ -228,7 +287,7 @@ class InputUnionMember1OpenAIResponseMessageContentUnionMember2AnnotationOpenAIR
"""Annotation type identifier, always "file_citation" """
-class InputUnionMember1OpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationCitation(
+class InputUnionMember1OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationCitation(
TypedDict, total=False
):
end_index: Required[int]
@@ -247,7 +306,7 @@ class InputUnionMember1OpenAIResponseMessageContentUnionMember2AnnotationOpenAIR
"""URL of the referenced web resource"""
-class InputUnionMember1OpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationContainerFileCitation(
+class InputUnionMember1OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationContainerFileCitation(
TypedDict, total=False
):
container_id: Required[str]
@@ -263,7 +322,7 @@ class InputUnionMember1OpenAIResponseMessageContentUnionMember2AnnotationOpenAIR
type: Required[Literal["container_file_citation"]]
-class InputUnionMember1OpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFilePath(
+class InputUnionMember1OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFilePath(
TypedDict, total=False
):
file_id: Required[str]
@@ -273,22 +332,42 @@ class InputUnionMember1OpenAIResponseMessageContentUnionMember2AnnotationOpenAIR
type: Required[Literal["file_path"]]
-InputUnionMember1OpenAIResponseMessageContentUnionMember2Annotation: TypeAlias = Union[
- InputUnionMember1OpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFileCitation,
- InputUnionMember1OpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationCitation,
- InputUnionMember1OpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationContainerFileCitation,
- InputUnionMember1OpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFilePath,
+InputUnionMember1OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotation: TypeAlias = Union[
+ InputUnionMember1OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFileCitation,
+ InputUnionMember1OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationCitation,
+ InputUnionMember1OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationContainerFileCitation,
+ InputUnionMember1OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFilePath,
]
-class InputUnionMember1OpenAIResponseMessageContentUnionMember2(TypedDict, total=False):
- annotations: Required[Iterable[InputUnionMember1OpenAIResponseMessageContentUnionMember2Annotation]]
+class InputUnionMember1OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputText(
+ TypedDict, total=False
+):
+ annotations: Required[
+ Iterable[
+ InputUnionMember1OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotation
+ ]
+ ]
text: Required[str]
type: Required[Literal["output_text"]]
+class InputUnionMember1OpenAIResponseMessageContentUnionMember2OpenAIResponseContentPartRefusal(TypedDict, total=False):
+ refusal: Required[str]
+ """Refusal text supplied by the model"""
+
+ type: Required[Literal["refusal"]]
+ """Content part type identifier, always "refusal" """
+
+
+InputUnionMember1OpenAIResponseMessageContentUnionMember2: TypeAlias = Union[
+ InputUnionMember1OpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputText,
+ InputUnionMember1OpenAIResponseMessageContentUnionMember2OpenAIResponseContentPartRefusal,
+]
+
+
class InputUnionMember1OpenAIResponseMessage(TypedDict, total=False):
content: Required[
Union[
@@ -314,6 +393,8 @@ class InputUnionMember1OpenAIResponseMessage(TypedDict, total=False):
InputUnionMember1OpenAIResponseInputFunctionToolCallOutput,
InputUnionMember1OpenAIResponseMcpApprovalRequest,
InputUnionMember1OpenAIResponseMcpApprovalResponse,
+ InputUnionMember1OpenAIResponseOutputMessageMcpCall,
+ InputUnionMember1OpenAIResponseOutputMessageMcpListTools,
InputUnionMember1OpenAIResponseMessage,
]
diff --git a/src/llama_stack_client/types/response_delete_response.py b/src/llama_stack_client/types/response_delete_response.py
index e772fe35..ce229452 100644
--- a/src/llama_stack_client/types/response_delete_response.py
+++ b/src/llama_stack_client/types/response_delete_response.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing_extensions import Literal
diff --git a/src/llama_stack_client/types/response_list_params.py b/src/llama_stack_client/types/response_list_params.py
index ca13adfa..5de4106e 100644
--- a/src/llama_stack_client/types/response_list_params.py
+++ b/src/llama_stack_client/types/response_list_params.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/types/response_list_response.py b/src/llama_stack_client/types/response_list_response.py
index 0164e109..8a091316 100644
--- a/src/llama_stack_client/types/response_list_response.py
+++ b/src/llama_stack_client/types/response_list_response.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import Dict, List, Union, Optional
@@ -18,27 +24,34 @@
"InputOpenAIResponseInputFunctionToolCallOutput",
"InputOpenAIResponseMcpApprovalRequest",
"InputOpenAIResponseMcpApprovalResponse",
+ "InputOpenAIResponseOutputMessageMcpCall",
+ "InputOpenAIResponseOutputMessageMcpListTools",
+ "InputOpenAIResponseOutputMessageMcpListToolsTool",
"InputOpenAIResponseMessage",
"InputOpenAIResponseMessageContentUnionMember1",
"InputOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentText",
"InputOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentImage",
"InputOpenAIResponseMessageContentUnionMember2",
- "InputOpenAIResponseMessageContentUnionMember2Annotation",
- "InputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFileCitation",
- "InputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationCitation",
- "InputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationContainerFileCitation",
- "InputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFilePath",
+ "InputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputText",
+ "InputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotation",
+ "InputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFileCitation",
+ "InputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationCitation",
+ "InputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationContainerFileCitation",
+ "InputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFilePath",
+ "InputOpenAIResponseMessageContentUnionMember2OpenAIResponseContentPartRefusal",
"Output",
"OutputOpenAIResponseMessage",
"OutputOpenAIResponseMessageContentUnionMember1",
"OutputOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentText",
"OutputOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentImage",
"OutputOpenAIResponseMessageContentUnionMember2",
- "OutputOpenAIResponseMessageContentUnionMember2Annotation",
- "OutputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFileCitation",
- "OutputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationCitation",
- "OutputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationContainerFileCitation",
- "OutputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFilePath",
+ "OutputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputText",
+ "OutputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotation",
+ "OutputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFileCitation",
+ "OutputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationCitation",
+ "OutputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationContainerFileCitation",
+ "OutputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFilePath",
+ "OutputOpenAIResponseMessageContentUnionMember2OpenAIResponseContentPartRefusal",
"OutputOpenAIResponseOutputMessageWebSearchToolCall",
"OutputOpenAIResponseOutputMessageFileSearchToolCall",
"OutputOpenAIResponseOutputMessageFileSearchToolCallResult",
@@ -165,6 +178,54 @@ class InputOpenAIResponseMcpApprovalResponse(BaseModel):
reason: Optional[str] = None
+class InputOpenAIResponseOutputMessageMcpCall(BaseModel):
+ id: str
+ """Unique identifier for this MCP call"""
+
+ arguments: str
+ """JSON string containing the MCP call arguments"""
+
+ name: str
+ """Name of the MCP method being called"""
+
+ server_label: str
+ """Label identifying the MCP server handling the call"""
+
+ type: Literal["mcp_call"]
+ """Tool call type identifier, always "mcp_call" """
+
+ error: Optional[str] = None
+ """(Optional) Error message if the MCP call failed"""
+
+ output: Optional[str] = None
+ """(Optional) Output result from the successful MCP call"""
+
+
+class InputOpenAIResponseOutputMessageMcpListToolsTool(BaseModel):
+ input_schema: Dict[str, Union[bool, float, str, List[object], object, None]]
+ """JSON schema defining the tool's input parameters"""
+
+ name: str
+ """Name of the tool"""
+
+ description: Optional[str] = None
+ """(Optional) Description of what the tool does"""
+
+
+class InputOpenAIResponseOutputMessageMcpListTools(BaseModel):
+ id: str
+ """Unique identifier for this MCP list tools operation"""
+
+ server_label: str
+ """Label identifying the MCP server providing the tools"""
+
+ tools: List[InputOpenAIResponseOutputMessageMcpListToolsTool]
+ """List of available tools provided by the MCP server"""
+
+ type: Literal["mcp_list_tools"]
+ """Tool call type identifier, always "mcp_list_tools" """
+
+
class InputOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentText(BaseModel):
text: str
"""The text content of the input message"""
@@ -193,7 +254,9 @@ class InputOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageCon
]
-class InputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFileCitation(BaseModel):
+class InputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFileCitation(
+ BaseModel
+):
file_id: str
"""Unique identifier of the referenced file"""
@@ -207,7 +270,9 @@ class InputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnot
"""Annotation type identifier, always "file_citation" """
-class InputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationCitation(BaseModel):
+class InputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationCitation(
+ BaseModel
+):
end_index: int
"""End position of the citation span in the content"""
@@ -224,7 +289,9 @@ class InputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnot
"""URL of the referenced web resource"""
-class InputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationContainerFileCitation(BaseModel):
+class InputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationContainerFileCitation(
+ BaseModel
+):
container_id: str
end_index: int
@@ -238,7 +305,9 @@ class InputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnot
type: Literal["container_file_citation"]
-class InputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFilePath(BaseModel):
+class InputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFilePath(
+ BaseModel
+):
file_id: str
index: int
@@ -246,25 +315,44 @@ class InputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnot
type: Literal["file_path"]
-InputOpenAIResponseMessageContentUnionMember2Annotation: TypeAlias = Annotated[
+InputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotation: TypeAlias = Annotated[
Union[
- InputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFileCitation,
- InputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationCitation,
- InputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationContainerFileCitation,
- InputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFilePath,
+ InputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFileCitation,
+ InputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationCitation,
+ InputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationContainerFileCitation,
+ InputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFilePath,
],
PropertyInfo(discriminator="type"),
]
-class InputOpenAIResponseMessageContentUnionMember2(BaseModel):
- annotations: List[InputOpenAIResponseMessageContentUnionMember2Annotation]
+class InputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputText(BaseModel):
+ annotations: List[
+ InputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotation
+ ]
text: str
type: Literal["output_text"]
+class InputOpenAIResponseMessageContentUnionMember2OpenAIResponseContentPartRefusal(BaseModel):
+ refusal: str
+ """Refusal text supplied by the model"""
+
+ type: Literal["refusal"]
+ """Content part type identifier, always "refusal" """
+
+
+InputOpenAIResponseMessageContentUnionMember2: TypeAlias = Annotated[
+ Union[
+ InputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputText,
+ InputOpenAIResponseMessageContentUnionMember2OpenAIResponseContentPartRefusal,
+ ],
+ PropertyInfo(discriminator="type"),
+]
+
+
class InputOpenAIResponseMessage(BaseModel):
content: Union[
str, List[InputOpenAIResponseMessageContentUnionMember1], List[InputOpenAIResponseMessageContentUnionMember2]
@@ -286,6 +374,8 @@ class InputOpenAIResponseMessage(BaseModel):
InputOpenAIResponseInputFunctionToolCallOutput,
InputOpenAIResponseMcpApprovalRequest,
InputOpenAIResponseMcpApprovalResponse,
+ InputOpenAIResponseOutputMessageMcpCall,
+ InputOpenAIResponseOutputMessageMcpListTools,
InputOpenAIResponseMessage,
]
@@ -318,7 +408,9 @@ class OutputOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageCo
]
-class OutputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFileCitation(BaseModel):
+class OutputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFileCitation(
+ BaseModel
+):
file_id: str
"""Unique identifier of the referenced file"""
@@ -332,7 +424,9 @@ class OutputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnno
"""Annotation type identifier, always "file_citation" """
-class OutputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationCitation(BaseModel):
+class OutputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationCitation(
+ BaseModel
+):
end_index: int
"""End position of the citation span in the content"""
@@ -349,7 +443,9 @@ class OutputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnno
"""URL of the referenced web resource"""
-class OutputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationContainerFileCitation(BaseModel):
+class OutputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationContainerFileCitation(
+ BaseModel
+):
container_id: str
end_index: int
@@ -363,7 +459,9 @@ class OutputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnno
type: Literal["container_file_citation"]
-class OutputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFilePath(BaseModel):
+class OutputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFilePath(
+ BaseModel
+):
file_id: str
index: int
@@ -371,25 +469,44 @@ class OutputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnno
type: Literal["file_path"]
-OutputOpenAIResponseMessageContentUnionMember2Annotation: TypeAlias = Annotated[
+OutputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotation: TypeAlias = Annotated[
Union[
- OutputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFileCitation,
- OutputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationCitation,
- OutputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationContainerFileCitation,
- OutputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFilePath,
+ OutputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFileCitation,
+ OutputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationCitation,
+ OutputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationContainerFileCitation,
+ OutputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFilePath,
],
PropertyInfo(discriminator="type"),
]
-class OutputOpenAIResponseMessageContentUnionMember2(BaseModel):
- annotations: List[OutputOpenAIResponseMessageContentUnionMember2Annotation]
+class OutputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputText(BaseModel):
+ annotations: List[
+ OutputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotation
+ ]
text: str
type: Literal["output_text"]
+class OutputOpenAIResponseMessageContentUnionMember2OpenAIResponseContentPartRefusal(BaseModel):
+ refusal: str
+ """Refusal text supplied by the model"""
+
+ type: Literal["refusal"]
+ """Content part type identifier, always "refusal" """
+
+
+OutputOpenAIResponseMessageContentUnionMember2: TypeAlias = Annotated[
+ Union[
+ OutputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputText,
+ OutputOpenAIResponseMessageContentUnionMember2OpenAIResponseContentPartRefusal,
+ ],
+ PropertyInfo(discriminator="type"),
+]
+
+
class OutputOpenAIResponseMessage(BaseModel):
content: Union[
str, List[OutputOpenAIResponseMessageContentUnionMember1], List[OutputOpenAIResponseMessageContentUnionMember2]
@@ -718,6 +835,9 @@ class ResponseListResponse(BaseModel):
error: Optional[Error] = None
"""(Optional) Error details if the response generation failed"""
+ instructions: Optional[str] = None
+ """(Optional) System message inserted into the model's context"""
+
previous_response_id: Optional[str] = None
"""(Optional) ID of the previous response in a conversation"""
diff --git a/src/llama_stack_client/types/response_object.py b/src/llama_stack_client/types/response_object.py
index d9c1eb93..57f708ce 100644
--- a/src/llama_stack_client/types/response_object.py
+++ b/src/llama_stack_client/types/response_object.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import Dict, List, Union, Optional
@@ -16,11 +22,13 @@
"OutputOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentText",
"OutputOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentImage",
"OutputOpenAIResponseMessageContentUnionMember2",
- "OutputOpenAIResponseMessageContentUnionMember2Annotation",
- "OutputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFileCitation",
- "OutputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationCitation",
- "OutputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationContainerFileCitation",
- "OutputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFilePath",
+ "OutputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputText",
+ "OutputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotation",
+ "OutputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFileCitation",
+ "OutputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationCitation",
+ "OutputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationContainerFileCitation",
+ "OutputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFilePath",
+ "OutputOpenAIResponseMessageContentUnionMember2OpenAIResponseContentPartRefusal",
"OutputOpenAIResponseOutputMessageWebSearchToolCall",
"OutputOpenAIResponseOutputMessageFileSearchToolCall",
"OutputOpenAIResponseOutputMessageFileSearchToolCallResult",
@@ -74,7 +82,9 @@ class OutputOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageCo
]
-class OutputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFileCitation(BaseModel):
+class OutputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFileCitation(
+ BaseModel
+):
file_id: str
"""Unique identifier of the referenced file"""
@@ -88,7 +98,9 @@ class OutputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnno
"""Annotation type identifier, always "file_citation" """
-class OutputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationCitation(BaseModel):
+class OutputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationCitation(
+ BaseModel
+):
end_index: int
"""End position of the citation span in the content"""
@@ -105,7 +117,9 @@ class OutputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnno
"""URL of the referenced web resource"""
-class OutputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationContainerFileCitation(BaseModel):
+class OutputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationContainerFileCitation(
+ BaseModel
+):
container_id: str
end_index: int
@@ -119,7 +133,9 @@ class OutputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnno
type: Literal["container_file_citation"]
-class OutputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFilePath(BaseModel):
+class OutputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFilePath(
+ BaseModel
+):
file_id: str
index: int
@@ -127,25 +143,44 @@ class OutputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnno
type: Literal["file_path"]
-OutputOpenAIResponseMessageContentUnionMember2Annotation: TypeAlias = Annotated[
+OutputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotation: TypeAlias = Annotated[
Union[
- OutputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFileCitation,
- OutputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationCitation,
- OutputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationContainerFileCitation,
- OutputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFilePath,
+ OutputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFileCitation,
+ OutputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationCitation,
+ OutputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationContainerFileCitation,
+ OutputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFilePath,
],
PropertyInfo(discriminator="type"),
]
-class OutputOpenAIResponseMessageContentUnionMember2(BaseModel):
- annotations: List[OutputOpenAIResponseMessageContentUnionMember2Annotation]
+class OutputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputText(BaseModel):
+ annotations: List[
+ OutputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotation
+ ]
text: str
type: Literal["output_text"]
+class OutputOpenAIResponseMessageContentUnionMember2OpenAIResponseContentPartRefusal(BaseModel):
+ refusal: str
+ """Refusal text supplied by the model"""
+
+ type: Literal["refusal"]
+ """Content part type identifier, always "refusal" """
+
+
+OutputOpenAIResponseMessageContentUnionMember2: TypeAlias = Annotated[
+ Union[
+ OutputOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputText,
+ OutputOpenAIResponseMessageContentUnionMember2OpenAIResponseContentPartRefusal,
+ ],
+ PropertyInfo(discriminator="type"),
+]
+
+
class OutputOpenAIResponseMessage(BaseModel):
content: Union[
str, List[OutputOpenAIResponseMessageContentUnionMember1], List[OutputOpenAIResponseMessageContentUnionMember2]
@@ -481,6 +516,9 @@ def output_text(self) -> str:
error: Optional[Error] = None
"""(Optional) Error details if the response generation failed"""
+ instructions: Optional[str] = None
+ """(Optional) System message inserted into the model's context"""
+
previous_response_id: Optional[str] = None
"""(Optional) ID of the previous response in a conversation"""
diff --git a/src/llama_stack_client/types/response_object_stream.py b/src/llama_stack_client/types/response_object_stream.py
index aef34c17..a75ac721 100644
--- a/src/llama_stack_client/types/response_object_stream.py
+++ b/src/llama_stack_client/types/response_object_stream.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import Dict, List, Union, Optional
@@ -18,11 +24,13 @@
"OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentText",
"OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentImage",
"OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessageContentUnionMember2",
- "OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessageContentUnionMember2Annotation",
- "OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFileCitation",
- "OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationCitation",
- "OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationContainerFileCitation",
- "OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFilePath",
+ "OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputText",
+ "OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotation",
+ "OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFileCitation",
+ "OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationCitation",
+ "OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationContainerFileCitation",
+ "OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFilePath",
+ "OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessageContentUnionMember2OpenAIResponseContentPartRefusal",
"OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseOutputMessageWebSearchToolCall",
"OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseOutputMessageFileSearchToolCall",
"OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseOutputMessageFileSearchToolCallResult",
@@ -38,11 +46,13 @@
"OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentText",
"OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentImage",
"OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageContentUnionMember2",
- "OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageContentUnionMember2Annotation",
- "OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFileCitation",
- "OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationCitation",
- "OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationContainerFileCitation",
- "OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFilePath",
+ "OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputText",
+ "OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotation",
+ "OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFileCitation",
+ "OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationCitation",
+ "OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationContainerFileCitation",
+ "OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFilePath",
+ "OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageContentUnionMember2OpenAIResponseContentPartRefusal",
"OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseOutputMessageWebSearchToolCall",
"OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseOutputMessageFileSearchToolCall",
"OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseOutputMessageFileSearchToolCallResult",
@@ -162,7 +172,7 @@ class OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessage
]
-class OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFileCitation(
+class OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFileCitation(
BaseModel
):
file_id: str
@@ -178,7 +188,7 @@ class OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessage
"""Annotation type identifier, always "file_citation" """
-class OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationCitation(
+class OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationCitation(
BaseModel
):
end_index: int
@@ -197,7 +207,7 @@ class OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessage
"""URL of the referenced web resource"""
-class OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationContainerFileCitation(
+class OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationContainerFileCitation(
BaseModel
):
container_id: str
@@ -213,7 +223,7 @@ class OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessage
type: Literal["container_file_citation"]
-class OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFilePath(
+class OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFilePath(
BaseModel
):
file_id: str
@@ -223,20 +233,22 @@ class OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessage
type: Literal["file_path"]
-OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessageContentUnionMember2Annotation: TypeAlias = Annotated[
+OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotation: TypeAlias = Annotated[
Union[
- OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFileCitation,
- OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationCitation,
- OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationContainerFileCitation,
- OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFilePath,
+ OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFileCitation,
+ OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationCitation,
+ OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationContainerFileCitation,
+ OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFilePath,
],
PropertyInfo(discriminator="type"),
]
-class OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessageContentUnionMember2(BaseModel):
+class OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputText(
+ BaseModel
+):
annotations: List[
- OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessageContentUnionMember2Annotation
+ OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotation
]
text: str
@@ -244,6 +256,25 @@ class OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessage
type: Literal["output_text"]
+class OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessageContentUnionMember2OpenAIResponseContentPartRefusal(
+ BaseModel
+):
+ refusal: str
+ """Refusal text supplied by the model"""
+
+ type: Literal["refusal"]
+ """Content part type identifier, always "refusal" """
+
+
+OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessageContentUnionMember2: TypeAlias = Annotated[
+ Union[
+ OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputText,
+ OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessageContentUnionMember2OpenAIResponseContentPartRefusal,
+ ],
+ PropertyInfo(discriminator="type"),
+]
+
+
class OpenAIResponseObjectStreamResponseOutputItemAddedItemOpenAIResponseMessage(BaseModel):
content: Union[
str,
@@ -452,7 +483,7 @@ class OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageC
]
-class OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFileCitation(
+class OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFileCitation(
BaseModel
):
file_id: str
@@ -468,7 +499,7 @@ class OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageC
"""Annotation type identifier, always "file_citation" """
-class OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationCitation(
+class OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationCitation(
BaseModel
):
end_index: int
@@ -487,7 +518,7 @@ class OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageC
"""URL of the referenced web resource"""
-class OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationContainerFileCitation(
+class OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationContainerFileCitation(
BaseModel
):
container_id: str
@@ -503,7 +534,7 @@ class OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageC
type: Literal["container_file_citation"]
-class OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFilePath(
+class OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFilePath(
BaseModel
):
file_id: str
@@ -513,20 +544,22 @@ class OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageC
type: Literal["file_path"]
-OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageContentUnionMember2Annotation: TypeAlias = Annotated[
+OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotation: TypeAlias = Annotated[
Union[
- OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFileCitation,
- OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationCitation,
- OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationContainerFileCitation,
- OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFilePath,
+ OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFileCitation,
+ OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationCitation,
+ OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationContainerFileCitation,
+ OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFilePath,
],
PropertyInfo(discriminator="type"),
]
-class OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageContentUnionMember2(BaseModel):
+class OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputText(
+ BaseModel
+):
annotations: List[
- OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageContentUnionMember2Annotation
+ OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotation
]
text: str
@@ -534,6 +567,25 @@ class OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageC
type: Literal["output_text"]
+class OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageContentUnionMember2OpenAIResponseContentPartRefusal(
+ BaseModel
+):
+ refusal: str
+ """Refusal text supplied by the model"""
+
+ type: Literal["refusal"]
+ """Content part type identifier, always "refusal" """
+
+
+OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageContentUnionMember2: TypeAlias = Annotated[
+ Union[
+ OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputText,
+ OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessageContentUnionMember2OpenAIResponseContentPartRefusal,
+ ],
+ PropertyInfo(discriminator="type"),
+]
+
+
class OpenAIResponseObjectStreamResponseOutputItemDoneItemOpenAIResponseMessage(BaseModel):
content: Union[
str,
diff --git a/src/llama_stack_client/types/responses/__init__.py b/src/llama_stack_client/types/responses/__init__.py
index cb934712..f3618e05 100644
--- a/src/llama_stack_client/types/responses/__init__.py
+++ b/src/llama_stack_client/types/responses/__init__.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/types/responses/input_item_list_params.py b/src/llama_stack_client/types/responses/input_item_list_params.py
index ff0e8f0a..de4dd691 100644
--- a/src/llama_stack_client/types/responses/input_item_list_params.py
+++ b/src/llama_stack_client/types/responses/input_item_list_params.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/types/responses/input_item_list_response.py b/src/llama_stack_client/types/responses/input_item_list_response.py
index 6862492d..3cb26346 100644
--- a/src/llama_stack_client/types/responses/input_item_list_response.py
+++ b/src/llama_stack_client/types/responses/input_item_list_response.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import Dict, List, Union, Optional
@@ -16,16 +22,21 @@
"DataOpenAIResponseInputFunctionToolCallOutput",
"DataOpenAIResponseMcpApprovalRequest",
"DataOpenAIResponseMcpApprovalResponse",
+ "DataOpenAIResponseOutputMessageMcpCall",
+ "DataOpenAIResponseOutputMessageMcpListTools",
+ "DataOpenAIResponseOutputMessageMcpListToolsTool",
"DataOpenAIResponseMessage",
"DataOpenAIResponseMessageContentUnionMember1",
"DataOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentText",
"DataOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentImage",
"DataOpenAIResponseMessageContentUnionMember2",
- "DataOpenAIResponseMessageContentUnionMember2Annotation",
- "DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFileCitation",
- "DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationCitation",
- "DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationContainerFileCitation",
- "DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFilePath",
+ "DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputText",
+ "DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotation",
+ "DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFileCitation",
+ "DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationCitation",
+ "DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationContainerFileCitation",
+ "DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFilePath",
+ "DataOpenAIResponseMessageContentUnionMember2OpenAIResponseContentPartRefusal",
]
@@ -130,6 +141,54 @@ class DataOpenAIResponseMcpApprovalResponse(BaseModel):
reason: Optional[str] = None
+class DataOpenAIResponseOutputMessageMcpCall(BaseModel):
+ id: str
+ """Unique identifier for this MCP call"""
+
+ arguments: str
+ """JSON string containing the MCP call arguments"""
+
+ name: str
+ """Name of the MCP method being called"""
+
+ server_label: str
+ """Label identifying the MCP server handling the call"""
+
+ type: Literal["mcp_call"]
+ """Tool call type identifier, always "mcp_call" """
+
+ error: Optional[str] = None
+ """(Optional) Error message if the MCP call failed"""
+
+ output: Optional[str] = None
+ """(Optional) Output result from the successful MCP call"""
+
+
+class DataOpenAIResponseOutputMessageMcpListToolsTool(BaseModel):
+ input_schema: Dict[str, Union[bool, float, str, List[object], object, None]]
+ """JSON schema defining the tool's input parameters"""
+
+ name: str
+ """Name of the tool"""
+
+ description: Optional[str] = None
+ """(Optional) Description of what the tool does"""
+
+
+class DataOpenAIResponseOutputMessageMcpListTools(BaseModel):
+ id: str
+ """Unique identifier for this MCP list tools operation"""
+
+ server_label: str
+ """Label identifying the MCP server providing the tools"""
+
+ tools: List[DataOpenAIResponseOutputMessageMcpListToolsTool]
+ """List of available tools provided by the MCP server"""
+
+ type: Literal["mcp_list_tools"]
+ """Tool call type identifier, always "mcp_list_tools" """
+
+
class DataOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentText(BaseModel):
text: str
"""The text content of the input message"""
@@ -158,7 +217,9 @@ class DataOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageCont
]
-class DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFileCitation(BaseModel):
+class DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFileCitation(
+ BaseModel
+):
file_id: str
"""Unique identifier of the referenced file"""
@@ -172,7 +233,9 @@ class DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnota
"""Annotation type identifier, always "file_citation" """
-class DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationCitation(BaseModel):
+class DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationCitation(
+ BaseModel
+):
end_index: int
"""End position of the citation span in the content"""
@@ -189,7 +252,9 @@ class DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnota
"""URL of the referenced web resource"""
-class DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationContainerFileCitation(BaseModel):
+class DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationContainerFileCitation(
+ BaseModel
+):
container_id: str
end_index: int
@@ -203,7 +268,9 @@ class DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnota
type: Literal["container_file_citation"]
-class DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFilePath(BaseModel):
+class DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFilePath(
+ BaseModel
+):
file_id: str
index: int
@@ -211,25 +278,44 @@ class DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnota
type: Literal["file_path"]
-DataOpenAIResponseMessageContentUnionMember2Annotation: TypeAlias = Annotated[
+DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotation: TypeAlias = Annotated[
Union[
- DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFileCitation,
- DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationCitation,
- DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationContainerFileCitation,
- DataOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFilePath,
+ DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFileCitation,
+ DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationCitation,
+ DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationContainerFileCitation,
+ DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotationOpenAIResponseAnnotationFilePath,
],
PropertyInfo(discriminator="type"),
]
-class DataOpenAIResponseMessageContentUnionMember2(BaseModel):
- annotations: List[DataOpenAIResponseMessageContentUnionMember2Annotation]
+class DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputText(BaseModel):
+ annotations: List[
+ DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputTextAnnotation
+ ]
text: str
type: Literal["output_text"]
+class DataOpenAIResponseMessageContentUnionMember2OpenAIResponseContentPartRefusal(BaseModel):
+ refusal: str
+ """Refusal text supplied by the model"""
+
+ type: Literal["refusal"]
+ """Content part type identifier, always "refusal" """
+
+
+DataOpenAIResponseMessageContentUnionMember2: TypeAlias = Annotated[
+ Union[
+ DataOpenAIResponseMessageContentUnionMember2OpenAIResponseOutputMessageContentOutputText,
+ DataOpenAIResponseMessageContentUnionMember2OpenAIResponseContentPartRefusal,
+ ],
+ PropertyInfo(discriminator="type"),
+]
+
+
class DataOpenAIResponseMessage(BaseModel):
content: Union[
str, List[DataOpenAIResponseMessageContentUnionMember1], List[DataOpenAIResponseMessageContentUnionMember2]
@@ -251,6 +337,8 @@ class DataOpenAIResponseMessage(BaseModel):
DataOpenAIResponseInputFunctionToolCallOutput,
DataOpenAIResponseMcpApprovalRequest,
DataOpenAIResponseMcpApprovalResponse,
+ DataOpenAIResponseOutputMessageMcpCall,
+ DataOpenAIResponseOutputMessageMcpListTools,
DataOpenAIResponseMessage,
]
diff --git a/src/llama_stack_client/types/route_info.py b/src/llama_stack_client/types/route_info.py
index 671361b9..a78151d9 100644
--- a/src/llama_stack_client/types/route_info.py
+++ b/src/llama_stack_client/types/route_info.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import List
diff --git a/src/llama_stack_client/types/route_list_response.py b/src/llama_stack_client/types/route_list_response.py
index cec8e0e1..5eded63b 100644
--- a/src/llama_stack_client/types/route_list_response.py
+++ b/src/llama_stack_client/types/route_list_response.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import List
diff --git a/src/llama_stack_client/types/run_shield_response.py b/src/llama_stack_client/types/run_shield_response.py
index ba7bac0b..fd51a10c 100644
--- a/src/llama_stack_client/types/run_shield_response.py
+++ b/src/llama_stack_client/types/run_shield_response.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import Optional
diff --git a/src/llama_stack_client/types/safety_run_shield_params.py b/src/llama_stack_client/types/safety_run_shield_params.py
index 4e687a06..3ee72883 100644
--- a/src/llama_stack_client/types/safety_run_shield_params.py
+++ b/src/llama_stack_client/types/safety_run_shield_params.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/types/scoring_fn.py b/src/llama_stack_client/types/scoring_fn.py
index 947e569c..7b96d2b0 100644
--- a/src/llama_stack_client/types/scoring_fn.py
+++ b/src/llama_stack_client/types/scoring_fn.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import Dict, List, Union, Optional
diff --git a/src/llama_stack_client/types/scoring_fn_params.py b/src/llama_stack_client/types/scoring_fn_params.py
index 937fd886..3b604109 100644
--- a/src/llama_stack_client/types/scoring_fn_params.py
+++ b/src/llama_stack_client/types/scoring_fn_params.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import List, Union, Optional
diff --git a/src/llama_stack_client/types/scoring_fn_params_param.py b/src/llama_stack_client/types/scoring_fn_params_param.py
index 46264833..362c7121 100644
--- a/src/llama_stack_client/types/scoring_fn_params_param.py
+++ b/src/llama_stack_client/types/scoring_fn_params_param.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/types/scoring_function_list_response.py b/src/llama_stack_client/types/scoring_function_list_response.py
index bad85a54..9372fa59 100644
--- a/src/llama_stack_client/types/scoring_function_list_response.py
+++ b/src/llama_stack_client/types/scoring_function_list_response.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import List
diff --git a/src/llama_stack_client/types/scoring_function_register_params.py b/src/llama_stack_client/types/scoring_function_register_params.py
index bf58bc33..c0a00c04 100644
--- a/src/llama_stack_client/types/scoring_function_register_params.py
+++ b/src/llama_stack_client/types/scoring_function_register_params.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/types/scoring_score_batch_params.py b/src/llama_stack_client/types/scoring_score_batch_params.py
index 28dfa86e..546011c5 100644
--- a/src/llama_stack_client/types/scoring_score_batch_params.py
+++ b/src/llama_stack_client/types/scoring_score_batch_params.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/types/scoring_score_batch_response.py b/src/llama_stack_client/types/scoring_score_batch_response.py
index 7f5f7e24..e3e84a9d 100644
--- a/src/llama_stack_client/types/scoring_score_batch_response.py
+++ b/src/llama_stack_client/types/scoring_score_batch_response.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import Dict, Optional
diff --git a/src/llama_stack_client/types/scoring_score_params.py b/src/llama_stack_client/types/scoring_score_params.py
index baac066e..c141fb4e 100644
--- a/src/llama_stack_client/types/scoring_score_params.py
+++ b/src/llama_stack_client/types/scoring_score_params.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/types/scoring_score_response.py b/src/llama_stack_client/types/scoring_score_response.py
index 083416e5..478e9fb5 100644
--- a/src/llama_stack_client/types/scoring_score_response.py
+++ b/src/llama_stack_client/types/scoring_score_response.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import Dict
diff --git a/src/llama_stack_client/types/shared/__init__.py b/src/llama_stack_client/types/shared/__init__.py
index 2d353f89..c18a9358 100644
--- a/src/llama_stack_client/types/shared/__init__.py
+++ b/src/llama_stack_client/types/shared/__init__.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from .message import Message as Message
diff --git a/src/llama_stack_client/types/shared/agent_config.py b/src/llama_stack_client/types/shared/agent_config.py
index eb116159..71d53ca5 100644
--- a/src/llama_stack_client/types/shared/agent_config.py
+++ b/src/llama_stack_client/types/shared/agent_config.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import Dict, List, Union, Optional
diff --git a/src/llama_stack_client/types/shared/completion_message.py b/src/llama_stack_client/types/shared/completion_message.py
index 61c10a5b..bed73ea4 100644
--- a/src/llama_stack_client/types/shared/completion_message.py
+++ b/src/llama_stack_client/types/shared/completion_message.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import List, Optional
diff --git a/src/llama_stack_client/types/shared/document.py b/src/llama_stack_client/types/shared/document.py
index 492e6abd..574f0baa 100644
--- a/src/llama_stack_client/types/shared/document.py
+++ b/src/llama_stack_client/types/shared/document.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import Dict, List, Union, Optional
diff --git a/src/llama_stack_client/types/shared/interleaved_content.py b/src/llama_stack_client/types/shared/interleaved_content.py
index 852e487e..d0a087ad 100644
--- a/src/llama_stack_client/types/shared/interleaved_content.py
+++ b/src/llama_stack_client/types/shared/interleaved_content.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import List, Union, Optional
diff --git a/src/llama_stack_client/types/shared/interleaved_content_item.py b/src/llama_stack_client/types/shared/interleaved_content_item.py
index cb034712..85e5d758 100644
--- a/src/llama_stack_client/types/shared/interleaved_content_item.py
+++ b/src/llama_stack_client/types/shared/interleaved_content_item.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import Union, Optional
diff --git a/src/llama_stack_client/types/shared/message.py b/src/llama_stack_client/types/shared/message.py
index 1da117ee..bb47f940 100644
--- a/src/llama_stack_client/types/shared/message.py
+++ b/src/llama_stack_client/types/shared/message.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import Union
diff --git a/src/llama_stack_client/types/shared/param_type.py b/src/llama_stack_client/types/shared/param_type.py
index 199b0fd7..e3341da0 100644
--- a/src/llama_stack_client/types/shared/param_type.py
+++ b/src/llama_stack_client/types/shared/param_type.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import Union
diff --git a/src/llama_stack_client/types/shared/query_config.py b/src/llama_stack_client/types/shared/query_config.py
index a4a1f741..867badbb 100644
--- a/src/llama_stack_client/types/shared/query_config.py
+++ b/src/llama_stack_client/types/shared/query_config.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import Union, Optional
diff --git a/src/llama_stack_client/types/shared/query_result.py b/src/llama_stack_client/types/shared/query_result.py
index c623c6d6..c06afcfe 100644
--- a/src/llama_stack_client/types/shared/query_result.py
+++ b/src/llama_stack_client/types/shared/query_result.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import Dict, List, Union, Optional
diff --git a/src/llama_stack_client/types/shared/response_format.py b/src/llama_stack_client/types/shared/response_format.py
index 537df8d5..0d601a23 100644
--- a/src/llama_stack_client/types/shared/response_format.py
+++ b/src/llama_stack_client/types/shared/response_format.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import Dict, List, Union
diff --git a/src/llama_stack_client/types/shared/safety_violation.py b/src/llama_stack_client/types/shared/safety_violation.py
index bea7ca93..dc18d9bf 100644
--- a/src/llama_stack_client/types/shared/safety_violation.py
+++ b/src/llama_stack_client/types/shared/safety_violation.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import Dict, List, Union, Optional
diff --git a/src/llama_stack_client/types/shared/sampling_params.py b/src/llama_stack_client/types/shared/sampling_params.py
index 6823aee7..f34ed6f5 100644
--- a/src/llama_stack_client/types/shared/sampling_params.py
+++ b/src/llama_stack_client/types/shared/sampling_params.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import List, Union, Optional
diff --git a/src/llama_stack_client/types/shared/scoring_result.py b/src/llama_stack_client/types/shared/scoring_result.py
index 61ad9b54..68788ca2 100644
--- a/src/llama_stack_client/types/shared/scoring_result.py
+++ b/src/llama_stack_client/types/shared/scoring_result.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import Dict, List, Union
diff --git a/src/llama_stack_client/types/shared/system_message.py b/src/llama_stack_client/types/shared/system_message.py
index a854e019..03c2d3d4 100644
--- a/src/llama_stack_client/types/shared/system_message.py
+++ b/src/llama_stack_client/types/shared/system_message.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing_extensions import Literal
diff --git a/src/llama_stack_client/types/shared/tool_call.py b/src/llama_stack_client/types/shared/tool_call.py
index a35cd6dd..a0944a0d 100644
--- a/src/llama_stack_client/types/shared/tool_call.py
+++ b/src/llama_stack_client/types/shared/tool_call.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import Union
diff --git a/src/llama_stack_client/types/shared/tool_response_message.py b/src/llama_stack_client/types/shared/tool_response_message.py
index c7b8f21f..265c9f00 100644
--- a/src/llama_stack_client/types/shared/tool_response_message.py
+++ b/src/llama_stack_client/types/shared/tool_response_message.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing_extensions import Literal
diff --git a/src/llama_stack_client/types/shared/user_message.py b/src/llama_stack_client/types/shared/user_message.py
index 2a89fbba..7afc3970 100644
--- a/src/llama_stack_client/types/shared/user_message.py
+++ b/src/llama_stack_client/types/shared/user_message.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import Optional
diff --git a/src/llama_stack_client/types/shared_params/__init__.py b/src/llama_stack_client/types/shared_params/__init__.py
index 894d8a8d..12061849 100644
--- a/src/llama_stack_client/types/shared_params/__init__.py
+++ b/src/llama_stack_client/types/shared_params/__init__.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from .message import Message as Message
diff --git a/src/llama_stack_client/types/shared_params/agent_config.py b/src/llama_stack_client/types/shared_params/agent_config.py
index c1206bd5..d444e5da 100644
--- a/src/llama_stack_client/types/shared_params/agent_config.py
+++ b/src/llama_stack_client/types/shared_params/agent_config.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/types/shared_params/completion_message.py b/src/llama_stack_client/types/shared_params/completion_message.py
index 43b2529e..61c3fc08 100644
--- a/src/llama_stack_client/types/shared_params/completion_message.py
+++ b/src/llama_stack_client/types/shared_params/completion_message.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/types/shared_params/document.py b/src/llama_stack_client/types/shared_params/document.py
index db9cd51d..e73e0705 100644
--- a/src/llama_stack_client/types/shared_params/document.py
+++ b/src/llama_stack_client/types/shared_params/document.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/types/shared_params/interleaved_content.py b/src/llama_stack_client/types/shared_params/interleaved_content.py
index 5ea3953a..855e23a7 100644
--- a/src/llama_stack_client/types/shared_params/interleaved_content.py
+++ b/src/llama_stack_client/types/shared_params/interleaved_content.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/types/shared_params/interleaved_content_item.py b/src/llama_stack_client/types/shared_params/interleaved_content_item.py
index ed3daa32..79c2c09f 100644
--- a/src/llama_stack_client/types/shared_params/interleaved_content_item.py
+++ b/src/llama_stack_client/types/shared_params/interleaved_content_item.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/types/shared_params/message.py b/src/llama_stack_client/types/shared_params/message.py
index 24a49afc..2a2c0f75 100644
--- a/src/llama_stack_client/types/shared_params/message.py
+++ b/src/llama_stack_client/types/shared_params/message.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/types/shared_params/query_config.py b/src/llama_stack_client/types/shared_params/query_config.py
index 91a5b596..5328bf42 100644
--- a/src/llama_stack_client/types/shared_params/query_config.py
+++ b/src/llama_stack_client/types/shared_params/query_config.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/types/shared_params/response_format.py b/src/llama_stack_client/types/shared_params/response_format.py
index 53411700..c3146dfc 100644
--- a/src/llama_stack_client/types/shared_params/response_format.py
+++ b/src/llama_stack_client/types/shared_params/response_format.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/types/shared_params/sampling_params.py b/src/llama_stack_client/types/shared_params/sampling_params.py
index e5eebddd..9be15690 100644
--- a/src/llama_stack_client/types/shared_params/sampling_params.py
+++ b/src/llama_stack_client/types/shared_params/sampling_params.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/types/shared_params/system_message.py b/src/llama_stack_client/types/shared_params/system_message.py
index 7cf9535c..bb41f69d 100644
--- a/src/llama_stack_client/types/shared_params/system_message.py
+++ b/src/llama_stack_client/types/shared_params/system_message.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/types/shared_params/tool_call.py b/src/llama_stack_client/types/shared_params/tool_call.py
index 16686e61..320b5a8e 100644
--- a/src/llama_stack_client/types/shared_params/tool_call.py
+++ b/src/llama_stack_client/types/shared_params/tool_call.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/types/shared_params/tool_response_message.py b/src/llama_stack_client/types/shared_params/tool_response_message.py
index 789bbcde..db66b8d5 100644
--- a/src/llama_stack_client/types/shared_params/tool_response_message.py
+++ b/src/llama_stack_client/types/shared_params/tool_response_message.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/types/shared_params/user_message.py b/src/llama_stack_client/types/shared_params/user_message.py
index 4b8e3de3..39170454 100644
--- a/src/llama_stack_client/types/shared_params/user_message.py
+++ b/src/llama_stack_client/types/shared_params/user_message.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/types/shield.py b/src/llama_stack_client/types/shield.py
index dd48dfae..4eaf7652 100644
--- a/src/llama_stack_client/types/shield.py
+++ b/src/llama_stack_client/types/shield.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import Dict, List, Union, Optional
diff --git a/src/llama_stack_client/types/shield_list_response.py b/src/llama_stack_client/types/shield_list_response.py
index 0cba0500..957f71c7 100644
--- a/src/llama_stack_client/types/shield_list_response.py
+++ b/src/llama_stack_client/types/shield_list_response.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import List
diff --git a/src/llama_stack_client/types/shield_register_params.py b/src/llama_stack_client/types/shield_register_params.py
index 7ae0b2c1..90e25cc9 100644
--- a/src/llama_stack_client/types/shield_register_params.py
+++ b/src/llama_stack_client/types/shield_register_params.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/types/span_with_status.py b/src/llama_stack_client/types/span_with_status.py
deleted file mode 100644
index 04d124bd..00000000
--- a/src/llama_stack_client/types/span_with_status.py
+++ /dev/null
@@ -1,35 +0,0 @@
-# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-
-from typing import Dict, List, Union, Optional
-from datetime import datetime
-from typing_extensions import Literal
-
-from .._models import BaseModel
-
-__all__ = ["SpanWithStatus"]
-
-
-class SpanWithStatus(BaseModel):
- name: str
- """Human-readable name describing the operation this span represents"""
-
- span_id: str
- """Unique identifier for the span"""
-
- start_time: datetime
- """Timestamp when the operation began"""
-
- trace_id: str
- """Unique identifier for the trace this span belongs to"""
-
- attributes: Optional[Dict[str, Union[bool, float, str, List[object], object, None]]] = None
- """(Optional) Key-value pairs containing additional metadata about the span"""
-
- end_time: Optional[datetime] = None
- """(Optional) Timestamp when the operation finished, if completed"""
-
- parent_span_id: Optional[str] = None
- """(Optional) Unique identifier for the parent span, if this is a child span"""
-
- status: Optional[Literal["ok", "error"]] = None
- """(Optional) The current status of the span"""
diff --git a/src/llama_stack_client/types/synthetic_data_generation_generate_params.py b/src/llama_stack_client/types/synthetic_data_generation_generate_params.py
index 5f55a97c..2f817132 100644
--- a/src/llama_stack_client/types/synthetic_data_generation_generate_params.py
+++ b/src/llama_stack_client/types/synthetic_data_generation_generate_params.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/types/synthetic_data_generation_response.py b/src/llama_stack_client/types/synthetic_data_generation_response.py
index cfb20f08..9332562f 100644
--- a/src/llama_stack_client/types/synthetic_data_generation_response.py
+++ b/src/llama_stack_client/types/synthetic_data_generation_response.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import Dict, List, Union, Optional
diff --git a/src/llama_stack_client/types/telemetry_get_span_response.py b/src/llama_stack_client/types/telemetry_get_span_response.py
deleted file mode 100644
index 6826d4d0..00000000
--- a/src/llama_stack_client/types/telemetry_get_span_response.py
+++ /dev/null
@@ -1,31 +0,0 @@
-# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-
-from typing import Dict, List, Union, Optional
-from datetime import datetime
-
-from .._models import BaseModel
-
-__all__ = ["TelemetryGetSpanResponse"]
-
-
-class TelemetryGetSpanResponse(BaseModel):
- name: str
- """Human-readable name describing the operation this span represents"""
-
- span_id: str
- """Unique identifier for the span"""
-
- start_time: datetime
- """Timestamp when the operation began"""
-
- trace_id: str
- """Unique identifier for the trace this span belongs to"""
-
- attributes: Optional[Dict[str, Union[bool, float, str, List[object], object, None]]] = None
- """(Optional) Key-value pairs containing additional metadata about the span"""
-
- end_time: Optional[datetime] = None
- """(Optional) Timestamp when the operation finished, if completed"""
-
- parent_span_id: Optional[str] = None
- """(Optional) Unique identifier for the parent span, if this is a child span"""
diff --git a/src/llama_stack_client/types/telemetry_get_span_tree_params.py b/src/llama_stack_client/types/telemetry_get_span_tree_params.py
deleted file mode 100644
index 92dc7e1d..00000000
--- a/src/llama_stack_client/types/telemetry_get_span_tree_params.py
+++ /dev/null
@@ -1,17 +0,0 @@
-# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-
-from __future__ import annotations
-
-from typing_extensions import TypedDict
-
-from .._types import SequenceNotStr
-
-__all__ = ["TelemetryGetSpanTreeParams"]
-
-
-class TelemetryGetSpanTreeParams(TypedDict, total=False):
- attributes_to_return: SequenceNotStr[str]
- """The attributes to return in the tree."""
-
- max_depth: int
- """The maximum depth of the tree."""
diff --git a/src/llama_stack_client/types/telemetry_get_span_tree_response.py b/src/llama_stack_client/types/telemetry_get_span_tree_response.py
deleted file mode 100644
index b72e6158..00000000
--- a/src/llama_stack_client/types/telemetry_get_span_tree_response.py
+++ /dev/null
@@ -1,10 +0,0 @@
-# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-
-from typing import Dict
-from typing_extensions import TypeAlias
-
-from .span_with_status import SpanWithStatus
-
-__all__ = ["TelemetryGetSpanTreeResponse"]
-
-TelemetryGetSpanTreeResponse: TypeAlias = Dict[str, SpanWithStatus]
diff --git a/src/llama_stack_client/types/telemetry_query_metrics_params.py b/src/llama_stack_client/types/telemetry_query_metrics_params.py
deleted file mode 100644
index adf3f720..00000000
--- a/src/llama_stack_client/types/telemetry_query_metrics_params.py
+++ /dev/null
@@ -1,36 +0,0 @@
-# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-
-from __future__ import annotations
-
-from typing import Iterable
-from typing_extensions import Literal, Required, TypedDict
-
-__all__ = ["TelemetryQueryMetricsParams", "LabelMatcher"]
-
-
-class TelemetryQueryMetricsParams(TypedDict, total=False):
- query_type: Required[Literal["range", "instant"]]
- """The type of query to perform."""
-
- start_time: Required[int]
- """The start time of the metric to query."""
-
- end_time: int
- """The end time of the metric to query."""
-
- granularity: str
- """The granularity of the metric to query."""
-
- label_matchers: Iterable[LabelMatcher]
- """The label matchers to apply to the metric."""
-
-
-class LabelMatcher(TypedDict, total=False):
- name: Required[str]
- """The name of the label to match"""
-
- operator: Required[Literal["=", "!=", "=~", "!~"]]
- """The comparison operator to use for matching"""
-
- value: Required[str]
- """The value to match against"""
diff --git a/src/llama_stack_client/types/telemetry_query_metrics_response.py b/src/llama_stack_client/types/telemetry_query_metrics_response.py
deleted file mode 100644
index e9f4264e..00000000
--- a/src/llama_stack_client/types/telemetry_query_metrics_response.py
+++ /dev/null
@@ -1,45 +0,0 @@
-# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-
-from typing import List
-from typing_extensions import TypeAlias
-
-from .._models import BaseModel
-
-__all__ = [
- "TelemetryQueryMetricsResponse",
- "TelemetryQueryMetricsResponseItem",
- "TelemetryQueryMetricsResponseItemLabel",
- "TelemetryQueryMetricsResponseItemValue",
-]
-
-
-class TelemetryQueryMetricsResponseItemLabel(BaseModel):
- name: str
- """The name of the label"""
-
- value: str
- """The value of the label"""
-
-
-class TelemetryQueryMetricsResponseItemValue(BaseModel):
- timestamp: int
- """Unix timestamp when the metric value was recorded"""
-
- unit: str
-
- value: float
- """The numeric value of the metric at this timestamp"""
-
-
-class TelemetryQueryMetricsResponseItem(BaseModel):
- labels: List[TelemetryQueryMetricsResponseItemLabel]
- """List of labels associated with this metric series"""
-
- metric: str
- """The name of the metric"""
-
- values: List[TelemetryQueryMetricsResponseItemValue]
- """List of data points in chronological order"""
-
-
-TelemetryQueryMetricsResponse: TypeAlias = List[TelemetryQueryMetricsResponseItem]
diff --git a/src/llama_stack_client/types/telemetry_query_spans_params.py b/src/llama_stack_client/types/telemetry_query_spans_params.py
deleted file mode 100644
index 452439e3..00000000
--- a/src/llama_stack_client/types/telemetry_query_spans_params.py
+++ /dev/null
@@ -1,22 +0,0 @@
-# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-
-from __future__ import annotations
-
-from typing import Iterable
-from typing_extensions import Required, TypedDict
-
-from .._types import SequenceNotStr
-from .query_condition_param import QueryConditionParam
-
-__all__ = ["TelemetryQuerySpansParams"]
-
-
-class TelemetryQuerySpansParams(TypedDict, total=False):
- attribute_filters: Required[Iterable[QueryConditionParam]]
- """The attribute filters to apply to the spans."""
-
- attributes_to_return: Required[SequenceNotStr[str]]
- """The attributes to return in the spans."""
-
- max_depth: int
- """The maximum depth of the tree."""
diff --git a/src/llama_stack_client/types/telemetry_query_spans_response.py b/src/llama_stack_client/types/telemetry_query_spans_response.py
deleted file mode 100644
index 49eaeb38..00000000
--- a/src/llama_stack_client/types/telemetry_query_spans_response.py
+++ /dev/null
@@ -1,35 +0,0 @@
-# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-
-from typing import Dict, List, Union, Optional
-from datetime import datetime
-from typing_extensions import TypeAlias
-
-from .._models import BaseModel
-
-__all__ = ["TelemetryQuerySpansResponse", "TelemetryQuerySpansResponseItem"]
-
-
-class TelemetryQuerySpansResponseItem(BaseModel):
- name: str
- """Human-readable name describing the operation this span represents"""
-
- span_id: str
- """Unique identifier for the span"""
-
- start_time: datetime
- """Timestamp when the operation began"""
-
- trace_id: str
- """Unique identifier for the trace this span belongs to"""
-
- attributes: Optional[Dict[str, Union[bool, float, str, List[object], object, None]]] = None
- """(Optional) Key-value pairs containing additional metadata about the span"""
-
- end_time: Optional[datetime] = None
- """(Optional) Timestamp when the operation finished, if completed"""
-
- parent_span_id: Optional[str] = None
- """(Optional) Unique identifier for the parent span, if this is a child span"""
-
-
-TelemetryQuerySpansResponse: TypeAlias = List[TelemetryQuerySpansResponseItem]
diff --git a/src/llama_stack_client/types/telemetry_query_traces_params.py b/src/llama_stack_client/types/telemetry_query_traces_params.py
deleted file mode 100644
index 2a6eb334..00000000
--- a/src/llama_stack_client/types/telemetry_query_traces_params.py
+++ /dev/null
@@ -1,25 +0,0 @@
-# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-
-from __future__ import annotations
-
-from typing import Iterable
-from typing_extensions import TypedDict
-
-from .._types import SequenceNotStr
-from .query_condition_param import QueryConditionParam
-
-__all__ = ["TelemetryQueryTracesParams"]
-
-
-class TelemetryQueryTracesParams(TypedDict, total=False):
- attribute_filters: Iterable[QueryConditionParam]
- """The attribute filters to apply to the traces."""
-
- limit: int
- """The limit of traces to return."""
-
- offset: int
- """The offset of the traces to return."""
-
- order_by: SequenceNotStr[str]
- """The order by of the traces to return."""
diff --git a/src/llama_stack_client/types/telemetry_query_traces_response.py b/src/llama_stack_client/types/telemetry_query_traces_response.py
deleted file mode 100644
index 01a1365d..00000000
--- a/src/llama_stack_client/types/telemetry_query_traces_response.py
+++ /dev/null
@@ -1,10 +0,0 @@
-# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-
-from typing import List
-from typing_extensions import TypeAlias
-
-from .trace import Trace
-
-__all__ = ["TelemetryQueryTracesResponse"]
-
-TelemetryQueryTracesResponse: TypeAlias = List[Trace]
diff --git a/src/llama_stack_client/types/telemetry_save_spans_to_dataset_params.py b/src/llama_stack_client/types/telemetry_save_spans_to_dataset_params.py
deleted file mode 100644
index f0bdebbd..00000000
--- a/src/llama_stack_client/types/telemetry_save_spans_to_dataset_params.py
+++ /dev/null
@@ -1,25 +0,0 @@
-# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-
-from __future__ import annotations
-
-from typing import Iterable
-from typing_extensions import Required, TypedDict
-
-from .._types import SequenceNotStr
-from .query_condition_param import QueryConditionParam
-
-__all__ = ["TelemetrySaveSpansToDatasetParams"]
-
-
-class TelemetrySaveSpansToDatasetParams(TypedDict, total=False):
- attribute_filters: Required[Iterable[QueryConditionParam]]
- """The attribute filters to apply to the spans."""
-
- attributes_to_save: Required[SequenceNotStr[str]]
- """The attributes to save to the dataset."""
-
- dataset_id: Required[str]
- """The ID of the dataset to save the spans to."""
-
- max_depth: int
- """The maximum depth of the tree."""
diff --git a/src/llama_stack_client/types/tool_def.py b/src/llama_stack_client/types/tool_def.py
index 4674f832..087e2d78 100644
--- a/src/llama_stack_client/types/tool_def.py
+++ b/src/llama_stack_client/types/tool_def.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import Dict, List, Union, Optional
diff --git a/src/llama_stack_client/types/tool_def_param.py b/src/llama_stack_client/types/tool_def_param.py
index d14ef6cc..99e7def1 100644
--- a/src/llama_stack_client/types/tool_def_param.py
+++ b/src/llama_stack_client/types/tool_def_param.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/types/tool_group.py b/src/llama_stack_client/types/tool_group.py
index 52fca005..a5e265f6 100644
--- a/src/llama_stack_client/types/tool_group.py
+++ b/src/llama_stack_client/types/tool_group.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import Dict, List, Union, Optional
diff --git a/src/llama_stack_client/types/tool_invocation_result.py b/src/llama_stack_client/types/tool_invocation_result.py
index 4262a85b..a20cb09d 100644
--- a/src/llama_stack_client/types/tool_invocation_result.py
+++ b/src/llama_stack_client/types/tool_invocation_result.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import Dict, List, Union, Optional
diff --git a/src/llama_stack_client/types/tool_list_params.py b/src/llama_stack_client/types/tool_list_params.py
index 38f4bf73..e13db358 100644
--- a/src/llama_stack_client/types/tool_list_params.py
+++ b/src/llama_stack_client/types/tool_list_params.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/types/tool_list_response.py b/src/llama_stack_client/types/tool_list_response.py
index bb6c935f..240ed3b7 100644
--- a/src/llama_stack_client/types/tool_list_response.py
+++ b/src/llama_stack_client/types/tool_list_response.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import List
diff --git a/src/llama_stack_client/types/tool_runtime/__init__.py b/src/llama_stack_client/types/tool_runtime/__init__.py
index 43dd1925..db62ca58 100644
--- a/src/llama_stack_client/types/tool_runtime/__init__.py
+++ b/src/llama_stack_client/types/tool_runtime/__init__.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/types/tool_runtime/rag_tool_insert_params.py b/src/llama_stack_client/types/tool_runtime/rag_tool_insert_params.py
index 614a969c..095a2a69 100644
--- a/src/llama_stack_client/types/tool_runtime/rag_tool_insert_params.py
+++ b/src/llama_stack_client/types/tool_runtime/rag_tool_insert_params.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/types/tool_runtime/rag_tool_query_params.py b/src/llama_stack_client/types/tool_runtime/rag_tool_query_params.py
index f8aa463b..08d1f998 100644
--- a/src/llama_stack_client/types/tool_runtime/rag_tool_query_params.py
+++ b/src/llama_stack_client/types/tool_runtime/rag_tool_query_params.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/types/tool_runtime_invoke_tool_params.py b/src/llama_stack_client/types/tool_runtime_invoke_tool_params.py
index 03df2d40..1a299eba 100644
--- a/src/llama_stack_client/types/tool_runtime_invoke_tool_params.py
+++ b/src/llama_stack_client/types/tool_runtime_invoke_tool_params.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/types/tool_runtime_list_tools_params.py b/src/llama_stack_client/types/tool_runtime_list_tools_params.py
index d4933940..ce00936b 100644
--- a/src/llama_stack_client/types/tool_runtime_list_tools_params.py
+++ b/src/llama_stack_client/types/tool_runtime_list_tools_params.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/types/tool_runtime_list_tools_response.py b/src/llama_stack_client/types/tool_runtime_list_tools_response.py
index cd65754f..6854cb06 100644
--- a/src/llama_stack_client/types/tool_runtime_list_tools_response.py
+++ b/src/llama_stack_client/types/tool_runtime_list_tools_response.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import List
diff --git a/src/llama_stack_client/types/toolgroup_list_response.py b/src/llama_stack_client/types/toolgroup_list_response.py
index 0f668de3..b468e07d 100644
--- a/src/llama_stack_client/types/toolgroup_list_response.py
+++ b/src/llama_stack_client/types/toolgroup_list_response.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import List
diff --git a/src/llama_stack_client/types/toolgroup_register_params.py b/src/llama_stack_client/types/toolgroup_register_params.py
index 2aa79960..d60d839a 100644
--- a/src/llama_stack_client/types/toolgroup_register_params.py
+++ b/src/llama_stack_client/types/toolgroup_register_params.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/types/trace.py b/src/llama_stack_client/types/trace.py
deleted file mode 100644
index 0657d616..00000000
--- a/src/llama_stack_client/types/trace.py
+++ /dev/null
@@ -1,22 +0,0 @@
-# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-
-from typing import Optional
-from datetime import datetime
-
-from .._models import BaseModel
-
-__all__ = ["Trace"]
-
-
-class Trace(BaseModel):
- root_span_id: str
- """Unique identifier for the root span that started this trace"""
-
- start_time: datetime
- """Timestamp when the trace began"""
-
- trace_id: str
- """Unique identifier for the trace"""
-
- end_time: Optional[datetime] = None
- """(Optional) Timestamp when the trace finished, if completed"""
diff --git a/src/llama_stack_client/types/vector_io_insert_params.py b/src/llama_stack_client/types/vector_io_insert_params.py
index 5b6580fe..5613251f 100644
--- a/src/llama_stack_client/types/vector_io_insert_params.py
+++ b/src/llama_stack_client/types/vector_io_insert_params.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/types/vector_io_query_params.py b/src/llama_stack_client/types/vector_io_query_params.py
index f0569a58..a2fdc561 100644
--- a/src/llama_stack_client/types/vector_io_query_params.py
+++ b/src/llama_stack_client/types/vector_io_query_params.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/types/vector_store.py b/src/llama_stack_client/types/vector_store.py
index cfcebd81..5b31e857 100644
--- a/src/llama_stack_client/types/vector_store.py
+++ b/src/llama_stack_client/types/vector_store.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
import builtins
diff --git a/src/llama_stack_client/types/vector_store_create_params.py b/src/llama_stack_client/types/vector_store_create_params.py
index 7eb3743b..c4930972 100644
--- a/src/llama_stack_client/types/vector_store_create_params.py
+++ b/src/llama_stack_client/types/vector_store_create_params.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/types/vector_store_delete_response.py b/src/llama_stack_client/types/vector_store_delete_response.py
index 29637547..85689115 100644
--- a/src/llama_stack_client/types/vector_store_delete_response.py
+++ b/src/llama_stack_client/types/vector_store_delete_response.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from .._models import BaseModel
diff --git a/src/llama_stack_client/types/vector_store_list_params.py b/src/llama_stack_client/types/vector_store_list_params.py
index 176a6279..b94f4150 100644
--- a/src/llama_stack_client/types/vector_store_list_params.py
+++ b/src/llama_stack_client/types/vector_store_list_params.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/types/vector_store_search_params.py b/src/llama_stack_client/types/vector_store_search_params.py
index 96d4be63..0fd26224 100644
--- a/src/llama_stack_client/types/vector_store_search_params.py
+++ b/src/llama_stack_client/types/vector_store_search_params.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/types/vector_store_search_response.py b/src/llama_stack_client/types/vector_store_search_response.py
index 2f9a1076..48e5fb8f 100644
--- a/src/llama_stack_client/types/vector_store_search_response.py
+++ b/src/llama_stack_client/types/vector_store_search_response.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import Dict, List, Union, Optional
diff --git a/src/llama_stack_client/types/vector_store_update_params.py b/src/llama_stack_client/types/vector_store_update_params.py
index d1e069ed..6326c6df 100644
--- a/src/llama_stack_client/types/vector_store_update_params.py
+++ b/src/llama_stack_client/types/vector_store_update_params.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/types/vector_stores/__init__.py b/src/llama_stack_client/types/vector_stores/__init__.py
index ee2835d8..e74ba448 100644
--- a/src/llama_stack_client/types/vector_stores/__init__.py
+++ b/src/llama_stack_client/types/vector_stores/__init__.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/types/vector_stores/file_batch_create_params.py b/src/llama_stack_client/types/vector_stores/file_batch_create_params.py
index ed1f28fe..030ef097 100644
--- a/src/llama_stack_client/types/vector_stores/file_batch_create_params.py
+++ b/src/llama_stack_client/types/vector_stores/file_batch_create_params.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/types/vector_stores/file_batch_list_files_params.py b/src/llama_stack_client/types/vector_stores/file_batch_list_files_params.py
index 2ffa8417..13cfde23 100644
--- a/src/llama_stack_client/types/vector_stores/file_batch_list_files_params.py
+++ b/src/llama_stack_client/types/vector_stores/file_batch_list_files_params.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/types/vector_stores/file_content_response.py b/src/llama_stack_client/types/vector_stores/file_content_response.py
index 035a34a8..25db83c5 100644
--- a/src/llama_stack_client/types/vector_stores/file_content_response.py
+++ b/src/llama_stack_client/types/vector_stores/file_content_response.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import Dict, List, Union
diff --git a/src/llama_stack_client/types/vector_stores/file_create_params.py b/src/llama_stack_client/types/vector_stores/file_create_params.py
index a75716b3..2220f3ad 100644
--- a/src/llama_stack_client/types/vector_stores/file_create_params.py
+++ b/src/llama_stack_client/types/vector_stores/file_create_params.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/types/vector_stores/file_delete_response.py b/src/llama_stack_client/types/vector_stores/file_delete_response.py
index f24e1910..741ad38b 100644
--- a/src/llama_stack_client/types/vector_stores/file_delete_response.py
+++ b/src/llama_stack_client/types/vector_stores/file_delete_response.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from ..._models import BaseModel
diff --git a/src/llama_stack_client/types/vector_stores/file_list_params.py b/src/llama_stack_client/types/vector_stores/file_list_params.py
index 7174242d..adfeac65 100644
--- a/src/llama_stack_client/types/vector_stores/file_list_params.py
+++ b/src/llama_stack_client/types/vector_stores/file_list_params.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/types/vector_stores/file_update_params.py b/src/llama_stack_client/types/vector_stores/file_update_params.py
index fddfc8c6..511b804a 100644
--- a/src/llama_stack_client/types/vector_stores/file_update_params.py
+++ b/src/llama_stack_client/types/vector_stores/file_update_params.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/src/llama_stack_client/types/vector_stores/list_vector_store_files_in_batch_response.py b/src/llama_stack_client/types/vector_stores/list_vector_store_files_in_batch_response.py
index 34ca9e46..7a7c7dba 100644
--- a/src/llama_stack_client/types/vector_stores/list_vector_store_files_in_batch_response.py
+++ b/src/llama_stack_client/types/vector_stores/list_vector_store_files_in_batch_response.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import List, Optional
diff --git a/src/llama_stack_client/types/vector_stores/vector_store_file.py b/src/llama_stack_client/types/vector_stores/vector_store_file.py
index 243a00df..03aaef6d 100644
--- a/src/llama_stack_client/types/vector_stores/vector_store_file.py
+++ b/src/llama_stack_client/types/vector_stores/vector_store_file.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import Dict, List, Union, Optional
diff --git a/src/llama_stack_client/types/vector_stores/vector_store_file_batches.py b/src/llama_stack_client/types/vector_stores/vector_store_file_batches.py
index 738f7edc..07164609 100644
--- a/src/llama_stack_client/types/vector_stores/vector_store_file_batches.py
+++ b/src/llama_stack_client/types/vector_stores/vector_store_file_batches.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing_extensions import Literal
diff --git a/src/llama_stack_client/types/version_info.py b/src/llama_stack_client/types/version_info.py
index 001d05cb..d2d0c8fd 100644
--- a/src/llama_stack_client/types/version_info.py
+++ b/src/llama_stack_client/types/version_info.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from .._models import BaseModel
diff --git a/tests/__init__.py b/tests/__init__.py
index fd8019a9..6a8e62e9 100644
--- a/tests/__init__.py
+++ b/tests/__init__.py
@@ -1 +1,7 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
diff --git a/tests/api_resources/__init__.py b/tests/api_resources/__init__.py
index fd8019a9..6a8e62e9 100644
--- a/tests/api_resources/__init__.py
+++ b/tests/api_resources/__init__.py
@@ -1 +1,7 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
diff --git a/tests/api_resources/alpha/__init__.py b/tests/api_resources/alpha/__init__.py
index fd8019a9..6a8e62e9 100644
--- a/tests/api_resources/alpha/__init__.py
+++ b/tests/api_resources/alpha/__init__.py
@@ -1 +1,7 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
diff --git a/tests/api_resources/alpha/agents/__init__.py b/tests/api_resources/alpha/agents/__init__.py
index fd8019a9..6a8e62e9 100644
--- a/tests/api_resources/alpha/agents/__init__.py
+++ b/tests/api_resources/alpha/agents/__init__.py
@@ -1 +1,7 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
diff --git a/tests/api_resources/alpha/agents/test_session.py b/tests/api_resources/alpha/agents/test_session.py
index 9c49e6bc..554c2d4e 100644
--- a/tests/api_resources/alpha/agents/test_session.py
+++ b/tests/api_resources/alpha/agents/test_session.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/tests/api_resources/alpha/agents/test_steps.py b/tests/api_resources/alpha/agents/test_steps.py
index 5bf35fc3..c001dd23 100644
--- a/tests/api_resources/alpha/agents/test_steps.py
+++ b/tests/api_resources/alpha/agents/test_steps.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/tests/api_resources/alpha/agents/test_turn.py b/tests/api_resources/alpha/agents/test_turn.py
index 9a2a500f..26f4a7b7 100644
--- a/tests/api_resources/alpha/agents/test_turn.py
+++ b/tests/api_resources/alpha/agents/test_turn.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/tests/api_resources/alpha/eval/__init__.py b/tests/api_resources/alpha/eval/__init__.py
index fd8019a9..6a8e62e9 100644
--- a/tests/api_resources/alpha/eval/__init__.py
+++ b/tests/api_resources/alpha/eval/__init__.py
@@ -1 +1,7 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
diff --git a/tests/api_resources/alpha/eval/test_jobs.py b/tests/api_resources/alpha/eval/test_jobs.py
index f4ea9ce1..42844d80 100644
--- a/tests/api_resources/alpha/eval/test_jobs.py
+++ b/tests/api_resources/alpha/eval/test_jobs.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/tests/api_resources/alpha/post_training/__init__.py b/tests/api_resources/alpha/post_training/__init__.py
index fd8019a9..6a8e62e9 100644
--- a/tests/api_resources/alpha/post_training/__init__.py
+++ b/tests/api_resources/alpha/post_training/__init__.py
@@ -1 +1,7 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
diff --git a/tests/api_resources/alpha/post_training/test_job.py b/tests/api_resources/alpha/post_training/test_job.py
index bec18796..611bf4b6 100644
--- a/tests/api_resources/alpha/post_training/test_job.py
+++ b/tests/api_resources/alpha/post_training/test_job.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/tests/api_resources/alpha/test_agents.py b/tests/api_resources/alpha/test_agents.py
index 075bd478..3324871c 100644
--- a/tests/api_resources/alpha/test_agents.py
+++ b/tests/api_resources/alpha/test_agents.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/tests/api_resources/test_benchmarks.py b/tests/api_resources/alpha/test_benchmarks.py
similarity index 82%
rename from tests/api_resources/test_benchmarks.py
rename to tests/api_resources/alpha/test_benchmarks.py
index 97d3d5c9..71ad6bc2 100644
--- a/tests/api_resources/test_benchmarks.py
+++ b/tests/api_resources/alpha/test_benchmarks.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
@@ -9,7 +15,7 @@
from tests.utils import assert_matches_type
from llama_stack_client import LlamaStackClient, AsyncLlamaStackClient
-from llama_stack_client.types import Benchmark, BenchmarkListResponse
+from llama_stack_client.types.alpha import Benchmark, BenchmarkListResponse
base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
@@ -19,14 +25,14 @@ class TestBenchmarks:
@parametrize
def test_method_retrieve(self, client: LlamaStackClient) -> None:
- benchmark = client.benchmarks.retrieve(
+ benchmark = client.alpha.benchmarks.retrieve(
"benchmark_id",
)
assert_matches_type(Benchmark, benchmark, path=["response"])
@parametrize
def test_raw_response_retrieve(self, client: LlamaStackClient) -> None:
- response = client.benchmarks.with_raw_response.retrieve(
+ response = client.alpha.benchmarks.with_raw_response.retrieve(
"benchmark_id",
)
@@ -37,7 +43,7 @@ def test_raw_response_retrieve(self, client: LlamaStackClient) -> None:
@parametrize
def test_streaming_response_retrieve(self, client: LlamaStackClient) -> None:
- with client.benchmarks.with_streaming_response.retrieve(
+ with client.alpha.benchmarks.with_streaming_response.retrieve(
"benchmark_id",
) as response:
assert not response.is_closed
@@ -51,18 +57,18 @@ def test_streaming_response_retrieve(self, client: LlamaStackClient) -> None:
@parametrize
def test_path_params_retrieve(self, client: LlamaStackClient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `benchmark_id` but received ''"):
- client.benchmarks.with_raw_response.retrieve(
+ client.alpha.benchmarks.with_raw_response.retrieve(
"",
)
@parametrize
def test_method_list(self, client: LlamaStackClient) -> None:
- benchmark = client.benchmarks.list()
+ benchmark = client.alpha.benchmarks.list()
assert_matches_type(BenchmarkListResponse, benchmark, path=["response"])
@parametrize
def test_raw_response_list(self, client: LlamaStackClient) -> None:
- response = client.benchmarks.with_raw_response.list()
+ response = client.alpha.benchmarks.with_raw_response.list()
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -71,7 +77,7 @@ def test_raw_response_list(self, client: LlamaStackClient) -> None:
@parametrize
def test_streaming_response_list(self, client: LlamaStackClient) -> None:
- with client.benchmarks.with_streaming_response.list() as response:
+ with client.alpha.benchmarks.with_streaming_response.list() as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -82,7 +88,7 @@ def test_streaming_response_list(self, client: LlamaStackClient) -> None:
@parametrize
def test_method_register(self, client: LlamaStackClient) -> None:
- benchmark = client.benchmarks.register(
+ benchmark = client.alpha.benchmarks.register(
benchmark_id="benchmark_id",
dataset_id="dataset_id",
scoring_functions=["string"],
@@ -91,7 +97,7 @@ def test_method_register(self, client: LlamaStackClient) -> None:
@parametrize
def test_method_register_with_all_params(self, client: LlamaStackClient) -> None:
- benchmark = client.benchmarks.register(
+ benchmark = client.alpha.benchmarks.register(
benchmark_id="benchmark_id",
dataset_id="dataset_id",
scoring_functions=["string"],
@@ -103,7 +109,7 @@ def test_method_register_with_all_params(self, client: LlamaStackClient) -> None
@parametrize
def test_raw_response_register(self, client: LlamaStackClient) -> None:
- response = client.benchmarks.with_raw_response.register(
+ response = client.alpha.benchmarks.with_raw_response.register(
benchmark_id="benchmark_id",
dataset_id="dataset_id",
scoring_functions=["string"],
@@ -116,7 +122,7 @@ def test_raw_response_register(self, client: LlamaStackClient) -> None:
@parametrize
def test_streaming_response_register(self, client: LlamaStackClient) -> None:
- with client.benchmarks.with_streaming_response.register(
+ with client.alpha.benchmarks.with_streaming_response.register(
benchmark_id="benchmark_id",
dataset_id="dataset_id",
scoring_functions=["string"],
@@ -137,14 +143,14 @@ class TestAsyncBenchmarks:
@parametrize
async def test_method_retrieve(self, async_client: AsyncLlamaStackClient) -> None:
- benchmark = await async_client.benchmarks.retrieve(
+ benchmark = await async_client.alpha.benchmarks.retrieve(
"benchmark_id",
)
assert_matches_type(Benchmark, benchmark, path=["response"])
@parametrize
async def test_raw_response_retrieve(self, async_client: AsyncLlamaStackClient) -> None:
- response = await async_client.benchmarks.with_raw_response.retrieve(
+ response = await async_client.alpha.benchmarks.with_raw_response.retrieve(
"benchmark_id",
)
@@ -155,7 +161,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncLlamaStackClient)
@parametrize
async def test_streaming_response_retrieve(self, async_client: AsyncLlamaStackClient) -> None:
- async with async_client.benchmarks.with_streaming_response.retrieve(
+ async with async_client.alpha.benchmarks.with_streaming_response.retrieve(
"benchmark_id",
) as response:
assert not response.is_closed
@@ -169,18 +175,18 @@ async def test_streaming_response_retrieve(self, async_client: AsyncLlamaStackCl
@parametrize
async def test_path_params_retrieve(self, async_client: AsyncLlamaStackClient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `benchmark_id` but received ''"):
- await async_client.benchmarks.with_raw_response.retrieve(
+ await async_client.alpha.benchmarks.with_raw_response.retrieve(
"",
)
@parametrize
async def test_method_list(self, async_client: AsyncLlamaStackClient) -> None:
- benchmark = await async_client.benchmarks.list()
+ benchmark = await async_client.alpha.benchmarks.list()
assert_matches_type(BenchmarkListResponse, benchmark, path=["response"])
@parametrize
async def test_raw_response_list(self, async_client: AsyncLlamaStackClient) -> None:
- response = await async_client.benchmarks.with_raw_response.list()
+ response = await async_client.alpha.benchmarks.with_raw_response.list()
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -189,7 +195,7 @@ async def test_raw_response_list(self, async_client: AsyncLlamaStackClient) -> N
@parametrize
async def test_streaming_response_list(self, async_client: AsyncLlamaStackClient) -> None:
- async with async_client.benchmarks.with_streaming_response.list() as response:
+ async with async_client.alpha.benchmarks.with_streaming_response.list() as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -200,7 +206,7 @@ async def test_streaming_response_list(self, async_client: AsyncLlamaStackClient
@parametrize
async def test_method_register(self, async_client: AsyncLlamaStackClient) -> None:
- benchmark = await async_client.benchmarks.register(
+ benchmark = await async_client.alpha.benchmarks.register(
benchmark_id="benchmark_id",
dataset_id="dataset_id",
scoring_functions=["string"],
@@ -209,7 +215,7 @@ async def test_method_register(self, async_client: AsyncLlamaStackClient) -> Non
@parametrize
async def test_method_register_with_all_params(self, async_client: AsyncLlamaStackClient) -> None:
- benchmark = await async_client.benchmarks.register(
+ benchmark = await async_client.alpha.benchmarks.register(
benchmark_id="benchmark_id",
dataset_id="dataset_id",
scoring_functions=["string"],
@@ -221,7 +227,7 @@ async def test_method_register_with_all_params(self, async_client: AsyncLlamaSta
@parametrize
async def test_raw_response_register(self, async_client: AsyncLlamaStackClient) -> None:
- response = await async_client.benchmarks.with_raw_response.register(
+ response = await async_client.alpha.benchmarks.with_raw_response.register(
benchmark_id="benchmark_id",
dataset_id="dataset_id",
scoring_functions=["string"],
@@ -234,7 +240,7 @@ async def test_raw_response_register(self, async_client: AsyncLlamaStackClient)
@parametrize
async def test_streaming_response_register(self, async_client: AsyncLlamaStackClient) -> None:
- async with async_client.benchmarks.with_streaming_response.register(
+ async with async_client.alpha.benchmarks.with_streaming_response.register(
benchmark_id="benchmark_id",
dataset_id="dataset_id",
scoring_functions=["string"],
diff --git a/tests/api_resources/alpha/test_eval.py b/tests/api_resources/alpha/test_eval.py
index 88bd0c0c..1ee20010 100644
--- a/tests/api_resources/alpha/test_eval.py
+++ b/tests/api_resources/alpha/test_eval.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/tests/api_resources/alpha/test_inference.py b/tests/api_resources/alpha/test_inference.py
index 551e2213..d1308222 100644
--- a/tests/api_resources/alpha/test_inference.py
+++ b/tests/api_resources/alpha/test_inference.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/tests/api_resources/alpha/test_post_training.py b/tests/api_resources/alpha/test_post_training.py
index 14229811..92f45593 100644
--- a/tests/api_resources/alpha/test_post_training.py
+++ b/tests/api_resources/alpha/test_post_training.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/tests/api_resources/beta/__init__.py b/tests/api_resources/beta/__init__.py
new file mode 100644
index 00000000..fd8019a9
--- /dev/null
+++ b/tests/api_resources/beta/__init__.py
@@ -0,0 +1 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
diff --git a/tests/api_resources/test_datasets.py b/tests/api_resources/beta/test_datasets.py
similarity index 83%
rename from tests/api_resources/test_datasets.py
rename to tests/api_resources/beta/test_datasets.py
index eee1de8c..7a6fc7c9 100644
--- a/tests/api_resources/test_datasets.py
+++ b/tests/api_resources/beta/test_datasets.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
@@ -9,7 +15,7 @@
from tests.utils import assert_matches_type
from llama_stack_client import LlamaStackClient, AsyncLlamaStackClient
-from llama_stack_client.types import (
+from llama_stack_client.types.beta import (
DatasetListResponse,
DatasetIterrowsResponse,
DatasetRegisterResponse,
@@ -24,14 +30,14 @@ class TestDatasets:
@parametrize
def test_method_retrieve(self, client: LlamaStackClient) -> None:
- dataset = client.datasets.retrieve(
+ dataset = client.beta.datasets.retrieve(
"dataset_id",
)
assert_matches_type(DatasetRetrieveResponse, dataset, path=["response"])
@parametrize
def test_raw_response_retrieve(self, client: LlamaStackClient) -> None:
- response = client.datasets.with_raw_response.retrieve(
+ response = client.beta.datasets.with_raw_response.retrieve(
"dataset_id",
)
@@ -42,7 +48,7 @@ def test_raw_response_retrieve(self, client: LlamaStackClient) -> None:
@parametrize
def test_streaming_response_retrieve(self, client: LlamaStackClient) -> None:
- with client.datasets.with_streaming_response.retrieve(
+ with client.beta.datasets.with_streaming_response.retrieve(
"dataset_id",
) as response:
assert not response.is_closed
@@ -56,18 +62,18 @@ def test_streaming_response_retrieve(self, client: LlamaStackClient) -> None:
@parametrize
def test_path_params_retrieve(self, client: LlamaStackClient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `dataset_id` but received ''"):
- client.datasets.with_raw_response.retrieve(
+ client.beta.datasets.with_raw_response.retrieve(
"",
)
@parametrize
def test_method_list(self, client: LlamaStackClient) -> None:
- dataset = client.datasets.list()
+ dataset = client.beta.datasets.list()
assert_matches_type(DatasetListResponse, dataset, path=["response"])
@parametrize
def test_raw_response_list(self, client: LlamaStackClient) -> None:
- response = client.datasets.with_raw_response.list()
+ response = client.beta.datasets.with_raw_response.list()
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -76,7 +82,7 @@ def test_raw_response_list(self, client: LlamaStackClient) -> None:
@parametrize
def test_streaming_response_list(self, client: LlamaStackClient) -> None:
- with client.datasets.with_streaming_response.list() as response:
+ with client.beta.datasets.with_streaming_response.list() as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -87,7 +93,7 @@ def test_streaming_response_list(self, client: LlamaStackClient) -> None:
@parametrize
def test_method_appendrows(self, client: LlamaStackClient) -> None:
- dataset = client.datasets.appendrows(
+ dataset = client.beta.datasets.appendrows(
dataset_id="dataset_id",
rows=[{"foo": True}],
)
@@ -95,7 +101,7 @@ def test_method_appendrows(self, client: LlamaStackClient) -> None:
@parametrize
def test_raw_response_appendrows(self, client: LlamaStackClient) -> None:
- response = client.datasets.with_raw_response.appendrows(
+ response = client.beta.datasets.with_raw_response.appendrows(
dataset_id="dataset_id",
rows=[{"foo": True}],
)
@@ -107,7 +113,7 @@ def test_raw_response_appendrows(self, client: LlamaStackClient) -> None:
@parametrize
def test_streaming_response_appendrows(self, client: LlamaStackClient) -> None:
- with client.datasets.with_streaming_response.appendrows(
+ with client.beta.datasets.with_streaming_response.appendrows(
dataset_id="dataset_id",
rows=[{"foo": True}],
) as response:
@@ -122,21 +128,21 @@ def test_streaming_response_appendrows(self, client: LlamaStackClient) -> None:
@parametrize
def test_path_params_appendrows(self, client: LlamaStackClient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `dataset_id` but received ''"):
- client.datasets.with_raw_response.appendrows(
+ client.beta.datasets.with_raw_response.appendrows(
dataset_id="",
rows=[{"foo": True}],
)
@parametrize
def test_method_iterrows(self, client: LlamaStackClient) -> None:
- dataset = client.datasets.iterrows(
+ dataset = client.beta.datasets.iterrows(
dataset_id="dataset_id",
)
assert_matches_type(DatasetIterrowsResponse, dataset, path=["response"])
@parametrize
def test_method_iterrows_with_all_params(self, client: LlamaStackClient) -> None:
- dataset = client.datasets.iterrows(
+ dataset = client.beta.datasets.iterrows(
dataset_id="dataset_id",
limit=0,
start_index=0,
@@ -145,7 +151,7 @@ def test_method_iterrows_with_all_params(self, client: LlamaStackClient) -> None
@parametrize
def test_raw_response_iterrows(self, client: LlamaStackClient) -> None:
- response = client.datasets.with_raw_response.iterrows(
+ response = client.beta.datasets.with_raw_response.iterrows(
dataset_id="dataset_id",
)
@@ -156,7 +162,7 @@ def test_raw_response_iterrows(self, client: LlamaStackClient) -> None:
@parametrize
def test_streaming_response_iterrows(self, client: LlamaStackClient) -> None:
- with client.datasets.with_streaming_response.iterrows(
+ with client.beta.datasets.with_streaming_response.iterrows(
dataset_id="dataset_id",
) as response:
assert not response.is_closed
@@ -170,13 +176,13 @@ def test_streaming_response_iterrows(self, client: LlamaStackClient) -> None:
@parametrize
def test_path_params_iterrows(self, client: LlamaStackClient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `dataset_id` but received ''"):
- client.datasets.with_raw_response.iterrows(
+ client.beta.datasets.with_raw_response.iterrows(
dataset_id="",
)
@parametrize
def test_method_register(self, client: LlamaStackClient) -> None:
- dataset = client.datasets.register(
+ dataset = client.beta.datasets.register(
purpose="post-training/messages",
source={
"type": "uri",
@@ -187,7 +193,7 @@ def test_method_register(self, client: LlamaStackClient) -> None:
@parametrize
def test_method_register_with_all_params(self, client: LlamaStackClient) -> None:
- dataset = client.datasets.register(
+ dataset = client.beta.datasets.register(
purpose="post-training/messages",
source={
"type": "uri",
@@ -200,7 +206,7 @@ def test_method_register_with_all_params(self, client: LlamaStackClient) -> None
@parametrize
def test_raw_response_register(self, client: LlamaStackClient) -> None:
- response = client.datasets.with_raw_response.register(
+ response = client.beta.datasets.with_raw_response.register(
purpose="post-training/messages",
source={
"type": "uri",
@@ -215,7 +221,7 @@ def test_raw_response_register(self, client: LlamaStackClient) -> None:
@parametrize
def test_streaming_response_register(self, client: LlamaStackClient) -> None:
- with client.datasets.with_streaming_response.register(
+ with client.beta.datasets.with_streaming_response.register(
purpose="post-training/messages",
source={
"type": "uri",
@@ -232,14 +238,14 @@ def test_streaming_response_register(self, client: LlamaStackClient) -> None:
@parametrize
def test_method_unregister(self, client: LlamaStackClient) -> None:
- dataset = client.datasets.unregister(
+ dataset = client.beta.datasets.unregister(
"dataset_id",
)
assert dataset is None
@parametrize
def test_raw_response_unregister(self, client: LlamaStackClient) -> None:
- response = client.datasets.with_raw_response.unregister(
+ response = client.beta.datasets.with_raw_response.unregister(
"dataset_id",
)
@@ -250,7 +256,7 @@ def test_raw_response_unregister(self, client: LlamaStackClient) -> None:
@parametrize
def test_streaming_response_unregister(self, client: LlamaStackClient) -> None:
- with client.datasets.with_streaming_response.unregister(
+ with client.beta.datasets.with_streaming_response.unregister(
"dataset_id",
) as response:
assert not response.is_closed
@@ -264,7 +270,7 @@ def test_streaming_response_unregister(self, client: LlamaStackClient) -> None:
@parametrize
def test_path_params_unregister(self, client: LlamaStackClient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `dataset_id` but received ''"):
- client.datasets.with_raw_response.unregister(
+ client.beta.datasets.with_raw_response.unregister(
"",
)
@@ -276,14 +282,14 @@ class TestAsyncDatasets:
@parametrize
async def test_method_retrieve(self, async_client: AsyncLlamaStackClient) -> None:
- dataset = await async_client.datasets.retrieve(
+ dataset = await async_client.beta.datasets.retrieve(
"dataset_id",
)
assert_matches_type(DatasetRetrieveResponse, dataset, path=["response"])
@parametrize
async def test_raw_response_retrieve(self, async_client: AsyncLlamaStackClient) -> None:
- response = await async_client.datasets.with_raw_response.retrieve(
+ response = await async_client.beta.datasets.with_raw_response.retrieve(
"dataset_id",
)
@@ -294,7 +300,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncLlamaStackClient)
@parametrize
async def test_streaming_response_retrieve(self, async_client: AsyncLlamaStackClient) -> None:
- async with async_client.datasets.with_streaming_response.retrieve(
+ async with async_client.beta.datasets.with_streaming_response.retrieve(
"dataset_id",
) as response:
assert not response.is_closed
@@ -308,18 +314,18 @@ async def test_streaming_response_retrieve(self, async_client: AsyncLlamaStackCl
@parametrize
async def test_path_params_retrieve(self, async_client: AsyncLlamaStackClient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `dataset_id` but received ''"):
- await async_client.datasets.with_raw_response.retrieve(
+ await async_client.beta.datasets.with_raw_response.retrieve(
"",
)
@parametrize
async def test_method_list(self, async_client: AsyncLlamaStackClient) -> None:
- dataset = await async_client.datasets.list()
+ dataset = await async_client.beta.datasets.list()
assert_matches_type(DatasetListResponse, dataset, path=["response"])
@parametrize
async def test_raw_response_list(self, async_client: AsyncLlamaStackClient) -> None:
- response = await async_client.datasets.with_raw_response.list()
+ response = await async_client.beta.datasets.with_raw_response.list()
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -328,7 +334,7 @@ async def test_raw_response_list(self, async_client: AsyncLlamaStackClient) -> N
@parametrize
async def test_streaming_response_list(self, async_client: AsyncLlamaStackClient) -> None:
- async with async_client.datasets.with_streaming_response.list() as response:
+ async with async_client.beta.datasets.with_streaming_response.list() as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -339,7 +345,7 @@ async def test_streaming_response_list(self, async_client: AsyncLlamaStackClient
@parametrize
async def test_method_appendrows(self, async_client: AsyncLlamaStackClient) -> None:
- dataset = await async_client.datasets.appendrows(
+ dataset = await async_client.beta.datasets.appendrows(
dataset_id="dataset_id",
rows=[{"foo": True}],
)
@@ -347,7 +353,7 @@ async def test_method_appendrows(self, async_client: AsyncLlamaStackClient) -> N
@parametrize
async def test_raw_response_appendrows(self, async_client: AsyncLlamaStackClient) -> None:
- response = await async_client.datasets.with_raw_response.appendrows(
+ response = await async_client.beta.datasets.with_raw_response.appendrows(
dataset_id="dataset_id",
rows=[{"foo": True}],
)
@@ -359,7 +365,7 @@ async def test_raw_response_appendrows(self, async_client: AsyncLlamaStackClient
@parametrize
async def test_streaming_response_appendrows(self, async_client: AsyncLlamaStackClient) -> None:
- async with async_client.datasets.with_streaming_response.appendrows(
+ async with async_client.beta.datasets.with_streaming_response.appendrows(
dataset_id="dataset_id",
rows=[{"foo": True}],
) as response:
@@ -374,21 +380,21 @@ async def test_streaming_response_appendrows(self, async_client: AsyncLlamaStack
@parametrize
async def test_path_params_appendrows(self, async_client: AsyncLlamaStackClient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `dataset_id` but received ''"):
- await async_client.datasets.with_raw_response.appendrows(
+ await async_client.beta.datasets.with_raw_response.appendrows(
dataset_id="",
rows=[{"foo": True}],
)
@parametrize
async def test_method_iterrows(self, async_client: AsyncLlamaStackClient) -> None:
- dataset = await async_client.datasets.iterrows(
+ dataset = await async_client.beta.datasets.iterrows(
dataset_id="dataset_id",
)
assert_matches_type(DatasetIterrowsResponse, dataset, path=["response"])
@parametrize
async def test_method_iterrows_with_all_params(self, async_client: AsyncLlamaStackClient) -> None:
- dataset = await async_client.datasets.iterrows(
+ dataset = await async_client.beta.datasets.iterrows(
dataset_id="dataset_id",
limit=0,
start_index=0,
@@ -397,7 +403,7 @@ async def test_method_iterrows_with_all_params(self, async_client: AsyncLlamaSta
@parametrize
async def test_raw_response_iterrows(self, async_client: AsyncLlamaStackClient) -> None:
- response = await async_client.datasets.with_raw_response.iterrows(
+ response = await async_client.beta.datasets.with_raw_response.iterrows(
dataset_id="dataset_id",
)
@@ -408,7 +414,7 @@ async def test_raw_response_iterrows(self, async_client: AsyncLlamaStackClient)
@parametrize
async def test_streaming_response_iterrows(self, async_client: AsyncLlamaStackClient) -> None:
- async with async_client.datasets.with_streaming_response.iterrows(
+ async with async_client.beta.datasets.with_streaming_response.iterrows(
dataset_id="dataset_id",
) as response:
assert not response.is_closed
@@ -422,13 +428,13 @@ async def test_streaming_response_iterrows(self, async_client: AsyncLlamaStackCl
@parametrize
async def test_path_params_iterrows(self, async_client: AsyncLlamaStackClient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `dataset_id` but received ''"):
- await async_client.datasets.with_raw_response.iterrows(
+ await async_client.beta.datasets.with_raw_response.iterrows(
dataset_id="",
)
@parametrize
async def test_method_register(self, async_client: AsyncLlamaStackClient) -> None:
- dataset = await async_client.datasets.register(
+ dataset = await async_client.beta.datasets.register(
purpose="post-training/messages",
source={
"type": "uri",
@@ -439,7 +445,7 @@ async def test_method_register(self, async_client: AsyncLlamaStackClient) -> Non
@parametrize
async def test_method_register_with_all_params(self, async_client: AsyncLlamaStackClient) -> None:
- dataset = await async_client.datasets.register(
+ dataset = await async_client.beta.datasets.register(
purpose="post-training/messages",
source={
"type": "uri",
@@ -452,7 +458,7 @@ async def test_method_register_with_all_params(self, async_client: AsyncLlamaSta
@parametrize
async def test_raw_response_register(self, async_client: AsyncLlamaStackClient) -> None:
- response = await async_client.datasets.with_raw_response.register(
+ response = await async_client.beta.datasets.with_raw_response.register(
purpose="post-training/messages",
source={
"type": "uri",
@@ -467,7 +473,7 @@ async def test_raw_response_register(self, async_client: AsyncLlamaStackClient)
@parametrize
async def test_streaming_response_register(self, async_client: AsyncLlamaStackClient) -> None:
- async with async_client.datasets.with_streaming_response.register(
+ async with async_client.beta.datasets.with_streaming_response.register(
purpose="post-training/messages",
source={
"type": "uri",
@@ -484,14 +490,14 @@ async def test_streaming_response_register(self, async_client: AsyncLlamaStackCl
@parametrize
async def test_method_unregister(self, async_client: AsyncLlamaStackClient) -> None:
- dataset = await async_client.datasets.unregister(
+ dataset = await async_client.beta.datasets.unregister(
"dataset_id",
)
assert dataset is None
@parametrize
async def test_raw_response_unregister(self, async_client: AsyncLlamaStackClient) -> None:
- response = await async_client.datasets.with_raw_response.unregister(
+ response = await async_client.beta.datasets.with_raw_response.unregister(
"dataset_id",
)
@@ -502,7 +508,7 @@ async def test_raw_response_unregister(self, async_client: AsyncLlamaStackClient
@parametrize
async def test_streaming_response_unregister(self, async_client: AsyncLlamaStackClient) -> None:
- async with async_client.datasets.with_streaming_response.unregister(
+ async with async_client.beta.datasets.with_streaming_response.unregister(
"dataset_id",
) as response:
assert not response.is_closed
@@ -516,6 +522,6 @@ async def test_streaming_response_unregister(self, async_client: AsyncLlamaStack
@parametrize
async def test_path_params_unregister(self, async_client: AsyncLlamaStackClient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `dataset_id` but received ''"):
- await async_client.datasets.with_raw_response.unregister(
+ await async_client.beta.datasets.with_raw_response.unregister(
"",
)
diff --git a/tests/api_resources/chat/__init__.py b/tests/api_resources/chat/__init__.py
index fd8019a9..6a8e62e9 100644
--- a/tests/api_resources/chat/__init__.py
+++ b/tests/api_resources/chat/__init__.py
@@ -1 +1,7 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
diff --git a/tests/api_resources/chat/test_completions.py b/tests/api_resources/chat/test_completions.py
index 7a1e9d41..45d508c0 100644
--- a/tests/api_resources/chat/test_completions.py
+++ b/tests/api_resources/chat/test_completions.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/tests/api_resources/conversations/__init__.py b/tests/api_resources/conversations/__init__.py
index fd8019a9..6a8e62e9 100644
--- a/tests/api_resources/conversations/__init__.py
+++ b/tests/api_resources/conversations/__init__.py
@@ -1 +1,7 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
diff --git a/tests/api_resources/conversations/test_items.py b/tests/api_resources/conversations/test_items.py
index a0d58fa2..2d2ce821 100644
--- a/tests/api_resources/conversations/test_items.py
+++ b/tests/api_resources/conversations/test_items.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
@@ -9,6 +15,7 @@
from tests.utils import assert_matches_type
from llama_stack_client import LlamaStackClient, AsyncLlamaStackClient
+from llama_stack_client.pagination import SyncOpenAICursorPage, AsyncOpenAICursorPage
from llama_stack_client.types.conversations import (
ItemGetResponse,
ItemListResponse,
@@ -91,42 +98,41 @@ def test_path_params_create(self, client: LlamaStackClient) -> None:
def test_method_list(self, client: LlamaStackClient) -> None:
item = client.conversations.items.list(
conversation_id="conversation_id",
- after="string",
- include=["code_interpreter_call.outputs"],
+ )
+ assert_matches_type(SyncOpenAICursorPage[ItemListResponse], item, path=["response"])
+
+ @parametrize
+ def test_method_list_with_all_params(self, client: LlamaStackClient) -> None:
+ item = client.conversations.items.list(
+ conversation_id="conversation_id",
+ after="after",
+ include=["web_search_call.action.sources"],
limit=0,
order="asc",
)
- assert_matches_type(ItemListResponse, item, path=["response"])
+ assert_matches_type(SyncOpenAICursorPage[ItemListResponse], item, path=["response"])
@parametrize
def test_raw_response_list(self, client: LlamaStackClient) -> None:
response = client.conversations.items.with_raw_response.list(
conversation_id="conversation_id",
- after="string",
- include=["code_interpreter_call.outputs"],
- limit=0,
- order="asc",
)
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
item = response.parse()
- assert_matches_type(ItemListResponse, item, path=["response"])
+ assert_matches_type(SyncOpenAICursorPage[ItemListResponse], item, path=["response"])
@parametrize
def test_streaming_response_list(self, client: LlamaStackClient) -> None:
with client.conversations.items.with_streaming_response.list(
conversation_id="conversation_id",
- after="string",
- include=["code_interpreter_call.outputs"],
- limit=0,
- order="asc",
) as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
item = response.parse()
- assert_matches_type(ItemListResponse, item, path=["response"])
+ assert_matches_type(SyncOpenAICursorPage[ItemListResponse], item, path=["response"])
assert cast(Any, response.is_closed) is True
@@ -135,10 +141,6 @@ def test_path_params_list(self, client: LlamaStackClient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `conversation_id` but received ''"):
client.conversations.items.with_raw_response.list(
conversation_id="",
- after="string",
- include=["code_interpreter_call.outputs"],
- limit=0,
- order="asc",
)
@parametrize
@@ -265,42 +267,41 @@ async def test_path_params_create(self, async_client: AsyncLlamaStackClient) ->
async def test_method_list(self, async_client: AsyncLlamaStackClient) -> None:
item = await async_client.conversations.items.list(
conversation_id="conversation_id",
- after="string",
- include=["code_interpreter_call.outputs"],
+ )
+ assert_matches_type(AsyncOpenAICursorPage[ItemListResponse], item, path=["response"])
+
+ @parametrize
+ async def test_method_list_with_all_params(self, async_client: AsyncLlamaStackClient) -> None:
+ item = await async_client.conversations.items.list(
+ conversation_id="conversation_id",
+ after="after",
+ include=["web_search_call.action.sources"],
limit=0,
order="asc",
)
- assert_matches_type(ItemListResponse, item, path=["response"])
+ assert_matches_type(AsyncOpenAICursorPage[ItemListResponse], item, path=["response"])
@parametrize
async def test_raw_response_list(self, async_client: AsyncLlamaStackClient) -> None:
response = await async_client.conversations.items.with_raw_response.list(
conversation_id="conversation_id",
- after="string",
- include=["code_interpreter_call.outputs"],
- limit=0,
- order="asc",
)
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
item = await response.parse()
- assert_matches_type(ItemListResponse, item, path=["response"])
+ assert_matches_type(AsyncOpenAICursorPage[ItemListResponse], item, path=["response"])
@parametrize
async def test_streaming_response_list(self, async_client: AsyncLlamaStackClient) -> None:
async with async_client.conversations.items.with_streaming_response.list(
conversation_id="conversation_id",
- after="string",
- include=["code_interpreter_call.outputs"],
- limit=0,
- order="asc",
) as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
item = await response.parse()
- assert_matches_type(ItemListResponse, item, path=["response"])
+ assert_matches_type(AsyncOpenAICursorPage[ItemListResponse], item, path=["response"])
assert cast(Any, response.is_closed) is True
@@ -309,10 +310,6 @@ async def test_path_params_list(self, async_client: AsyncLlamaStackClient) -> No
with pytest.raises(ValueError, match=r"Expected a non-empty value for `conversation_id` but received ''"):
await async_client.conversations.items.with_raw_response.list(
conversation_id="",
- after="string",
- include=["code_interpreter_call.outputs"],
- limit=0,
- order="asc",
)
@parametrize
diff --git a/tests/api_resources/models/__init__.py b/tests/api_resources/models/__init__.py
index fd8019a9..6a8e62e9 100644
--- a/tests/api_resources/models/__init__.py
+++ b/tests/api_resources/models/__init__.py
@@ -1 +1,7 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
diff --git a/tests/api_resources/models/test_openai.py b/tests/api_resources/models/test_openai.py
index f94d2bf6..6a9acf23 100644
--- a/tests/api_resources/models/test_openai.py
+++ b/tests/api_resources/models/test_openai.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/tests/api_resources/responses/__init__.py b/tests/api_resources/responses/__init__.py
index fd8019a9..6a8e62e9 100644
--- a/tests/api_resources/responses/__init__.py
+++ b/tests/api_resources/responses/__init__.py
@@ -1 +1,7 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
diff --git a/tests/api_resources/responses/test_input_items.py b/tests/api_resources/responses/test_input_items.py
index a0160f72..a0452e8d 100644
--- a/tests/api_resources/responses/test_input_items.py
+++ b/tests/api_resources/responses/test_input_items.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/tests/api_resources/test_completions.py b/tests/api_resources/test_completions.py
index 23964bdf..314c6d95 100644
--- a/tests/api_resources/test_completions.py
+++ b/tests/api_resources/test_completions.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/tests/api_resources/test_conversations.py b/tests/api_resources/test_conversations.py
index a499ac91..20eab5ac 100644
--- a/tests/api_resources/test_conversations.py
+++ b/tests/api_resources/test_conversations.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/tests/api_resources/test_embeddings.py b/tests/api_resources/test_embeddings.py
index 5296e9c0..8890240f 100644
--- a/tests/api_resources/test_embeddings.py
+++ b/tests/api_resources/test_embeddings.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/tests/api_resources/test_files.py b/tests/api_resources/test_files.py
index 83b763ab..d2c83ff0 100644
--- a/tests/api_resources/test_files.py
+++ b/tests/api_resources/test_files.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/tests/api_resources/test_inspect.py b/tests/api_resources/test_inspect.py
index a43abe6c..be117b12 100644
--- a/tests/api_resources/test_inspect.py
+++ b/tests/api_resources/test_inspect.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/tests/api_resources/test_models.py b/tests/api_resources/test_models.py
index 2e3f15be..13e98aa3 100644
--- a/tests/api_resources/test_models.py
+++ b/tests/api_resources/test_models.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/tests/api_resources/test_moderations.py b/tests/api_resources/test_moderations.py
index cbc77db1..5d022f3d 100644
--- a/tests/api_resources/test_moderations.py
+++ b/tests/api_resources/test_moderations.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/tests/api_resources/test_providers.py b/tests/api_resources/test_providers.py
index b23a84bd..2e8da2f5 100644
--- a/tests/api_resources/test_providers.py
+++ b/tests/api_resources/test_providers.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/tests/api_resources/test_responses.py b/tests/api_resources/test_responses.py
index 9e862716..5ef731fd 100644
--- a/tests/api_resources/test_responses.py
+++ b/tests/api_resources/test_responses.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/tests/api_resources/test_routes.py b/tests/api_resources/test_routes.py
index 12b51f28..9c863f26 100644
--- a/tests/api_resources/test_routes.py
+++ b/tests/api_resources/test_routes.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/tests/api_resources/test_safety.py b/tests/api_resources/test_safety.py
index 257dfd76..f3c13a98 100644
--- a/tests/api_resources/test_safety.py
+++ b/tests/api_resources/test_safety.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/tests/api_resources/test_scoring.py b/tests/api_resources/test_scoring.py
index ed46bd07..6f933ffc 100644
--- a/tests/api_resources/test_scoring.py
+++ b/tests/api_resources/test_scoring.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/tests/api_resources/test_scoring_functions.py b/tests/api_resources/test_scoring_functions.py
index 44556317..74421e2f 100644
--- a/tests/api_resources/test_scoring_functions.py
+++ b/tests/api_resources/test_scoring_functions.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/tests/api_resources/test_shields.py b/tests/api_resources/test_shields.py
index 8eaab0f9..f50e9979 100644
--- a/tests/api_resources/test_shields.py
+++ b/tests/api_resources/test_shields.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/tests/api_resources/test_synthetic_data_generation.py b/tests/api_resources/test_synthetic_data_generation.py
index c383770e..c40ddede 100644
--- a/tests/api_resources/test_synthetic_data_generation.py
+++ b/tests/api_resources/test_synthetic_data_generation.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/tests/api_resources/test_telemetry.py b/tests/api_resources/test_telemetry.py
deleted file mode 100644
index 07075c52..00000000
--- a/tests/api_resources/test_telemetry.py
+++ /dev/null
@@ -1,809 +0,0 @@
-# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-
-from __future__ import annotations
-
-import os
-from typing import Any, cast
-
-import pytest
-
-from tests.utils import assert_matches_type
-from llama_stack_client import LlamaStackClient, AsyncLlamaStackClient
-from llama_stack_client.types import (
- Trace,
- TelemetryGetSpanResponse,
- TelemetryQuerySpansResponse,
- TelemetryGetSpanTreeResponse,
- TelemetryQueryTracesResponse,
- TelemetryQueryMetricsResponse,
-)
-
-base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
-
-
-class TestTelemetry:
- parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
-
- @parametrize
- def test_method_get_span(self, client: LlamaStackClient) -> None:
- telemetry = client.telemetry.get_span(
- span_id="span_id",
- trace_id="trace_id",
- )
- assert_matches_type(TelemetryGetSpanResponse, telemetry, path=["response"])
-
- @parametrize
- def test_raw_response_get_span(self, client: LlamaStackClient) -> None:
- response = client.telemetry.with_raw_response.get_span(
- span_id="span_id",
- trace_id="trace_id",
- )
-
- assert response.is_closed is True
- assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- telemetry = response.parse()
- assert_matches_type(TelemetryGetSpanResponse, telemetry, path=["response"])
-
- @parametrize
- def test_streaming_response_get_span(self, client: LlamaStackClient) -> None:
- with client.telemetry.with_streaming_response.get_span(
- span_id="span_id",
- trace_id="trace_id",
- ) as response:
- assert not response.is_closed
- assert response.http_request.headers.get("X-Stainless-Lang") == "python"
-
- telemetry = response.parse()
- assert_matches_type(TelemetryGetSpanResponse, telemetry, path=["response"])
-
- assert cast(Any, response.is_closed) is True
-
- @parametrize
- def test_path_params_get_span(self, client: LlamaStackClient) -> None:
- with pytest.raises(ValueError, match=r"Expected a non-empty value for `trace_id` but received ''"):
- client.telemetry.with_raw_response.get_span(
- span_id="span_id",
- trace_id="",
- )
-
- with pytest.raises(ValueError, match=r"Expected a non-empty value for `span_id` but received ''"):
- client.telemetry.with_raw_response.get_span(
- span_id="",
- trace_id="trace_id",
- )
-
- @parametrize
- def test_method_get_span_tree(self, client: LlamaStackClient) -> None:
- telemetry = client.telemetry.get_span_tree(
- span_id="span_id",
- )
- assert_matches_type(TelemetryGetSpanTreeResponse, telemetry, path=["response"])
-
- @parametrize
- def test_method_get_span_tree_with_all_params(self, client: LlamaStackClient) -> None:
- telemetry = client.telemetry.get_span_tree(
- span_id="span_id",
- attributes_to_return=["string"],
- max_depth=0,
- )
- assert_matches_type(TelemetryGetSpanTreeResponse, telemetry, path=["response"])
-
- @parametrize
- def test_raw_response_get_span_tree(self, client: LlamaStackClient) -> None:
- response = client.telemetry.with_raw_response.get_span_tree(
- span_id="span_id",
- )
-
- assert response.is_closed is True
- assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- telemetry = response.parse()
- assert_matches_type(TelemetryGetSpanTreeResponse, telemetry, path=["response"])
-
- @parametrize
- def test_streaming_response_get_span_tree(self, client: LlamaStackClient) -> None:
- with client.telemetry.with_streaming_response.get_span_tree(
- span_id="span_id",
- ) as response:
- assert not response.is_closed
- assert response.http_request.headers.get("X-Stainless-Lang") == "python"
-
- telemetry = response.parse()
- assert_matches_type(TelemetryGetSpanTreeResponse, telemetry, path=["response"])
-
- assert cast(Any, response.is_closed) is True
-
- @parametrize
- def test_path_params_get_span_tree(self, client: LlamaStackClient) -> None:
- with pytest.raises(ValueError, match=r"Expected a non-empty value for `span_id` but received ''"):
- client.telemetry.with_raw_response.get_span_tree(
- span_id="",
- )
-
- @parametrize
- def test_method_get_trace(self, client: LlamaStackClient) -> None:
- telemetry = client.telemetry.get_trace(
- "trace_id",
- )
- assert_matches_type(Trace, telemetry, path=["response"])
-
- @parametrize
- def test_raw_response_get_trace(self, client: LlamaStackClient) -> None:
- response = client.telemetry.with_raw_response.get_trace(
- "trace_id",
- )
-
- assert response.is_closed is True
- assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- telemetry = response.parse()
- assert_matches_type(Trace, telemetry, path=["response"])
-
- @parametrize
- def test_streaming_response_get_trace(self, client: LlamaStackClient) -> None:
- with client.telemetry.with_streaming_response.get_trace(
- "trace_id",
- ) as response:
- assert not response.is_closed
- assert response.http_request.headers.get("X-Stainless-Lang") == "python"
-
- telemetry = response.parse()
- assert_matches_type(Trace, telemetry, path=["response"])
-
- assert cast(Any, response.is_closed) is True
-
- @parametrize
- def test_path_params_get_trace(self, client: LlamaStackClient) -> None:
- with pytest.raises(ValueError, match=r"Expected a non-empty value for `trace_id` but received ''"):
- client.telemetry.with_raw_response.get_trace(
- "",
- )
-
- @pytest.mark.skip(reason="unsupported query params in java / kotlin")
- @parametrize
- def test_method_query_metrics(self, client: LlamaStackClient) -> None:
- telemetry = client.telemetry.query_metrics(
- metric_name="metric_name",
- query_type="range",
- start_time=0,
- )
- assert_matches_type(TelemetryQueryMetricsResponse, telemetry, path=["response"])
-
- @pytest.mark.skip(reason="unsupported query params in java / kotlin")
- @parametrize
- def test_method_query_metrics_with_all_params(self, client: LlamaStackClient) -> None:
- telemetry = client.telemetry.query_metrics(
- metric_name="metric_name",
- query_type="range",
- start_time=0,
- end_time=0,
- granularity="granularity",
- label_matchers=[
- {
- "name": "name",
- "operator": "=",
- "value": "value",
- }
- ],
- )
- assert_matches_type(TelemetryQueryMetricsResponse, telemetry, path=["response"])
-
- @pytest.mark.skip(reason="unsupported query params in java / kotlin")
- @parametrize
- def test_raw_response_query_metrics(self, client: LlamaStackClient) -> None:
- response = client.telemetry.with_raw_response.query_metrics(
- metric_name="metric_name",
- query_type="range",
- start_time=0,
- )
-
- assert response.is_closed is True
- assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- telemetry = response.parse()
- assert_matches_type(TelemetryQueryMetricsResponse, telemetry, path=["response"])
-
- @pytest.mark.skip(reason="unsupported query params in java / kotlin")
- @parametrize
- def test_streaming_response_query_metrics(self, client: LlamaStackClient) -> None:
- with client.telemetry.with_streaming_response.query_metrics(
- metric_name="metric_name",
- query_type="range",
- start_time=0,
- ) as response:
- assert not response.is_closed
- assert response.http_request.headers.get("X-Stainless-Lang") == "python"
-
- telemetry = response.parse()
- assert_matches_type(TelemetryQueryMetricsResponse, telemetry, path=["response"])
-
- assert cast(Any, response.is_closed) is True
-
- @pytest.mark.skip(reason="unsupported query params in java / kotlin")
- @parametrize
- def test_path_params_query_metrics(self, client: LlamaStackClient) -> None:
- with pytest.raises(ValueError, match=r"Expected a non-empty value for `metric_name` but received ''"):
- client.telemetry.with_raw_response.query_metrics(
- metric_name="",
- query_type="range",
- start_time=0,
- )
-
- @pytest.mark.skip(reason="unsupported query params in java / kotlin")
- @parametrize
- def test_method_query_spans(self, client: LlamaStackClient) -> None:
- telemetry = client.telemetry.query_spans(
- attribute_filters=[
- {
- "key": "key",
- "op": "eq",
- "value": True,
- }
- ],
- attributes_to_return=["string"],
- )
- assert_matches_type(TelemetryQuerySpansResponse, telemetry, path=["response"])
-
- @pytest.mark.skip(reason="unsupported query params in java / kotlin")
- @parametrize
- def test_method_query_spans_with_all_params(self, client: LlamaStackClient) -> None:
- telemetry = client.telemetry.query_spans(
- attribute_filters=[
- {
- "key": "key",
- "op": "eq",
- "value": True,
- }
- ],
- attributes_to_return=["string"],
- max_depth=0,
- )
- assert_matches_type(TelemetryQuerySpansResponse, telemetry, path=["response"])
-
- @pytest.mark.skip(reason="unsupported query params in java / kotlin")
- @parametrize
- def test_raw_response_query_spans(self, client: LlamaStackClient) -> None:
- response = client.telemetry.with_raw_response.query_spans(
- attribute_filters=[
- {
- "key": "key",
- "op": "eq",
- "value": True,
- }
- ],
- attributes_to_return=["string"],
- )
-
- assert response.is_closed is True
- assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- telemetry = response.parse()
- assert_matches_type(TelemetryQuerySpansResponse, telemetry, path=["response"])
-
- @pytest.mark.skip(reason="unsupported query params in java / kotlin")
- @parametrize
- def test_streaming_response_query_spans(self, client: LlamaStackClient) -> None:
- with client.telemetry.with_streaming_response.query_spans(
- attribute_filters=[
- {
- "key": "key",
- "op": "eq",
- "value": True,
- }
- ],
- attributes_to_return=["string"],
- ) as response:
- assert not response.is_closed
- assert response.http_request.headers.get("X-Stainless-Lang") == "python"
-
- telemetry = response.parse()
- assert_matches_type(TelemetryQuerySpansResponse, telemetry, path=["response"])
-
- assert cast(Any, response.is_closed) is True
-
- @pytest.mark.skip(reason="unsupported query params in java / kotlin")
- @parametrize
- def test_method_query_traces(self, client: LlamaStackClient) -> None:
- telemetry = client.telemetry.query_traces()
- assert_matches_type(TelemetryQueryTracesResponse, telemetry, path=["response"])
-
- @pytest.mark.skip(reason="unsupported query params in java / kotlin")
- @parametrize
- def test_method_query_traces_with_all_params(self, client: LlamaStackClient) -> None:
- telemetry = client.telemetry.query_traces(
- attribute_filters=[
- {
- "key": "key",
- "op": "eq",
- "value": True,
- }
- ],
- limit=0,
- offset=0,
- order_by=["string"],
- )
- assert_matches_type(TelemetryQueryTracesResponse, telemetry, path=["response"])
-
- @pytest.mark.skip(reason="unsupported query params in java / kotlin")
- @parametrize
- def test_raw_response_query_traces(self, client: LlamaStackClient) -> None:
- response = client.telemetry.with_raw_response.query_traces()
-
- assert response.is_closed is True
- assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- telemetry = response.parse()
- assert_matches_type(TelemetryQueryTracesResponse, telemetry, path=["response"])
-
- @pytest.mark.skip(reason="unsupported query params in java / kotlin")
- @parametrize
- def test_streaming_response_query_traces(self, client: LlamaStackClient) -> None:
- with client.telemetry.with_streaming_response.query_traces() as response:
- assert not response.is_closed
- assert response.http_request.headers.get("X-Stainless-Lang") == "python"
-
- telemetry = response.parse()
- assert_matches_type(TelemetryQueryTracesResponse, telemetry, path=["response"])
-
- assert cast(Any, response.is_closed) is True
-
- @parametrize
- def test_method_save_spans_to_dataset(self, client: LlamaStackClient) -> None:
- telemetry = client.telemetry.save_spans_to_dataset(
- attribute_filters=[
- {
- "key": "key",
- "op": "eq",
- "value": True,
- }
- ],
- attributes_to_save=["string"],
- dataset_id="dataset_id",
- )
- assert telemetry is None
-
- @parametrize
- def test_method_save_spans_to_dataset_with_all_params(self, client: LlamaStackClient) -> None:
- telemetry = client.telemetry.save_spans_to_dataset(
- attribute_filters=[
- {
- "key": "key",
- "op": "eq",
- "value": True,
- }
- ],
- attributes_to_save=["string"],
- dataset_id="dataset_id",
- max_depth=0,
- )
- assert telemetry is None
-
- @parametrize
- def test_raw_response_save_spans_to_dataset(self, client: LlamaStackClient) -> None:
- response = client.telemetry.with_raw_response.save_spans_to_dataset(
- attribute_filters=[
- {
- "key": "key",
- "op": "eq",
- "value": True,
- }
- ],
- attributes_to_save=["string"],
- dataset_id="dataset_id",
- )
-
- assert response.is_closed is True
- assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- telemetry = response.parse()
- assert telemetry is None
-
- @parametrize
- def test_streaming_response_save_spans_to_dataset(self, client: LlamaStackClient) -> None:
- with client.telemetry.with_streaming_response.save_spans_to_dataset(
- attribute_filters=[
- {
- "key": "key",
- "op": "eq",
- "value": True,
- }
- ],
- attributes_to_save=["string"],
- dataset_id="dataset_id",
- ) as response:
- assert not response.is_closed
- assert response.http_request.headers.get("X-Stainless-Lang") == "python"
-
- telemetry = response.parse()
- assert telemetry is None
-
- assert cast(Any, response.is_closed) is True
-
-
-class TestAsyncTelemetry:
- parametrize = pytest.mark.parametrize(
- "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
- )
-
- @parametrize
- async def test_method_get_span(self, async_client: AsyncLlamaStackClient) -> None:
- telemetry = await async_client.telemetry.get_span(
- span_id="span_id",
- trace_id="trace_id",
- )
- assert_matches_type(TelemetryGetSpanResponse, telemetry, path=["response"])
-
- @parametrize
- async def test_raw_response_get_span(self, async_client: AsyncLlamaStackClient) -> None:
- response = await async_client.telemetry.with_raw_response.get_span(
- span_id="span_id",
- trace_id="trace_id",
- )
-
- assert response.is_closed is True
- assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- telemetry = await response.parse()
- assert_matches_type(TelemetryGetSpanResponse, telemetry, path=["response"])
-
- @parametrize
- async def test_streaming_response_get_span(self, async_client: AsyncLlamaStackClient) -> None:
- async with async_client.telemetry.with_streaming_response.get_span(
- span_id="span_id",
- trace_id="trace_id",
- ) as response:
- assert not response.is_closed
- assert response.http_request.headers.get("X-Stainless-Lang") == "python"
-
- telemetry = await response.parse()
- assert_matches_type(TelemetryGetSpanResponse, telemetry, path=["response"])
-
- assert cast(Any, response.is_closed) is True
-
- @parametrize
- async def test_path_params_get_span(self, async_client: AsyncLlamaStackClient) -> None:
- with pytest.raises(ValueError, match=r"Expected a non-empty value for `trace_id` but received ''"):
- await async_client.telemetry.with_raw_response.get_span(
- span_id="span_id",
- trace_id="",
- )
-
- with pytest.raises(ValueError, match=r"Expected a non-empty value for `span_id` but received ''"):
- await async_client.telemetry.with_raw_response.get_span(
- span_id="",
- trace_id="trace_id",
- )
-
- @parametrize
- async def test_method_get_span_tree(self, async_client: AsyncLlamaStackClient) -> None:
- telemetry = await async_client.telemetry.get_span_tree(
- span_id="span_id",
- )
- assert_matches_type(TelemetryGetSpanTreeResponse, telemetry, path=["response"])
-
- @parametrize
- async def test_method_get_span_tree_with_all_params(self, async_client: AsyncLlamaStackClient) -> None:
- telemetry = await async_client.telemetry.get_span_tree(
- span_id="span_id",
- attributes_to_return=["string"],
- max_depth=0,
- )
- assert_matches_type(TelemetryGetSpanTreeResponse, telemetry, path=["response"])
-
- @parametrize
- async def test_raw_response_get_span_tree(self, async_client: AsyncLlamaStackClient) -> None:
- response = await async_client.telemetry.with_raw_response.get_span_tree(
- span_id="span_id",
- )
-
- assert response.is_closed is True
- assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- telemetry = await response.parse()
- assert_matches_type(TelemetryGetSpanTreeResponse, telemetry, path=["response"])
-
- @parametrize
- async def test_streaming_response_get_span_tree(self, async_client: AsyncLlamaStackClient) -> None:
- async with async_client.telemetry.with_streaming_response.get_span_tree(
- span_id="span_id",
- ) as response:
- assert not response.is_closed
- assert response.http_request.headers.get("X-Stainless-Lang") == "python"
-
- telemetry = await response.parse()
- assert_matches_type(TelemetryGetSpanTreeResponse, telemetry, path=["response"])
-
- assert cast(Any, response.is_closed) is True
-
- @parametrize
- async def test_path_params_get_span_tree(self, async_client: AsyncLlamaStackClient) -> None:
- with pytest.raises(ValueError, match=r"Expected a non-empty value for `span_id` but received ''"):
- await async_client.telemetry.with_raw_response.get_span_tree(
- span_id="",
- )
-
- @parametrize
- async def test_method_get_trace(self, async_client: AsyncLlamaStackClient) -> None:
- telemetry = await async_client.telemetry.get_trace(
- "trace_id",
- )
- assert_matches_type(Trace, telemetry, path=["response"])
-
- @parametrize
- async def test_raw_response_get_trace(self, async_client: AsyncLlamaStackClient) -> None:
- response = await async_client.telemetry.with_raw_response.get_trace(
- "trace_id",
- )
-
- assert response.is_closed is True
- assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- telemetry = await response.parse()
- assert_matches_type(Trace, telemetry, path=["response"])
-
- @parametrize
- async def test_streaming_response_get_trace(self, async_client: AsyncLlamaStackClient) -> None:
- async with async_client.telemetry.with_streaming_response.get_trace(
- "trace_id",
- ) as response:
- assert not response.is_closed
- assert response.http_request.headers.get("X-Stainless-Lang") == "python"
-
- telemetry = await response.parse()
- assert_matches_type(Trace, telemetry, path=["response"])
-
- assert cast(Any, response.is_closed) is True
-
- @parametrize
- async def test_path_params_get_trace(self, async_client: AsyncLlamaStackClient) -> None:
- with pytest.raises(ValueError, match=r"Expected a non-empty value for `trace_id` but received ''"):
- await async_client.telemetry.with_raw_response.get_trace(
- "",
- )
-
- @pytest.mark.skip(reason="unsupported query params in java / kotlin")
- @parametrize
- async def test_method_query_metrics(self, async_client: AsyncLlamaStackClient) -> None:
- telemetry = await async_client.telemetry.query_metrics(
- metric_name="metric_name",
- query_type="range",
- start_time=0,
- )
- assert_matches_type(TelemetryQueryMetricsResponse, telemetry, path=["response"])
-
- @pytest.mark.skip(reason="unsupported query params in java / kotlin")
- @parametrize
- async def test_method_query_metrics_with_all_params(self, async_client: AsyncLlamaStackClient) -> None:
- telemetry = await async_client.telemetry.query_metrics(
- metric_name="metric_name",
- query_type="range",
- start_time=0,
- end_time=0,
- granularity="granularity",
- label_matchers=[
- {
- "name": "name",
- "operator": "=",
- "value": "value",
- }
- ],
- )
- assert_matches_type(TelemetryQueryMetricsResponse, telemetry, path=["response"])
-
- @pytest.mark.skip(reason="unsupported query params in java / kotlin")
- @parametrize
- async def test_raw_response_query_metrics(self, async_client: AsyncLlamaStackClient) -> None:
- response = await async_client.telemetry.with_raw_response.query_metrics(
- metric_name="metric_name",
- query_type="range",
- start_time=0,
- )
-
- assert response.is_closed is True
- assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- telemetry = await response.parse()
- assert_matches_type(TelemetryQueryMetricsResponse, telemetry, path=["response"])
-
- @pytest.mark.skip(reason="unsupported query params in java / kotlin")
- @parametrize
- async def test_streaming_response_query_metrics(self, async_client: AsyncLlamaStackClient) -> None:
- async with async_client.telemetry.with_streaming_response.query_metrics(
- metric_name="metric_name",
- query_type="range",
- start_time=0,
- ) as response:
- assert not response.is_closed
- assert response.http_request.headers.get("X-Stainless-Lang") == "python"
-
- telemetry = await response.parse()
- assert_matches_type(TelemetryQueryMetricsResponse, telemetry, path=["response"])
-
- assert cast(Any, response.is_closed) is True
-
- @pytest.mark.skip(reason="unsupported query params in java / kotlin")
- @parametrize
- async def test_path_params_query_metrics(self, async_client: AsyncLlamaStackClient) -> None:
- with pytest.raises(ValueError, match=r"Expected a non-empty value for `metric_name` but received ''"):
- await async_client.telemetry.with_raw_response.query_metrics(
- metric_name="",
- query_type="range",
- start_time=0,
- )
-
- @pytest.mark.skip(reason="unsupported query params in java / kotlin")
- @parametrize
- async def test_method_query_spans(self, async_client: AsyncLlamaStackClient) -> None:
- telemetry = await async_client.telemetry.query_spans(
- attribute_filters=[
- {
- "key": "key",
- "op": "eq",
- "value": True,
- }
- ],
- attributes_to_return=["string"],
- )
- assert_matches_type(TelemetryQuerySpansResponse, telemetry, path=["response"])
-
- @pytest.mark.skip(reason="unsupported query params in java / kotlin")
- @parametrize
- async def test_method_query_spans_with_all_params(self, async_client: AsyncLlamaStackClient) -> None:
- telemetry = await async_client.telemetry.query_spans(
- attribute_filters=[
- {
- "key": "key",
- "op": "eq",
- "value": True,
- }
- ],
- attributes_to_return=["string"],
- max_depth=0,
- )
- assert_matches_type(TelemetryQuerySpansResponse, telemetry, path=["response"])
-
- @pytest.mark.skip(reason="unsupported query params in java / kotlin")
- @parametrize
- async def test_raw_response_query_spans(self, async_client: AsyncLlamaStackClient) -> None:
- response = await async_client.telemetry.with_raw_response.query_spans(
- attribute_filters=[
- {
- "key": "key",
- "op": "eq",
- "value": True,
- }
- ],
- attributes_to_return=["string"],
- )
-
- assert response.is_closed is True
- assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- telemetry = await response.parse()
- assert_matches_type(TelemetryQuerySpansResponse, telemetry, path=["response"])
-
- @pytest.mark.skip(reason="unsupported query params in java / kotlin")
- @parametrize
- async def test_streaming_response_query_spans(self, async_client: AsyncLlamaStackClient) -> None:
- async with async_client.telemetry.with_streaming_response.query_spans(
- attribute_filters=[
- {
- "key": "key",
- "op": "eq",
- "value": True,
- }
- ],
- attributes_to_return=["string"],
- ) as response:
- assert not response.is_closed
- assert response.http_request.headers.get("X-Stainless-Lang") == "python"
-
- telemetry = await response.parse()
- assert_matches_type(TelemetryQuerySpansResponse, telemetry, path=["response"])
-
- assert cast(Any, response.is_closed) is True
-
- @pytest.mark.skip(reason="unsupported query params in java / kotlin")
- @parametrize
- async def test_method_query_traces(self, async_client: AsyncLlamaStackClient) -> None:
- telemetry = await async_client.telemetry.query_traces()
- assert_matches_type(TelemetryQueryTracesResponse, telemetry, path=["response"])
-
- @pytest.mark.skip(reason="unsupported query params in java / kotlin")
- @parametrize
- async def test_method_query_traces_with_all_params(self, async_client: AsyncLlamaStackClient) -> None:
- telemetry = await async_client.telemetry.query_traces(
- attribute_filters=[
- {
- "key": "key",
- "op": "eq",
- "value": True,
- }
- ],
- limit=0,
- offset=0,
- order_by=["string"],
- )
- assert_matches_type(TelemetryQueryTracesResponse, telemetry, path=["response"])
-
- @pytest.mark.skip(reason="unsupported query params in java / kotlin")
- @parametrize
- async def test_raw_response_query_traces(self, async_client: AsyncLlamaStackClient) -> None:
- response = await async_client.telemetry.with_raw_response.query_traces()
-
- assert response.is_closed is True
- assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- telemetry = await response.parse()
- assert_matches_type(TelemetryQueryTracesResponse, telemetry, path=["response"])
-
- @pytest.mark.skip(reason="unsupported query params in java / kotlin")
- @parametrize
- async def test_streaming_response_query_traces(self, async_client: AsyncLlamaStackClient) -> None:
- async with async_client.telemetry.with_streaming_response.query_traces() as response:
- assert not response.is_closed
- assert response.http_request.headers.get("X-Stainless-Lang") == "python"
-
- telemetry = await response.parse()
- assert_matches_type(TelemetryQueryTracesResponse, telemetry, path=["response"])
-
- assert cast(Any, response.is_closed) is True
-
- @parametrize
- async def test_method_save_spans_to_dataset(self, async_client: AsyncLlamaStackClient) -> None:
- telemetry = await async_client.telemetry.save_spans_to_dataset(
- attribute_filters=[
- {
- "key": "key",
- "op": "eq",
- "value": True,
- }
- ],
- attributes_to_save=["string"],
- dataset_id="dataset_id",
- )
- assert telemetry is None
-
- @parametrize
- async def test_method_save_spans_to_dataset_with_all_params(self, async_client: AsyncLlamaStackClient) -> None:
- telemetry = await async_client.telemetry.save_spans_to_dataset(
- attribute_filters=[
- {
- "key": "key",
- "op": "eq",
- "value": True,
- }
- ],
- attributes_to_save=["string"],
- dataset_id="dataset_id",
- max_depth=0,
- )
- assert telemetry is None
-
- @parametrize
- async def test_raw_response_save_spans_to_dataset(self, async_client: AsyncLlamaStackClient) -> None:
- response = await async_client.telemetry.with_raw_response.save_spans_to_dataset(
- attribute_filters=[
- {
- "key": "key",
- "op": "eq",
- "value": True,
- }
- ],
- attributes_to_save=["string"],
- dataset_id="dataset_id",
- )
-
- assert response.is_closed is True
- assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- telemetry = await response.parse()
- assert telemetry is None
-
- @parametrize
- async def test_streaming_response_save_spans_to_dataset(self, async_client: AsyncLlamaStackClient) -> None:
- async with async_client.telemetry.with_streaming_response.save_spans_to_dataset(
- attribute_filters=[
- {
- "key": "key",
- "op": "eq",
- "value": True,
- }
- ],
- attributes_to_save=["string"],
- dataset_id="dataset_id",
- ) as response:
- assert not response.is_closed
- assert response.http_request.headers.get("X-Stainless-Lang") == "python"
-
- telemetry = await response.parse()
- assert telemetry is None
-
- assert cast(Any, response.is_closed) is True
diff --git a/tests/api_resources/test_tool_runtime.py b/tests/api_resources/test_tool_runtime.py
index fa79b1ba..b25a41a7 100644
--- a/tests/api_resources/test_tool_runtime.py
+++ b/tests/api_resources/test_tool_runtime.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/tests/api_resources/test_toolgroups.py b/tests/api_resources/test_toolgroups.py
index 1b8e5bce..6ae5ae0d 100644
--- a/tests/api_resources/test_toolgroups.py
+++ b/tests/api_resources/test_toolgroups.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/tests/api_resources/test_tools.py b/tests/api_resources/test_tools.py
index 6fafb9f9..3a8497cf 100644
--- a/tests/api_resources/test_tools.py
+++ b/tests/api_resources/test_tools.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/tests/api_resources/test_vector_io.py b/tests/api_resources/test_vector_io.py
index c62a58d3..2e37edcf 100644
--- a/tests/api_resources/test_vector_io.py
+++ b/tests/api_resources/test_vector_io.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/tests/api_resources/test_vector_stores.py b/tests/api_resources/test_vector_stores.py
index 2aed2e9e..a18c883f 100644
--- a/tests/api_resources/test_vector_stores.py
+++ b/tests/api_resources/test_vector_stores.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/tests/api_resources/tool_runtime/__init__.py b/tests/api_resources/tool_runtime/__init__.py
index fd8019a9..6a8e62e9 100644
--- a/tests/api_resources/tool_runtime/__init__.py
+++ b/tests/api_resources/tool_runtime/__init__.py
@@ -1 +1,7 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
diff --git a/tests/api_resources/tool_runtime/test_rag_tool.py b/tests/api_resources/tool_runtime/test_rag_tool.py
index dab816ab..8dd7e752 100644
--- a/tests/api_resources/tool_runtime/test_rag_tool.py
+++ b/tests/api_resources/tool_runtime/test_rag_tool.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/tests/api_resources/vector_stores/__init__.py b/tests/api_resources/vector_stores/__init__.py
index fd8019a9..6a8e62e9 100644
--- a/tests/api_resources/vector_stores/__init__.py
+++ b/tests/api_resources/vector_stores/__init__.py
@@ -1 +1,7 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
diff --git a/tests/api_resources/vector_stores/test_file_batches.py b/tests/api_resources/vector_stores/test_file_batches.py
index 9cc63496..e9a36ea5 100644
--- a/tests/api_resources/vector_stores/test_file_batches.py
+++ b/tests/api_resources/vector_stores/test_file_batches.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/tests/api_resources/vector_stores/test_files.py b/tests/api_resources/vector_stores/test_files.py
index cf38bc2b..8058c623 100644
--- a/tests/api_resources/vector_stores/test_files.py
+++ b/tests/api_resources/vector_stores/test_files.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/tests/conftest.py b/tests/conftest.py
index ddadec32..58374a34 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/tests/integration/test_agent_responses_e2e.py b/tests/integration/test_agent_responses_e2e.py
new file mode 100644
index 00000000..ecb13f3a
--- /dev/null
+++ b/tests/integration/test_agent_responses_e2e.py
@@ -0,0 +1,157 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
+import io
+import os
+import time
+from uuid import uuid4
+
+import pytest
+
+from llama_stack_client import BadRequestError, AgentEventLogger
+from llama_stack_client.types import ResponseObject, response_create_params
+from llama_stack_client.lib.agents.agent import Agent
+
+MODEL_ID = os.environ.get("LLAMA_STACK_TEST_MODEL")
+BASE_URL = os.environ.get("TEST_API_BASE_URL")
+KNOWLEDGE_SNIPPET = "SKYRIM-DRAGON-ALLOY"
+_VECTOR_STORE_READY_TIMEOUT = 60.0
+_VECTOR_STORE_POLL_INTERVAL = 0.5
+
+
+def _wrap_response_retrieval(client) -> None:
+ original_retrieve = client.responses.retrieve
+
+ def retrying_retrieve(response_id: str, **kwargs):
+ attempts = 0
+ while True:
+ try:
+ return original_retrieve(response_id, **kwargs)
+ except BadRequestError as exc:
+ if getattr(exc, "status_code", None) != 400 or attempts >= 5:
+ raise
+ time.sleep(0.2)
+ attempts += 1
+
+ client.responses.retrieve = retrying_retrieve # type: ignore[assignment]
+
+
+def _create_vector_store_with_document(client) -> str:
+ file_payload = io.BytesIO(
+ f"The secret project codename is {KNOWLEDGE_SNIPPET}. Preserve the hyphens exactly.".encode("utf-8")
+ )
+ uploaded_file = client.files.create(
+ file=("agent_e2e_notes.txt", file_payload, "text/plain"),
+ purpose="assistants",
+ )
+
+ vector_store = client.vector_stores.create(name=f"agent-e2e-{uuid4().hex[:8]}")
+ vector_store_file = client.vector_stores.files.create(
+ vector_store_id=vector_store.id,
+ file_id=uploaded_file.id,
+ )
+
+ deadline = time.time() + _VECTOR_STORE_READY_TIMEOUT
+ while vector_store_file.status != "completed":
+ if vector_store_file.status in {"failed", "cancelled"}:
+ raise RuntimeError(f"Vector store ingestion did not succeed: {vector_store_file.status}")
+ if time.time() > deadline:
+ raise TimeoutError("Vector store file ingest timed out")
+ time.sleep(_VECTOR_STORE_POLL_INTERVAL)
+ vector_store_file = client.vector_stores.files.retrieve(
+ vector_store_id=vector_store.id,
+ file_id=vector_store_file.id,
+ )
+
+ return vector_store.id
+
+
+pytestmark = pytest.mark.skipif(
+ MODEL_ID is None or BASE_URL in (None, "http://127.0.0.1:4010"),
+ reason="requires a running llama stack server, TEST_API_BASE_URL, and LLAMA_STACK_TEST_MODEL",
+)
+
+
+def test_agent_streaming_and_follow_up_turn(client) -> None:
+ _wrap_response_retrieval(client)
+ vector_store_id = _create_vector_store_with_document(client)
+
+ agent = Agent(
+ client=client,
+ model=MODEL_ID,
+ instructions="You can search the uploaded vector store to answer with precise facts.",
+ tools=[{"type": "file_search", "vector_store_ids": [vector_store_id]}],
+ )
+
+ session_id = agent.create_session(f"agent-session-{uuid4().hex[:8]}")
+
+ messages: list[response_create_params.InputUnionMember1] = [
+ {
+ "type": "message",
+ "role": "user",
+ "content": [
+ {
+ "type": "input_text",
+ "text": "Retrieve the secret project codename from the knowledge base and reply as 'codename: '.",
+ }
+ ],
+ }
+ ]
+
+ event_logger = AgentEventLogger()
+ stream_chunks = []
+
+ for chunk in agent.create_turn(messages=messages, session_id=session_id, stream=True):
+ stream_chunks.append(chunk)
+ # Drain the event logger for streaming responses (no-op assertions but ensures coverage).
+ for printable in event_logger.log([chunk]):
+ _ = printable
+
+ completed_chunks = [chunk for chunk in stream_chunks if chunk.response is not None]
+ assert completed_chunks, "Expected streaming turn to yield a final response chunk"
+
+ streamed_response = completed_chunks[-1].response
+ assert isinstance(streamed_response, ResponseObject)
+ first_response_id = streamed_response.id
+
+ assert streamed_response.model == MODEL_ID
+ assert agent._last_response_id == first_response_id
+ assert agent._session_last_response_id.get(session_id) == first_response_id
+ assert streamed_response.output, "Response output should include tool and message items"
+
+ tool_call_outputs = [item for item in streamed_response.output if getattr(item, "type", None) == "file_search_call"]
+ assert tool_call_outputs, "Expected a file_search tool call in the response output"
+ assert any(
+ KNOWLEDGE_SNIPPET in getattr(result, "text", "")
+ for output in tool_call_outputs
+ for result in getattr(output, "results", []) or []
+ ), "Vector store results should surface the knowledge snippet"
+
+ assert KNOWLEDGE_SNIPPET in streamed_response.output_text, "Assistant reply should incorporate retrieved snippet"
+
+ follow_up_messages: list[response_create_params.InputUnionMember1] = [
+ {
+ "type": "message",
+ "role": "user",
+ "content": [
+ {
+ "type": "input_text",
+ "text": "Briefly explain why that codename matters.",
+ }
+ ],
+ }
+ ]
+
+ follow_up_response = agent.create_turn(
+ messages=follow_up_messages,
+ session_id=session_id,
+ stream=False,
+ )
+
+ assert isinstance(follow_up_response, ResponseObject)
+ assert follow_up_response.previous_response_id == first_response_id
+ assert agent._last_response_id == follow_up_response.id
+ assert KNOWLEDGE_SNIPPET in follow_up_response.output_text
diff --git a/tests/integration/test_agent_turn_step_events.py b/tests/integration/test_agent_turn_step_events.py
new file mode 100644
index 00000000..e6a45b70
--- /dev/null
+++ b/tests/integration/test_agent_turn_step_events.py
@@ -0,0 +1,327 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
+"""Integration tests for agent turn/step event model.
+
+These tests verify the core architecture of the turn/step event system:
+1. Turn = complete interaction loop
+2. Inference steps = model thinking/deciding
+3. Tool execution steps = ANY tool executing (server OR client-side)
+
+Key architectural validations:
+- Server-side tools (file_search, web_search) appear as tool_execution steps
+- Client-side tools (function) appear as tool_execution steps
+- Both are properly annotated with metadata
+"""
+
+import io
+import os
+import time
+from uuid import uuid4
+
+import pytest
+from openai import OpenAI
+
+from llama_stack_client.lib.agents.agent import Agent
+from llama_stack_client.lib.agents.turn_events import (
+ TextDelta,
+ StepStarted,
+ TurnStarted,
+ StepProgress,
+ StepCompleted,
+ TurnCompleted,
+ ToolCallIssuedDelta,
+)
+
+# Test configuration
+MODEL_ID = os.environ.get("LLAMA_STACK_TEST_MODEL", "openai/gpt-4o")
+BASE_URL = os.environ.get("TEST_API_BASE_URL", "http://localhost:8321/v1")
+
+
+pytestmark = pytest.mark.skipif(
+ not BASE_URL or BASE_URL == "http://127.0.0.1:4010",
+ reason="requires a running llama stack server",
+)
+
+
+@pytest.fixture
+def openai_client():
+ return OpenAI(api_key="fake", base_url=BASE_URL)
+
+
+@pytest.fixture
+def agent_with_no_tools(openai_client):
+ """Create an agent with no tools for basic text-only tests."""
+ return Agent(
+ client=openai_client,
+ model=MODEL_ID,
+ instructions="You are a helpful assistant. Keep responses brief and concise.",
+ tools=None,
+ )
+
+
+@pytest.fixture
+def agent_with_file_search(openai_client):
+ """Create an agent with file_search tool (server-side)."""
+ # Create a vector store with test content (unique info to force tool use)
+ file_content = "Project Nightingale is a classified initiative. The project codename is BLUE_FALCON_7. The lead researcher is Dr. Elena Vasquez."
+ file_payload = io.BytesIO(file_content.encode("utf-8"))
+
+ uploaded_file = openai_client.files.create(
+ file=("test_knowledge.txt", file_payload, "text/plain"),
+ purpose="assistants",
+ )
+
+ vector_store = openai_client.vector_stores.create(
+ name=f"test-vs-{uuid4().hex[:8]}",
+ extra_body={
+ "provider_id": "faiss",
+ "embedding_model": "nomic-ai/nomic-embed-text-v1.5",
+ },
+ )
+ vector_store_file = openai_client.vector_stores.files.create(
+ vector_store_id=vector_store.id,
+ file_id=uploaded_file.id,
+ )
+
+ # Wait for vector store to be ready
+ deadline = time.time() + 60.0
+ while vector_store_file.status != "completed":
+ if vector_store_file.status in {"failed", "cancelled"}:
+ raise RuntimeError(f"Vector store ingestion failed: {vector_store_file.status}")
+ if time.time() > deadline:
+ raise TimeoutError("Vector store file ingest timed out")
+ time.sleep(0.5)
+ vector_store_file = openai_client.vector_stores.files.retrieve(
+ vector_store_id=vector_store.id,
+ file_id=vector_store_file.id,
+ )
+
+ return Agent(
+ client=openai_client,
+ model=MODEL_ID,
+ instructions="You MUST search the knowledge base to answer every question. Never answer from your own knowledge.",
+ tools=[{"type": "file_search", "vector_store_ids": [vector_store.id]}],
+ )
+
+
+def test_basic_turn_without_tools(agent_with_no_tools):
+ # Expected event sequence:
+ # 1. TurnStarted
+ # 2. StepStarted(inference)
+ # 3. StepProgress(TextDelta) x N
+ # 4. StepCompleted(inference)
+ # 5. TurnCompleted
+ agent = agent_with_no_tools
+ session_id = agent.create_session(f"test-session-{uuid4().hex[:8]}")
+
+ messages = [
+ {
+ "type": "message",
+ "role": "user",
+ "content": [{"type": "input_text", "text": "Say hello in exactly 3 words."}],
+ }
+ ]
+
+ events = []
+ for chunk in agent.create_turn(messages=messages, session_id=session_id, stream=True):
+ events.append(chunk.event)
+
+ # Verify event sequence
+ assert len(events) > 0, "Should have at least some events"
+
+ # First event should be TurnStarted
+ assert isinstance(events[0], TurnStarted), f"First event should be TurnStarted, got {type(events[0])}"
+ assert events[0].session_id == session_id
+
+ # Second event should be StepStarted(inference)
+ assert isinstance(events[1], StepStarted), f"Second event should be StepStarted, got {type(events[1])}"
+ assert events[1].step_type == "inference"
+ assert events[1].metadata is None or events[1].metadata.get("server_side") is None
+
+ # Should have some StepProgress(TextDelta) events
+ text_deltas = [e for e in events if isinstance(e, StepProgress) and isinstance(e.delta, TextDelta)]
+ assert len(text_deltas) > 0, "Should have at least one text delta"
+
+ # Should have NO tool_execution steps (no tools configured)
+ tool_execution_starts = [e for e in events if isinstance(e, StepStarted) and e.step_type == "tool_execution"]
+ assert len(tool_execution_starts) == 0, "Should have no tool_execution steps without tools"
+
+ # Second-to-last event should be StepCompleted(inference)
+ inference_completes = [e for e in events if isinstance(e, StepCompleted) and e.step_type == "inference"]
+ assert len(inference_completes) >= 1, "Should have at least one inference step completion"
+
+ # Last inference completion should have no function calls
+ last_inference = inference_completes[-1]
+ assert len(last_inference.result.function_calls) == 0, "Should have no function calls"
+ assert last_inference.result.stop_reason == "end_of_turn"
+
+ # Last event should be TurnCompleted
+ assert isinstance(events[-1], TurnCompleted), f"Last event should be TurnCompleted, got {type(events[-1])}"
+ assert events[-1].session_id == session_id
+ assert len(events[-1].final_text) > 0, "Should have some final text"
+
+
+def test_server_side_file_search_tool(agent_with_file_search):
+ # Expected event sequence:
+ # 1. TurnStarted
+ # 2. StepStarted(inference) - model decides to search
+ # 3. StepProgress(TextDelta) - optional text before tool
+ # 4. StepCompleted(inference) - inference done, decided to use file_search
+ # 5. StepStarted(tool_execution, metadata.server_side=True)
+ # 6. StepCompleted(tool_execution) - file_search results
+ # 7. StepStarted(inference) - model processes results
+ # 8. StepProgress(TextDelta) - model generates response
+ # 9. StepCompleted(inference)
+ # 10. TurnCompleted
+ agent = agent_with_file_search
+ session_id = agent.create_session(f"test-session-{uuid4().hex[:8]}")
+
+ messages = [
+ {
+ "type": "message",
+ "role": "user",
+ "content": [{"type": "input_text", "text": "What is the codename for Project Nightingale?"}],
+ }
+ ]
+
+ events = []
+ for chunk in agent.create_turn(messages=messages, session_id=session_id, stream=True):
+ events.append(chunk.event)
+
+ # Verify Turn started and completed
+ assert isinstance(events[0], TurnStarted)
+ assert isinstance(events[-1], TurnCompleted)
+
+ # KEY ASSERTION: Should have at least one tool_execution step (server-side file_search)
+ tool_execution_starts = [e for e in events if isinstance(e, StepStarted) and e.step_type == "tool_execution"]
+ assert len(tool_execution_starts) >= 1, (
+ f"Should have at least one tool_execution step, found {len(tool_execution_starts)}"
+ )
+
+ # KEY ASSERTION: The tool_execution step should be marked as server_side
+ file_search_step = tool_execution_starts[0]
+ assert file_search_step.metadata is not None, "Tool execution step should have metadata"
+ assert file_search_step.metadata.get("server_side") is True, "file_search should be marked as server_side"
+ assert file_search_step.metadata.get("tool_type") == "file_search", "Should identify as file_search tool"
+
+ # Should have tool_execution completion
+ tool_execution_completes = [e for e in events if isinstance(e, StepCompleted) and e.step_type == "tool_execution"]
+ assert len(tool_execution_completes) >= 1, "Should have at least one tool_execution completion"
+
+ # Should have multiple inference steps (before and after tool execution)
+ inference_starts = [e for e in events if isinstance(e, StepStarted) and e.step_type == "inference"]
+ assert len(inference_starts) >= 2, (
+ f"Should have at least 2 inference steps (before/after tool), found {len(inference_starts)}"
+ )
+
+ # Final response should contain the answer (BLUE_FALCON_7)
+ assert "BLUE_FALCON" in events[-1].final_text or "BLUE_FALCON_7" in events[-1].final_text, (
+ "Response should contain the codename"
+ )
+
+
+def test_client_side_function_tool(openai_client):
+    # Test a client-side function tool executed through the full turn loop.
+ # Expected event sequence:
+ # 1. TurnStarted
+ # 2. StepStarted(inference)
+ # 3. StepProgress(ToolCallIssuedDelta) - function call
+ # 4. StepCompleted(inference) - with function_calls
+ # 5. StepStarted(tool_execution, metadata.server_side=False)
+ # 6. StepCompleted(tool_execution) - client executed function
+ # 7. StepStarted(inference) - model processes results
+ # 8. StepProgress(TextDelta)
+ # 9. StepCompleted(inference)
+ # 10. TurnCompleted
+
+ # Create a function that returns data requiring model processing
+ def get_user_secret_token(user_id: str) -> str:
+ """Get the encrypted authentication token for a user from the secure database.
+
+ The token is returned in encrypted hex format and must be decoded by the AI.
+
+ :param user_id: The unique identifier of the user
+ """
+        # NOTE: `time` is already imported at module scope; only hashlib is needed here
+ import hashlib
+
+ unique = f"{user_id}-{time.time()}-SECRET"
+ token_hash = hashlib.sha256(unique.encode()).hexdigest()[:16]
+ return f'{{"status": "success", "encrypted_token": "{token_hash}", "format": "hex", "expires_in_hours": 24}}'
+
+ agent = Agent(
+ client=openai_client,
+ model=MODEL_ID,
+ instructions="You are a helpful assistant. When retrieving tokens, you MUST call get_user_secret_token, then parse the JSON response and present it in a user-friendly format. Explain what the token is and when it expires.",
+ tools=[get_user_secret_token],
+ )
+
+ session_id = agent.create_session(f"test-session-{uuid4().hex[:8]}")
+ messages = [
+ {
+ "type": "message",
+ "role": "user",
+ "content": [
+ {
+ "type": "input_text",
+ "text": "Can you get me the authentication token for user_12345? Please explain what it is.",
+ }
+ ],
+ }
+ ]
+
+ events = []
+
+ for chunk in agent.create_turn(messages=messages, session_id=session_id, stream=True):
+ events.append(chunk.event)
+
+ # Verify Turn started and completed
+ assert isinstance(events[0], TurnStarted)
+ assert isinstance(events[-1], TurnCompleted)
+
+ # Should have ToolCallIssuedDelta for the function call
+ function_calls = [
+ e
+ for e in events
+ if isinstance(e, StepProgress) and isinstance(e.delta, ToolCallIssuedDelta) and e.delta.tool_type == "function"
+ ]
+ assert len(function_calls) >= 1, (
+ f"Should have at least one function call (get_user_secret_token), found {len(function_calls)}"
+ )
+
+ # KEY ASSERTION: Should have tool_execution step (client-side function)
+ tool_execution_starts = [e for e in events if isinstance(e, StepStarted) and e.step_type == "tool_execution"]
+ assert len(tool_execution_starts) >= 1, (
+ f"Should have at least one tool_execution step, found {len(tool_execution_starts)}"
+ )
+
+ # KEY ASSERTION: The tool_execution step should be marked as client-side
+ function_step = tool_execution_starts[0]
+ assert function_step.metadata is not None, "Tool execution step should have metadata"
+ assert function_step.metadata.get("server_side") is False, "Function tool should be marked as client_side"
+
+ # Should have at least one inference step (before tool execution)
+ inference_starts = [e for e in events if isinstance(e, StepStarted) and e.step_type == "inference"]
+ assert len(inference_starts) >= 1, f"Should have at least 1 inference step, found {len(inference_starts)}"
+
+ # Verify the tool was actually executed with proper arguments
+ tool_execution_completes = [e for e in events if isinstance(e, StepCompleted) and e.step_type == "tool_execution"]
+ assert len(tool_execution_completes) >= 1, "Should have at least one tool_execution completion"
+
+ # Final response should contain the token data (proves function was called and processed)
+ final_text = events[-1].final_text.lower()
+ assert "token" in final_text or "encrypted" in final_text, (
+ "Response should contain token information from function call"
+ )
+ assert "24" in events[-1].final_text or "hour" in final_text or "expire" in final_text, (
+ "Response should mention expiration from parsed JSON"
+ )
+
+
+if __name__ == "__main__":
+ # Allow running tests directly for development
+ pytest.main([__file__, "-v", "-s"])
diff --git a/tests/lib/agents/test_agent_responses.py b/tests/lib/agents/test_agent_responses.py
new file mode 100644
index 00000000..35ff2763
--- /dev/null
+++ b/tests/lib/agents/test_agent_responses.py
@@ -0,0 +1,212 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
+from __future__ import annotations
+
+from types import SimpleNamespace
+from typing import Any, Dict, Iterable, Iterator, List
+
+import pytest
+
+from llama_stack_client.lib.agents.agent import Agent
+from llama_stack_client.lib.agents.client_tool import client_tool
+from llama_stack_client.lib.agents.turn_events import (
+ AgentStreamChunk,
+ StepCompleted,
+ StepProgress,
+ StepStarted,
+ ToolExecutionStepResult,
+ TurnCompleted,
+ TurnStarted,
+)
+
+
+def _event(event_type: str, **payload: Any) -> SimpleNamespace:
+ return SimpleNamespace(type=event_type, **payload)
+
+
+def _in_progress(response_id: str) -> SimpleNamespace:
+ return _event("response.in_progress", response=SimpleNamespace(id=response_id))
+
+
+def _completed(response_id: str, text: str) -> SimpleNamespace:
+ response = FakeResponse(response_id, text)
+ return _event("response.completed", response=response)
+
+
+class FakeResponse:
+ def __init__(self, response_id: str, text: str) -> None:
+ self.id = response_id
+ self.output_text = text
+ self.turn = SimpleNamespace(turn_id=f"turn_{response_id}")
+
+
+class FakeResponsesAPI:
+ def __init__(self, event_script: Iterable[Iterable[SimpleNamespace]]) -> None:
+ self._event_script: List[List[SimpleNamespace]] = [list(events) for events in event_script]
+ self.create_calls: List[Dict[str, Any]] = []
+
+ def create(self, **kwargs: Any) -> Iterator[SimpleNamespace]:
+ self.create_calls.append(kwargs)
+ if not self._event_script:
+ raise AssertionError("No scripted events left for responses.create")
+ return iter(self._event_script.pop(0))
+
+
+class FakeConversationsAPI:
+ def __init__(self) -> None:
+ self._counter = 0
+
+ def create(self, **_: Any) -> SimpleNamespace:
+ self._counter += 1
+ return SimpleNamespace(id=f"conv_{self._counter}")
+
+
+class FakeClient:
+ def __init__(self, event_script: Iterable[Iterable[SimpleNamespace]]) -> None:
+ self.responses = FakeResponsesAPI(event_script)
+ self.conversations = FakeConversationsAPI()
+
+
+def make_completion_events(response_id: str, text: str) -> List[SimpleNamespace]:
+ return [
+ _in_progress(response_id),
+ _event("response.output_text.delta", delta=text, output_index=0),
+ _completed(response_id, text),
+ ]
+
+
+def make_function_tool_events(response_id: str, call_id: str, tool_name: str, arguments: str) -> List[SimpleNamespace]:
+ tool_item = SimpleNamespace(type="function_call", call_id=call_id, name=tool_name, arguments=arguments)
+ return [
+ _in_progress(response_id),
+ _event("response.output_item.added", item=tool_item),
+ _event("response.output_item.done", item=tool_item),
+ _completed(response_id, ""),
+ ]
+
+
+def make_server_tool_events(response_id: str, call_id: str, arguments: str, final_text: str) -> List[SimpleNamespace]:
+ tool_item = SimpleNamespace(type="file_search_call", id=call_id, arguments=arguments)
+    # _completed() constructs the final FakeResponse itself; no separate instance is needed
+ return [
+ _in_progress(response_id),
+ _event("response.output_item.added", item=tool_item),
+ _event("response.output_item.done", item=tool_item),
+ _event("response.output_text.delta", delta=final_text, output_index=0),
+ _completed(response_id, final_text),
+ ]
+
+
+def test_agent_tracks_multiple_sessions() -> None:
+ event_script = [
+ make_completion_events("resp_a1", "session A turn 1"),
+ make_completion_events("resp_b1", "session B turn 1"),
+ make_completion_events("resp_a2", "session A turn 2"),
+ ]
+
+ client = FakeClient(event_script)
+ agent = Agent(client=client, model="test-model", instructions="test")
+
+ session_a = agent.create_session("A")
+ session_b = agent.create_session("B")
+
+ message = {
+ "type": "message",
+ "role": "user",
+ "content": [{"type": "input_text", "text": "hi"}],
+ }
+
+ agent.create_turn([message], session_id=session_a, stream=False)
+ agent.create_turn([message], session_id=session_b, stream=False)
+ agent.create_turn([message], session_id=session_a, stream=False)
+
+ calls = client.responses.create_calls
+ assert calls[0]["conversation"] == session_a
+ assert calls[1]["conversation"] == session_b
+ assert calls[2]["conversation"] == session_a
+
+ assert agent._session_last_response_id[session_a] == "resp_a2"
+ assert agent._session_last_response_id[session_b] == "resp_b1"
+ assert agent._last_response_id == "resp_a2"
+
+
+def test_agent_handles_client_tool_and_finishes_turn() -> None:
+ tool_invocations: List[str] = []
+
+ @client_tool
+ def echo_tool(text: str) -> str:
+ """Echo text back to the caller.
+
+ :param text: value to echo
+ """
+ tool_invocations.append(text)
+ return text
+
+ event_script = [
+ make_function_tool_events("resp_intermediate", "call_1", "echo_tool", '{"text": "pong"}'),
+ make_completion_events("resp_final", "all done"),
+ ]
+
+ client = FakeClient(event_script)
+ agent = Agent(client=client, model="test-model", instructions="use tools", tools=[echo_tool])
+
+ session_id = agent.create_session("default")
+ message = {
+ "type": "message",
+ "role": "user",
+ "content": [{"type": "input_text", "text": "run the tool"}],
+ }
+
+ response = agent.create_turn([message], session_id=session_id, stream=False)
+
+ assert response.id == "resp_final"
+ assert response.output_text == "all done"
+ assert tool_invocations == ["pong"]
+ assert len(client.responses.create_calls) == 2
+
+
+def test_agent_streams_server_tool_events() -> None:
+ event_script = [
+ make_server_tool_events("resp_server", "server_call", '{"query": "docs"}', "tool finished"),
+ ]
+
+ client = FakeClient(event_script)
+ agent = Agent(client=client, model="test-model", instructions="use server tool")
+
+ session_id = agent.create_session("default")
+ message = {
+ "type": "message",
+ "role": "user",
+ "content": [{"type": "input_text", "text": "find info"}],
+ }
+
+ chunks = list(agent.create_turn([message], session_id=session_id, stream=True))
+
+ events = [chunk.event for chunk in chunks]
+ assert isinstance(events[0], TurnStarted)
+ assert isinstance(events[1], StepStarted)
+ assert events[1].step_type == "inference"
+
+ # Look for the tool execution step in the stream
+ tool_step_started = next(event for event in events if isinstance(event, StepStarted) and event.step_type == "tool_execution")
+ assert tool_step_started.metadata == {"server_side": True, "tool_type": "file_search", "tool_name": "file_search_call"}
+
+ tool_step_completed = next(
+ event for event in events if isinstance(event, StepCompleted) and event.step_type == "tool_execution"
+ )
+ assert isinstance(tool_step_completed.result, ToolExecutionStepResult)
+ assert tool_step_completed.result.tool_calls[0].call_id == "server_call"
+
+ text_progress = [
+ event.delta.text
+ for event in events
+ if isinstance(event, StepProgress) and hasattr(event.delta, "text")
+ ]
+ assert text_progress == ["tool finished"]
+
+ assert isinstance(events[-1], TurnCompleted)
+ assert chunks[-1].response and chunks[-1].response.output_text == "tool finished"
diff --git a/tests/test_client.py b/tests/test_client.py
index 708c7420..3ccb4d91 100644
--- a/tests/test_client.py
+++ b/tests/test_client.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
diff --git a/tests/test_deepcopy.py b/tests/test_deepcopy.py
index 52056b76..d63fb98d 100644
--- a/tests/test_deepcopy.py
+++ b/tests/test_deepcopy.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
from llama_stack_client._utils import deepcopy_minimal
diff --git a/tests/test_extract_files.py b/tests/test_extract_files.py
index 614e670a..a597a04f 100644
--- a/tests/test_extract_files.py
+++ b/tests/test_extract_files.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
from __future__ import annotations
from typing import Sequence
diff --git a/tests/test_files.py b/tests/test_files.py
index e4bcf976..91008825 100644
--- a/tests/test_files.py
+++ b/tests/test_files.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
from pathlib import Path
import anyio
diff --git a/tests/test_models.py b/tests/test_models.py
index 396f2bf2..313ff0fc 100644
--- a/tests/test_models.py
+++ b/tests/test_models.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
import json
from typing import TYPE_CHECKING, Any, Dict, List, Union, Optional, cast
from datetime import datetime, timezone
diff --git a/tests/test_qs.py b/tests/test_qs.py
index cff56e87..838c4549 100644
--- a/tests/test_qs.py
+++ b/tests/test_qs.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
from typing import Any, cast
from functools import partial
from urllib.parse import unquote
diff --git a/tests/test_required_args.py b/tests/test_required_args.py
index 77bebd25..e01f4d70 100644
--- a/tests/test_required_args.py
+++ b/tests/test_required_args.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
from __future__ import annotations
import pytest
diff --git a/tests/test_response.py b/tests/test_response.py
index 8c803a25..bf3796ed 100644
--- a/tests/test_response.py
+++ b/tests/test_response.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
import json
from typing import Any, List, Union, cast
from typing_extensions import Annotated
diff --git a/tests/test_streaming.py b/tests/test_streaming.py
index 875d126d..e41edce5 100644
--- a/tests/test_streaming.py
+++ b/tests/test_streaming.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
from __future__ import annotations
from typing import Iterator, AsyncIterator
diff --git a/tests/test_transform.py b/tests/test_transform.py
index b81e616f..d309e42c 100644
--- a/tests/test_transform.py
+++ b/tests/test_transform.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
from __future__ import annotations
import io
diff --git a/tests/test_utils/test_datetime_parse.py b/tests/test_utils/test_datetime_parse.py
index a95a9e55..9a3f30ed 100644
--- a/tests/test_utils/test_datetime_parse.py
+++ b/tests/test_utils/test_datetime_parse.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
"""
Copied from https://github.com/pydantic/pydantic/blob/v1.10.22/tests/test_datetime_parse.py
with modifications so it works without pydantic v1 imports.
diff --git a/tests/test_utils/test_proxy.py b/tests/test_utils/test_proxy.py
index 76a29efd..c99cdc11 100644
--- a/tests/test_utils/test_proxy.py
+++ b/tests/test_utils/test_proxy.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
import operator
from typing import Any
from typing_extensions import override
diff --git a/tests/test_utils/test_typing.py b/tests/test_utils/test_typing.py
index a38fbf5a..07ce18a2 100644
--- a/tests/test_utils/test_typing.py
+++ b/tests/test_utils/test_typing.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
from __future__ import annotations
from typing import Generic, TypeVar, cast
diff --git a/tests/utils.py b/tests/utils.py
index ba5e26bf..82710945 100644
--- a/tests/utils.py
+++ b/tests/utils.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
from __future__ import annotations
import os
diff --git a/uv.lock b/uv.lock
index 053f41d2..935254f6 100644
--- a/uv.lock
+++ b/uv.lock
@@ -113,7 +113,7 @@ wheels = [
[[package]]
name = "black"
-version = "25.1.0"
+version = "25.9.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "click" },
@@ -121,18 +121,19 @@ dependencies = [
{ name = "packaging" },
{ name = "pathspec" },
{ name = "platformdirs" },
+ { name = "pytokens" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/94/49/26a7b0f3f35da4b5a65f081943b7bcd22d7002f5f0fb8098ec1ff21cb6ef/black-25.1.0.tar.gz", hash = "sha256:33496d5cd1222ad73391352b4ae8da15253c5de89b93a80b3e2c8d9a19ec2666", size = 649449, upload-time = "2025-01-29T04:15:40.373Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/4b/43/20b5c90612d7bdb2bdbcceeb53d588acca3bb8f0e4c5d5c751a2c8fdd55a/black-25.9.0.tar.gz", hash = "sha256:0474bca9a0dd1b51791fcc507a4e02078a1c63f6d4e4ae5544b9848c7adfb619", size = 648393, upload-time = "2025-09-19T00:27:37.758Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/83/71/3fe4741df7adf015ad8dfa082dd36c94ca86bb21f25608eb247b4afb15b2/black-25.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4b60580e829091e6f9238c848ea6750efed72140b91b048770b64e74fe04908b", size = 1650988, upload-time = "2025-01-29T05:37:16.707Z" },
- { url = "https://files.pythonhosted.org/packages/13/f3/89aac8a83d73937ccd39bbe8fc6ac8860c11cfa0af5b1c96d081facac844/black-25.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1e2978f6df243b155ef5fa7e558a43037c3079093ed5d10fd84c43900f2d8ecc", size = 1453985, upload-time = "2025-01-29T05:37:18.273Z" },
- { url = "https://files.pythonhosted.org/packages/6f/22/b99efca33f1f3a1d2552c714b1e1b5ae92efac6c43e790ad539a163d1754/black-25.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3b48735872ec535027d979e8dcb20bf4f70b5ac75a8ea99f127c106a7d7aba9f", size = 1783816, upload-time = "2025-01-29T04:18:33.823Z" },
- { url = "https://files.pythonhosted.org/packages/18/7e/a27c3ad3822b6f2e0e00d63d58ff6299a99a5b3aee69fa77cd4b0076b261/black-25.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:ea0213189960bda9cf99be5b8c8ce66bb054af5e9e861249cd23471bd7b0b3ba", size = 1440860, upload-time = "2025-01-29T04:19:12.944Z" },
- { url = "https://files.pythonhosted.org/packages/98/87/0edf98916640efa5d0696e1abb0a8357b52e69e82322628f25bf14d263d1/black-25.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8f0b18a02996a836cc9c9c78e5babec10930862827b1b724ddfe98ccf2f2fe4f", size = 1650673, upload-time = "2025-01-29T05:37:20.574Z" },
- { url = "https://files.pythonhosted.org/packages/52/e5/f7bf17207cf87fa6e9b676576749c6b6ed0d70f179a3d812c997870291c3/black-25.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:afebb7098bfbc70037a053b91ae8437c3857482d3a690fefc03e9ff7aa9a5fd3", size = 1453190, upload-time = "2025-01-29T05:37:22.106Z" },
- { url = "https://files.pythonhosted.org/packages/e3/ee/adda3d46d4a9120772fae6de454c8495603c37c4c3b9c60f25b1ab6401fe/black-25.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:030b9759066a4ee5e5aca28c3c77f9c64789cdd4de8ac1df642c40b708be6171", size = 1782926, upload-time = "2025-01-29T04:18:58.564Z" },
- { url = "https://files.pythonhosted.org/packages/cc/64/94eb5f45dcb997d2082f097a3944cfc7fe87e071907f677e80788a2d7b7a/black-25.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:a22f402b410566e2d1c950708c77ebf5ebd5d0d88a6a2e87c86d9fb48afa0d18", size = 1442613, upload-time = "2025-01-29T04:19:27.63Z" },
- { url = "https://files.pythonhosted.org/packages/09/71/54e999902aed72baf26bca0d50781b01838251a462612966e9fc4891eadd/black-25.1.0-py3-none-any.whl", hash = "sha256:95e8176dae143ba9097f351d174fdaf0ccd29efb414b362ae3fd72bf0f710717", size = 207646, upload-time = "2025-01-29T04:15:38.082Z" },
+ { url = "https://files.pythonhosted.org/packages/fb/8e/319cfe6c82f7e2d5bfb4d3353c6cc85b523d677ff59edc61fdb9ee275234/black-25.9.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:1b9dc70c21ef8b43248f1d86aedd2aaf75ae110b958a7909ad8463c4aa0880b0", size = 1742012, upload-time = "2025-09-19T00:33:08.678Z" },
+ { url = "https://files.pythonhosted.org/packages/94/cc/f562fe5d0a40cd2a4e6ae3f685e4c36e365b1f7e494af99c26ff7f28117f/black-25.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8e46eecf65a095fa62e53245ae2795c90bdecabd53b50c448d0a8bcd0d2e74c4", size = 1581421, upload-time = "2025-09-19T00:35:25.937Z" },
+ { url = "https://files.pythonhosted.org/packages/84/67/6db6dff1ebc8965fd7661498aea0da5d7301074b85bba8606a28f47ede4d/black-25.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9101ee58ddc2442199a25cb648d46ba22cd580b00ca4b44234a324e3ec7a0f7e", size = 1655619, upload-time = "2025-09-19T00:30:49.241Z" },
+ { url = "https://files.pythonhosted.org/packages/10/10/3faef9aa2a730306cf469d76f7f155a8cc1f66e74781298df0ba31f8b4c8/black-25.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:77e7060a00c5ec4b3367c55f39cf9b06e68965a4f2e61cecacd6d0d9b7ec945a", size = 1342481, upload-time = "2025-09-19T00:31:29.625Z" },
+ { url = "https://files.pythonhosted.org/packages/48/99/3acfea65f5e79f45472c45f87ec13037b506522719cd9d4ac86484ff51ac/black-25.9.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:0172a012f725b792c358d57fe7b6b6e8e67375dd157f64fa7a3097b3ed3e2175", size = 1742165, upload-time = "2025-09-19T00:34:10.402Z" },
+ { url = "https://files.pythonhosted.org/packages/3a/18/799285282c8236a79f25d590f0222dbd6850e14b060dfaa3e720241fd772/black-25.9.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3bec74ee60f8dfef564b573a96b8930f7b6a538e846123d5ad77ba14a8d7a64f", size = 1581259, upload-time = "2025-09-19T00:32:49.685Z" },
+ { url = "https://files.pythonhosted.org/packages/f1/ce/883ec4b6303acdeca93ee06b7622f1fa383c6b3765294824165d49b1a86b/black-25.9.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b756fc75871cb1bcac5499552d771822fd9db5a2bb8db2a7247936ca48f39831", size = 1655583, upload-time = "2025-09-19T00:30:44.505Z" },
+ { url = "https://files.pythonhosted.org/packages/21/17/5c253aa80a0639ccc427a5c7144534b661505ae2b5a10b77ebe13fa25334/black-25.9.0-cp313-cp313-win_amd64.whl", hash = "sha256:846d58e3ce7879ec1ffe816bb9df6d006cd9590515ed5d17db14e17666b2b357", size = 1343428, upload-time = "2025-09-19T00:32:13.839Z" },
+ { url = "https://files.pythonhosted.org/packages/1b/46/863c90dcd3f9d41b109b7f19032ae0db021f0b2a81482ba0a1e28c84de86/black-25.9.0-py3-none-any.whl", hash = "sha256:474b34c1342cdc157d307b56c4c65bce916480c4a8f6551fdc6bf9b486a7c4ae", size = 203363, upload-time = "2025-09-19T00:27:35.724Z" },
]
[[package]]
@@ -155,56 +156,71 @@ wheels = [
[[package]]
name = "charset-normalizer"
-version = "3.4.3"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/83/2d/5fd176ceb9b2fc619e63405525573493ca23441330fcdaee6bef9460e924/charset_normalizer-3.4.3.tar.gz", hash = "sha256:6fce4b8500244f6fcb71465d4a4930d132ba9ab8e71a7859e6a5d59851068d14", size = 122371, upload-time = "2025-08-09T07:57:28.46Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/e9/5e/14c94999e418d9b87682734589404a25854d5f5d0408df68bc15b6ff54bb/charset_normalizer-3.4.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e28e334d3ff134e88989d90ba04b47d84382a828c061d0d1027b1b12a62b39b1", size = 205655, upload-time = "2025-08-09T07:56:08.475Z" },
- { url = "https://files.pythonhosted.org/packages/7d/a8/c6ec5d389672521f644505a257f50544c074cf5fc292d5390331cd6fc9c3/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0cacf8f7297b0c4fcb74227692ca46b4a5852f8f4f24b3c766dd94a1075c4884", size = 146223, upload-time = "2025-08-09T07:56:09.708Z" },
- { url = "https://files.pythonhosted.org/packages/fc/eb/a2ffb08547f4e1e5415fb69eb7db25932c52a52bed371429648db4d84fb1/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c6fd51128a41297f5409deab284fecbe5305ebd7e5a1f959bee1c054622b7018", size = 159366, upload-time = "2025-08-09T07:56:11.326Z" },
- { url = "https://files.pythonhosted.org/packages/82/10/0fd19f20c624b278dddaf83b8464dcddc2456cb4b02bb902a6da126b87a1/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:3cfb2aad70f2c6debfbcb717f23b7eb55febc0bb23dcffc0f076009da10c6392", size = 157104, upload-time = "2025-08-09T07:56:13.014Z" },
- { url = "https://files.pythonhosted.org/packages/16/ab/0233c3231af734f5dfcf0844aa9582d5a1466c985bbed6cedab85af9bfe3/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1606f4a55c0fd363d754049cdf400175ee96c992b1f8018b993941f221221c5f", size = 151830, upload-time = "2025-08-09T07:56:14.428Z" },
- { url = "https://files.pythonhosted.org/packages/ae/02/e29e22b4e02839a0e4a06557b1999d0a47db3567e82989b5bb21f3fbbd9f/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:027b776c26d38b7f15b26a5da1044f376455fb3766df8fc38563b4efbc515154", size = 148854, upload-time = "2025-08-09T07:56:16.051Z" },
- { url = "https://files.pythonhosted.org/packages/05/6b/e2539a0a4be302b481e8cafb5af8792da8093b486885a1ae4d15d452bcec/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:42e5088973e56e31e4fa58eb6bd709e42fc03799c11c42929592889a2e54c491", size = 160670, upload-time = "2025-08-09T07:56:17.314Z" },
- { url = "https://files.pythonhosted.org/packages/31/e7/883ee5676a2ef217a40ce0bffcc3d0dfbf9e64cbcfbdf822c52981c3304b/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:cc34f233c9e71701040d772aa7490318673aa7164a0efe3172b2981218c26d93", size = 158501, upload-time = "2025-08-09T07:56:18.641Z" },
- { url = "https://files.pythonhosted.org/packages/c1/35/6525b21aa0db614cf8b5792d232021dca3df7f90a1944db934efa5d20bb1/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:320e8e66157cc4e247d9ddca8e21f427efc7a04bbd0ac8a9faf56583fa543f9f", size = 153173, upload-time = "2025-08-09T07:56:20.289Z" },
- { url = "https://files.pythonhosted.org/packages/50/ee/f4704bad8201de513fdc8aac1cabc87e38c5818c93857140e06e772b5892/charset_normalizer-3.4.3-cp312-cp312-win32.whl", hash = "sha256:fb6fecfd65564f208cbf0fba07f107fb661bcd1a7c389edbced3f7a493f70e37", size = 99822, upload-time = "2025-08-09T07:56:21.551Z" },
- { url = "https://files.pythonhosted.org/packages/39/f5/3b3836ca6064d0992c58c7561c6b6eee1b3892e9665d650c803bd5614522/charset_normalizer-3.4.3-cp312-cp312-win_amd64.whl", hash = "sha256:86df271bf921c2ee3818f0522e9a5b8092ca2ad8b065ece5d7d9d0e9f4849bcc", size = 107543, upload-time = "2025-08-09T07:56:23.115Z" },
- { url = "https://files.pythonhosted.org/packages/65/ca/2135ac97709b400c7654b4b764daf5c5567c2da45a30cdd20f9eefe2d658/charset_normalizer-3.4.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:14c2a87c65b351109f6abfc424cab3927b3bdece6f706e4d12faaf3d52ee5efe", size = 205326, upload-time = "2025-08-09T07:56:24.721Z" },
- { url = "https://files.pythonhosted.org/packages/71/11/98a04c3c97dd34e49c7d247083af03645ca3730809a5509443f3c37f7c99/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:41d1fc408ff5fdfb910200ec0e74abc40387bccb3252f3f27c0676731df2b2c8", size = 146008, upload-time = "2025-08-09T07:56:26.004Z" },
- { url = "https://files.pythonhosted.org/packages/60/f5/4659a4cb3c4ec146bec80c32d8bb16033752574c20b1252ee842a95d1a1e/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:1bb60174149316da1c35fa5233681f7c0f9f514509b8e399ab70fea5f17e45c9", size = 159196, upload-time = "2025-08-09T07:56:27.25Z" },
- { url = "https://files.pythonhosted.org/packages/86/9e/f552f7a00611f168b9a5865a1414179b2c6de8235a4fa40189f6f79a1753/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:30d006f98569de3459c2fc1f2acde170b7b2bd265dc1943e87e1a4efe1b67c31", size = 156819, upload-time = "2025-08-09T07:56:28.515Z" },
- { url = "https://files.pythonhosted.org/packages/7e/95/42aa2156235cbc8fa61208aded06ef46111c4d3f0de233107b3f38631803/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:416175faf02e4b0810f1f38bcb54682878a4af94059a1cd63b8747244420801f", size = 151350, upload-time = "2025-08-09T07:56:29.716Z" },
- { url = "https://files.pythonhosted.org/packages/c2/a9/3865b02c56f300a6f94fc631ef54f0a8a29da74fb45a773dfd3dcd380af7/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6aab0f181c486f973bc7262a97f5aca3ee7e1437011ef0c2ec04b5a11d16c927", size = 148644, upload-time = "2025-08-09T07:56:30.984Z" },
- { url = "https://files.pythonhosted.org/packages/77/d9/cbcf1a2a5c7d7856f11e7ac2d782aec12bdfea60d104e60e0aa1c97849dc/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:fdabf8315679312cfa71302f9bd509ded4f2f263fb5b765cf1433b39106c3cc9", size = 160468, upload-time = "2025-08-09T07:56:32.252Z" },
- { url = "https://files.pythonhosted.org/packages/f6/42/6f45efee8697b89fda4d50580f292b8f7f9306cb2971d4b53f8914e4d890/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:bd28b817ea8c70215401f657edef3a8aa83c29d447fb0b622c35403780ba11d5", size = 158187, upload-time = "2025-08-09T07:56:33.481Z" },
- { url = "https://files.pythonhosted.org/packages/70/99/f1c3bdcfaa9c45b3ce96f70b14f070411366fa19549c1d4832c935d8e2c3/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:18343b2d246dc6761a249ba1fb13f9ee9a2bcd95decc767319506056ea4ad4dc", size = 152699, upload-time = "2025-08-09T07:56:34.739Z" },
- { url = "https://files.pythonhosted.org/packages/a3/ad/b0081f2f99a4b194bcbb1934ef3b12aa4d9702ced80a37026b7607c72e58/charset_normalizer-3.4.3-cp313-cp313-win32.whl", hash = "sha256:6fb70de56f1859a3f71261cbe41005f56a7842cc348d3aeb26237560bfa5e0ce", size = 99580, upload-time = "2025-08-09T07:56:35.981Z" },
- { url = "https://files.pythonhosted.org/packages/9a/8f/ae790790c7b64f925e5c953b924aaa42a243fb778fed9e41f147b2a5715a/charset_normalizer-3.4.3-cp313-cp313-win_amd64.whl", hash = "sha256:cf1ebb7d78e1ad8ec2a8c4732c7be2e736f6e5123a4146c5b89c9d1f585f8cef", size = 107366, upload-time = "2025-08-09T07:56:37.339Z" },
- { url = "https://files.pythonhosted.org/packages/8e/91/b5a06ad970ddc7a0e513112d40113e834638f4ca1120eb727a249fb2715e/charset_normalizer-3.4.3-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:3cd35b7e8aedeb9e34c41385fda4f73ba609e561faedfae0a9e75e44ac558a15", size = 204342, upload-time = "2025-08-09T07:56:38.687Z" },
- { url = "https://files.pythonhosted.org/packages/ce/ec/1edc30a377f0a02689342f214455c3f6c2fbedd896a1d2f856c002fc3062/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b89bc04de1d83006373429975f8ef9e7932534b8cc9ca582e4db7d20d91816db", size = 145995, upload-time = "2025-08-09T07:56:40.048Z" },
- { url = "https://files.pythonhosted.org/packages/17/e5/5e67ab85e6d22b04641acb5399c8684f4d37caf7558a53859f0283a650e9/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2001a39612b241dae17b4687898843f254f8748b796a2e16f1051a17078d991d", size = 158640, upload-time = "2025-08-09T07:56:41.311Z" },
- { url = "https://files.pythonhosted.org/packages/f1/e5/38421987f6c697ee3722981289d554957c4be652f963d71c5e46a262e135/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:8dcfc373f888e4fb39a7bc57e93e3b845e7f462dacc008d9749568b1c4ece096", size = 156636, upload-time = "2025-08-09T07:56:43.195Z" },
- { url = "https://files.pythonhosted.org/packages/a0/e4/5a075de8daa3ec0745a9a3b54467e0c2967daaaf2cec04c845f73493e9a1/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:18b97b8404387b96cdbd30ad660f6407799126d26a39ca65729162fd810a99aa", size = 150939, upload-time = "2025-08-09T07:56:44.819Z" },
- { url = "https://files.pythonhosted.org/packages/02/f7/3611b32318b30974131db62b4043f335861d4d9b49adc6d57c1149cc49d4/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ccf600859c183d70eb47e05a44cd80a4ce77394d1ac0f79dbd2dd90a69a3a049", size = 148580, upload-time = "2025-08-09T07:56:46.684Z" },
- { url = "https://files.pythonhosted.org/packages/7e/61/19b36f4bd67f2793ab6a99b979b4e4f3d8fc754cbdffb805335df4337126/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:53cd68b185d98dde4ad8990e56a58dea83a4162161b1ea9272e5c9182ce415e0", size = 159870, upload-time = "2025-08-09T07:56:47.941Z" },
- { url = "https://files.pythonhosted.org/packages/06/57/84722eefdd338c04cf3030ada66889298eaedf3e7a30a624201e0cbe424a/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:30a96e1e1f865f78b030d65241c1ee850cdf422d869e9028e2fc1d5e4db73b92", size = 157797, upload-time = "2025-08-09T07:56:49.756Z" },
- { url = "https://files.pythonhosted.org/packages/72/2a/aff5dd112b2f14bcc3462c312dce5445806bfc8ab3a7328555da95330e4b/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:d716a916938e03231e86e43782ca7878fb602a125a91e7acb8b5112e2e96ac16", size = 152224, upload-time = "2025-08-09T07:56:51.369Z" },
- { url = "https://files.pythonhosted.org/packages/b7/8c/9839225320046ed279c6e839d51f028342eb77c91c89b8ef2549f951f3ec/charset_normalizer-3.4.3-cp314-cp314-win32.whl", hash = "sha256:c6dbd0ccdda3a2ba7c2ecd9d77b37f3b5831687d8dc1b6ca5f56a4880cc7b7ce", size = 100086, upload-time = "2025-08-09T07:56:52.722Z" },
- { url = "https://files.pythonhosted.org/packages/ee/7a/36fbcf646e41f710ce0a563c1c9a343c6edf9be80786edeb15b6f62e17db/charset_normalizer-3.4.3-cp314-cp314-win_amd64.whl", hash = "sha256:73dc19b562516fc9bcf6e5d6e596df0b4eb98d87e4f79f3ae71840e6ed21361c", size = 107400, upload-time = "2025-08-09T07:56:55.172Z" },
- { url = "https://files.pythonhosted.org/packages/8a/1f/f041989e93b001bc4e44bb1669ccdcf54d3f00e628229a85b08d330615c5/charset_normalizer-3.4.3-py3-none-any.whl", hash = "sha256:ce571ab16d890d23b5c278547ba694193a45011ff86a9162a71307ed9f86759a", size = 53175, upload-time = "2025-08-09T07:57:26.864Z" },
+version = "3.4.4"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/13/69/33ddede1939fdd074bce5434295f38fae7136463422fe4fd3e0e89b98062/charset_normalizer-3.4.4.tar.gz", hash = "sha256:94537985111c35f28720e43603b8e7b43a6ecfb2ce1d3058bbe955b73404e21a", size = 129418, upload-time = "2025-10-14T04:42:32.879Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/f3/85/1637cd4af66fa687396e757dec650f28025f2a2f5a5531a3208dc0ec43f2/charset_normalizer-3.4.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0a98e6759f854bd25a58a73fa88833fba3b7c491169f86ce1180c948ab3fd394", size = 208425, upload-time = "2025-10-14T04:40:53.353Z" },
+ { url = "https://files.pythonhosted.org/packages/9d/6a/04130023fef2a0d9c62d0bae2649b69f7b7d8d24ea5536feef50551029df/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b5b290ccc2a263e8d185130284f8501e3e36c5e02750fc6b6bdeb2e9e96f1e25", size = 148162, upload-time = "2025-10-14T04:40:54.558Z" },
+ { url = "https://files.pythonhosted.org/packages/78/29/62328d79aa60da22c9e0b9a66539feae06ca0f5a4171ac4f7dc285b83688/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74bb723680f9f7a6234dcf67aea57e708ec1fbdf5699fb91dfd6f511b0a320ef", size = 144558, upload-time = "2025-10-14T04:40:55.677Z" },
+ { url = "https://files.pythonhosted.org/packages/86/bb/b32194a4bf15b88403537c2e120b817c61cd4ecffa9b6876e941c3ee38fe/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f1e34719c6ed0b92f418c7c780480b26b5d9c50349e9a9af7d76bf757530350d", size = 161497, upload-time = "2025-10-14T04:40:57.217Z" },
+ { url = "https://files.pythonhosted.org/packages/19/89/a54c82b253d5b9b111dc74aca196ba5ccfcca8242d0fb64146d4d3183ff1/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2437418e20515acec67d86e12bf70056a33abdacb5cb1655042f6538d6b085a8", size = 159240, upload-time = "2025-10-14T04:40:58.358Z" },
+ { url = "https://files.pythonhosted.org/packages/c0/10/d20b513afe03acc89ec33948320a5544d31f21b05368436d580dec4e234d/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:11d694519d7f29d6cd09f6ac70028dba10f92f6cdd059096db198c283794ac86", size = 153471, upload-time = "2025-10-14T04:40:59.468Z" },
+ { url = "https://files.pythonhosted.org/packages/61/fa/fbf177b55bdd727010f9c0a3c49eefa1d10f960e5f09d1d887bf93c2e698/charset_normalizer-3.4.4-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ac1c4a689edcc530fc9d9aa11f5774b9e2f33f9a0c6a57864e90908f5208d30a", size = 150864, upload-time = "2025-10-14T04:41:00.623Z" },
+ { url = "https://files.pythonhosted.org/packages/05/12/9fbc6a4d39c0198adeebbde20b619790e9236557ca59fc40e0e3cebe6f40/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:21d142cc6c0ec30d2efee5068ca36c128a30b0f2c53c1c07bd78cb6bc1d3be5f", size = 150647, upload-time = "2025-10-14T04:41:01.754Z" },
+ { url = "https://files.pythonhosted.org/packages/ad/1f/6a9a593d52e3e8c5d2b167daf8c6b968808efb57ef4c210acb907c365bc4/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:5dbe56a36425d26d6cfb40ce79c314a2e4dd6211d51d6d2191c00bed34f354cc", size = 145110, upload-time = "2025-10-14T04:41:03.231Z" },
+ { url = "https://files.pythonhosted.org/packages/30/42/9a52c609e72471b0fc54386dc63c3781a387bb4fe61c20231a4ebcd58bdd/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5bfbb1b9acf3334612667b61bd3002196fe2a1eb4dd74d247e0f2a4d50ec9bbf", size = 162839, upload-time = "2025-10-14T04:41:04.715Z" },
+ { url = "https://files.pythonhosted.org/packages/c4/5b/c0682bbf9f11597073052628ddd38344a3d673fda35a36773f7d19344b23/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:d055ec1e26e441f6187acf818b73564e6e6282709e9bcb5b63f5b23068356a15", size = 150667, upload-time = "2025-10-14T04:41:05.827Z" },
+ { url = "https://files.pythonhosted.org/packages/e4/24/a41afeab6f990cf2daf6cb8c67419b63b48cf518e4f56022230840c9bfb2/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:af2d8c67d8e573d6de5bc30cdb27e9b95e49115cd9baad5ddbd1a6207aaa82a9", size = 160535, upload-time = "2025-10-14T04:41:06.938Z" },
+ { url = "https://files.pythonhosted.org/packages/2a/e5/6a4ce77ed243c4a50a1fecca6aaaab419628c818a49434be428fe24c9957/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:780236ac706e66881f3b7f2f32dfe90507a09e67d1d454c762cf642e6e1586e0", size = 154816, upload-time = "2025-10-14T04:41:08.101Z" },
+ { url = "https://files.pythonhosted.org/packages/a8/ef/89297262b8092b312d29cdb2517cb1237e51db8ecef2e9af5edbe7b683b1/charset_normalizer-3.4.4-cp312-cp312-win32.whl", hash = "sha256:5833d2c39d8896e4e19b689ffc198f08ea58116bee26dea51e362ecc7cd3ed26", size = 99694, upload-time = "2025-10-14T04:41:09.23Z" },
+ { url = "https://files.pythonhosted.org/packages/3d/2d/1e5ed9dd3b3803994c155cd9aacb60c82c331bad84daf75bcb9c91b3295e/charset_normalizer-3.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:a79cfe37875f822425b89a82333404539ae63dbdddf97f84dcbc3d339aae9525", size = 107131, upload-time = "2025-10-14T04:41:10.467Z" },
+ { url = "https://files.pythonhosted.org/packages/d0/d9/0ed4c7098a861482a7b6a95603edce4c0d9db2311af23da1fb2b75ec26fc/charset_normalizer-3.4.4-cp312-cp312-win_arm64.whl", hash = "sha256:376bec83a63b8021bb5c8ea75e21c4ccb86e7e45ca4eb81146091b56599b80c3", size = 100390, upload-time = "2025-10-14T04:41:11.915Z" },
+ { url = "https://files.pythonhosted.org/packages/97/45/4b3a1239bbacd321068ea6e7ac28875b03ab8bc0aa0966452db17cd36714/charset_normalizer-3.4.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:e1f185f86a6f3403aa2420e815904c67b2f9ebc443f045edd0de921108345794", size = 208091, upload-time = "2025-10-14T04:41:13.346Z" },
+ { url = "https://files.pythonhosted.org/packages/7d/62/73a6d7450829655a35bb88a88fca7d736f9882a27eacdca2c6d505b57e2e/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b39f987ae8ccdf0d2642338faf2abb1862340facc796048b604ef14919e55ed", size = 147936, upload-time = "2025-10-14T04:41:14.461Z" },
+ { url = "https://files.pythonhosted.org/packages/89/c5/adb8c8b3d6625bef6d88b251bbb0d95f8205831b987631ab0c8bb5d937c2/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3162d5d8ce1bb98dd51af660f2121c55d0fa541b46dff7bb9b9f86ea1d87de72", size = 144180, upload-time = "2025-10-14T04:41:15.588Z" },
+ { url = "https://files.pythonhosted.org/packages/91/ed/9706e4070682d1cc219050b6048bfd293ccf67b3d4f5a4f39207453d4b99/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:81d5eb2a312700f4ecaa977a8235b634ce853200e828fbadf3a9c50bab278328", size = 161346, upload-time = "2025-10-14T04:41:16.738Z" },
+ { url = "https://files.pythonhosted.org/packages/d5/0d/031f0d95e4972901a2f6f09ef055751805ff541511dc1252ba3ca1f80cf5/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5bd2293095d766545ec1a8f612559f6b40abc0eb18bb2f5d1171872d34036ede", size = 158874, upload-time = "2025-10-14T04:41:17.923Z" },
+ { url = "https://files.pythonhosted.org/packages/f5/83/6ab5883f57c9c801ce5e5677242328aa45592be8a00644310a008d04f922/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a8a8b89589086a25749f471e6a900d3f662d1d3b6e2e59dcecf787b1cc3a1894", size = 153076, upload-time = "2025-10-14T04:41:19.106Z" },
+ { url = "https://files.pythonhosted.org/packages/75/1e/5ff781ddf5260e387d6419959ee89ef13878229732732ee73cdae01800f2/charset_normalizer-3.4.4-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc7637e2f80d8530ee4a78e878bce464f70087ce73cf7c1caf142416923b98f1", size = 150601, upload-time = "2025-10-14T04:41:20.245Z" },
+ { url = "https://files.pythonhosted.org/packages/d7/57/71be810965493d3510a6ca79b90c19e48696fb1ff964da319334b12677f0/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f8bf04158c6b607d747e93949aa60618b61312fe647a6369f88ce2ff16043490", size = 150376, upload-time = "2025-10-14T04:41:21.398Z" },
+ { url = "https://files.pythonhosted.org/packages/e5/d5/c3d057a78c181d007014feb7e9f2e65905a6c4ef182c0ddf0de2924edd65/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:554af85e960429cf30784dd47447d5125aaa3b99a6f0683589dbd27e2f45da44", size = 144825, upload-time = "2025-10-14T04:41:22.583Z" },
+ { url = "https://files.pythonhosted.org/packages/e6/8c/d0406294828d4976f275ffbe66f00266c4b3136b7506941d87c00cab5272/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:74018750915ee7ad843a774364e13a3db91682f26142baddf775342c3f5b1133", size = 162583, upload-time = "2025-10-14T04:41:23.754Z" },
+ { url = "https://files.pythonhosted.org/packages/d7/24/e2aa1f18c8f15c4c0e932d9287b8609dd30ad56dbe41d926bd846e22fb8d/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:c0463276121fdee9c49b98908b3a89c39be45d86d1dbaa22957e38f6321d4ce3", size = 150366, upload-time = "2025-10-14T04:41:25.27Z" },
+ { url = "https://files.pythonhosted.org/packages/e4/5b/1e6160c7739aad1e2df054300cc618b06bf784a7a164b0f238360721ab86/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:362d61fd13843997c1c446760ef36f240cf81d3ebf74ac62652aebaf7838561e", size = 160300, upload-time = "2025-10-14T04:41:26.725Z" },
+ { url = "https://files.pythonhosted.org/packages/7a/10/f882167cd207fbdd743e55534d5d9620e095089d176d55cb22d5322f2afd/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9a26f18905b8dd5d685d6d07b0cdf98a79f3c7a918906af7cc143ea2e164c8bc", size = 154465, upload-time = "2025-10-14T04:41:28.322Z" },
+ { url = "https://files.pythonhosted.org/packages/89/66/c7a9e1b7429be72123441bfdbaf2bc13faab3f90b933f664db506dea5915/charset_normalizer-3.4.4-cp313-cp313-win32.whl", hash = "sha256:9b35f4c90079ff2e2edc5b26c0c77925e5d2d255c42c74fdb70fb49b172726ac", size = 99404, upload-time = "2025-10-14T04:41:29.95Z" },
+ { url = "https://files.pythonhosted.org/packages/c4/26/b9924fa27db384bdcd97ab83b4f0a8058d96ad9626ead570674d5e737d90/charset_normalizer-3.4.4-cp313-cp313-win_amd64.whl", hash = "sha256:b435cba5f4f750aa6c0a0d92c541fb79f69a387c91e61f1795227e4ed9cece14", size = 107092, upload-time = "2025-10-14T04:41:31.188Z" },
+ { url = "https://files.pythonhosted.org/packages/af/8f/3ed4bfa0c0c72a7ca17f0380cd9e4dd842b09f664e780c13cff1dcf2ef1b/charset_normalizer-3.4.4-cp313-cp313-win_arm64.whl", hash = "sha256:542d2cee80be6f80247095cc36c418f7bddd14f4a6de45af91dfad36d817bba2", size = 100408, upload-time = "2025-10-14T04:41:32.624Z" },
+ { url = "https://files.pythonhosted.org/packages/2a/35/7051599bd493e62411d6ede36fd5af83a38f37c4767b92884df7301db25d/charset_normalizer-3.4.4-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:da3326d9e65ef63a817ecbcc0df6e94463713b754fe293eaa03da99befb9a5bd", size = 207746, upload-time = "2025-10-14T04:41:33.773Z" },
+ { url = "https://files.pythonhosted.org/packages/10/9a/97c8d48ef10d6cd4fcead2415523221624bf58bcf68a802721a6bc807c8f/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8af65f14dc14a79b924524b1e7fffe304517b2bff5a58bf64f30b98bbc5079eb", size = 147889, upload-time = "2025-10-14T04:41:34.897Z" },
+ { url = "https://files.pythonhosted.org/packages/10/bf/979224a919a1b606c82bd2c5fa49b5c6d5727aa47b4312bb27b1734f53cd/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74664978bb272435107de04e36db5a9735e78232b85b77d45cfb38f758efd33e", size = 143641, upload-time = "2025-10-14T04:41:36.116Z" },
+ { url = "https://files.pythonhosted.org/packages/ba/33/0ad65587441fc730dc7bd90e9716b30b4702dc7b617e6ba4997dc8651495/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:752944c7ffbfdd10c074dc58ec2d5a8a4cd9493b314d367c14d24c17684ddd14", size = 160779, upload-time = "2025-10-14T04:41:37.229Z" },
+ { url = "https://files.pythonhosted.org/packages/67/ed/331d6b249259ee71ddea93f6f2f0a56cfebd46938bde6fcc6f7b9a3d0e09/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d1f13550535ad8cff21b8d757a3257963e951d96e20ec82ab44bc64aeb62a191", size = 159035, upload-time = "2025-10-14T04:41:38.368Z" },
+ { url = "https://files.pythonhosted.org/packages/67/ff/f6b948ca32e4f2a4576aa129d8bed61f2e0543bf9f5f2b7fc3758ed005c9/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ecaae4149d99b1c9e7b88bb03e3221956f68fd6d50be2ef061b2381b61d20838", size = 152542, upload-time = "2025-10-14T04:41:39.862Z" },
+ { url = "https://files.pythonhosted.org/packages/16/85/276033dcbcc369eb176594de22728541a925b2632f9716428c851b149e83/charset_normalizer-3.4.4-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:cb6254dc36b47a990e59e1068afacdcd02958bdcce30bb50cc1700a8b9d624a6", size = 149524, upload-time = "2025-10-14T04:41:41.319Z" },
+ { url = "https://files.pythonhosted.org/packages/9e/f2/6a2a1f722b6aba37050e626530a46a68f74e63683947a8acff92569f979a/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:c8ae8a0f02f57a6e61203a31428fa1d677cbe50c93622b4149d5c0f319c1d19e", size = 150395, upload-time = "2025-10-14T04:41:42.539Z" },
+ { url = "https://files.pythonhosted.org/packages/60/bb/2186cb2f2bbaea6338cad15ce23a67f9b0672929744381e28b0592676824/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:47cc91b2f4dd2833fddaedd2893006b0106129d4b94fdb6af1f4ce5a9965577c", size = 143680, upload-time = "2025-10-14T04:41:43.661Z" },
+ { url = "https://files.pythonhosted.org/packages/7d/a5/bf6f13b772fbb2a90360eb620d52ed8f796f3c5caee8398c3b2eb7b1c60d/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:82004af6c302b5d3ab2cfc4cc5f29db16123b1a8417f2e25f9066f91d4411090", size = 162045, upload-time = "2025-10-14T04:41:44.821Z" },
+ { url = "https://files.pythonhosted.org/packages/df/c5/d1be898bf0dc3ef9030c3825e5d3b83f2c528d207d246cbabe245966808d/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:2b7d8f6c26245217bd2ad053761201e9f9680f8ce52f0fcd8d0755aeae5b2152", size = 149687, upload-time = "2025-10-14T04:41:46.442Z" },
+ { url = "https://files.pythonhosted.org/packages/a5/42/90c1f7b9341eef50c8a1cb3f098ac43b0508413f33affd762855f67a410e/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:799a7a5e4fb2d5898c60b640fd4981d6a25f1c11790935a44ce38c54e985f828", size = 160014, upload-time = "2025-10-14T04:41:47.631Z" },
+ { url = "https://files.pythonhosted.org/packages/76/be/4d3ee471e8145d12795ab655ece37baed0929462a86e72372fd25859047c/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:99ae2cffebb06e6c22bdc25801d7b30f503cc87dbd283479e7b606f70aff57ec", size = 154044, upload-time = "2025-10-14T04:41:48.81Z" },
+ { url = "https://files.pythonhosted.org/packages/b0/6f/8f7af07237c34a1defe7defc565a9bc1807762f672c0fde711a4b22bf9c0/charset_normalizer-3.4.4-cp314-cp314-win32.whl", hash = "sha256:f9d332f8c2a2fcbffe1378594431458ddbef721c1769d78e2cbc06280d8155f9", size = 99940, upload-time = "2025-10-14T04:41:49.946Z" },
+ { url = "https://files.pythonhosted.org/packages/4b/51/8ade005e5ca5b0d80fb4aff72a3775b325bdc3d27408c8113811a7cbe640/charset_normalizer-3.4.4-cp314-cp314-win_amd64.whl", hash = "sha256:8a6562c3700cce886c5be75ade4a5db4214fda19fede41d9792d100288d8f94c", size = 107104, upload-time = "2025-10-14T04:41:51.051Z" },
+ { url = "https://files.pythonhosted.org/packages/da/5f/6b8f83a55bb8278772c5ae54a577f3099025f9ade59d0136ac24a0df4bde/charset_normalizer-3.4.4-cp314-cp314-win_arm64.whl", hash = "sha256:de00632ca48df9daf77a2c65a484531649261ec9f25489917f09e455cb09ddb2", size = 100743, upload-time = "2025-10-14T04:41:52.122Z" },
+ { url = "https://files.pythonhosted.org/packages/0a/4c/925909008ed5a988ccbb72dcc897407e5d6d3bd72410d69e051fc0c14647/charset_normalizer-3.4.4-py3-none-any.whl", hash = "sha256:7a32c560861a02ff789ad905a2fe94e3f840803362c84fecf1851cb4cf3dc37f", size = 53402, upload-time = "2025-10-14T04:42:31.76Z" },
]
[[package]]
name = "click"
-version = "8.2.1"
+version = "8.3.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "colorama", marker = "sys_platform == 'win32' or (extra == 'group-18-llama-stack-client-pydantic-v1' and extra == 'group-18-llama-stack-client-pydantic-v2')" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/60/6c/8ca2efa64cf75a977a0d7fac081354553ebe483345c734fb6b6515d96bbc/click-8.2.1.tar.gz", hash = "sha256:27c491cc05d968d271d5a1db13e3b5a184636d9d930f148c50b038f0d0646202", size = 286342, upload-time = "2025-05-20T23:19:49.832Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/46/61/de6cd827efad202d7057d93e0fed9294b96952e188f7384832791c7b2254/click-8.3.0.tar.gz", hash = "sha256:e7b8232224eba16f4ebe410c25ced9f7875cb5f3263ffc93cc3e8da705e229c4", size = 276943, upload-time = "2025-09-18T17:32:23.696Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/85/32/10bb5764d90a8eee674e9dc6f4db6a0ab47c8c4d0d83c27f7c39ac415a4d/click-8.2.1-py3-none-any.whl", hash = "sha256:61a3265b914e850b85317d0b3109c7f8cd35a670f963866005d6ef1d5175a12b", size = 102215, upload-time = "2025-05-20T23:19:47.796Z" },
+ { url = "https://files.pythonhosted.org/packages/db/d3/9dcc0f5797f070ec8edf30fbadfb200e71d9db6b84d211e3b2085a7589a0/click-8.3.0-py3-none-any.whl", hash = "sha256:9b9f285302c6e3064f4330c05f05b81945b2a39544279343e6e7c5f27a9baddc", size = 107295, upload-time = "2025-09-18T17:32:22.42Z" },
]
[[package]]
@@ -254,11 +270,11 @@ wheels = [
[[package]]
name = "filelock"
-version = "3.19.1"
+version = "3.20.0"
source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/40/bb/0ab3e58d22305b6f5440629d20683af28959bf793d98d11950e305c1c326/filelock-3.19.1.tar.gz", hash = "sha256:66eda1888b0171c998b35be2bcc0f6d75c388a7ce20c3f3f37aa8e96c2dddf58", size = 17687, upload-time = "2025-08-14T16:56:03.016Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/58/46/0028a82567109b5ef6e4d2a1f04a583fb513e6cf9527fcdd09afd817deeb/filelock-3.20.0.tar.gz", hash = "sha256:711e943b4ec6be42e1d4e6690b48dc175c822967466bb31c0c293f34334c13f4", size = 18922, upload-time = "2025-10-08T18:03:50.056Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/42/14/42b2651a2f46b022ccd948bca9f2d5af0fd8929c4eec235b8d6d844fbe67/filelock-3.19.1-py3-none-any.whl", hash = "sha256:d38e30481def20772f5baf097c122c3babc4fcdb7e14e57049eb9d88c6dc017d", size = 15988, upload-time = "2025-08-14T16:56:01.633Z" },
+ { url = "https://files.pythonhosted.org/packages/76/91/7216b27286936c16f5b4d0c530087e4a54eead683e6b0b73dd0c64844af6/filelock-3.20.0-py3-none-any.whl", hash = "sha256:339b4732ffda5cd79b13f4e2711a31b0365ce445d95d243bb996273d072546a2", size = 16054, upload-time = "2025-10-08T18:03:48.35Z" },
]
[[package]]
@@ -372,24 +388,24 @@ wheels = [
[[package]]
name = "httpx-aiohttp"
-version = "0.1.8"
+version = "0.1.9"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "aiohttp" },
{ name = "httpx" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/37/19/ae2d2bf1f57fdd23c8ad83675599fb5c407fa13bc20e90f00cffa4dea3aa/httpx_aiohttp-0.1.8.tar.gz", hash = "sha256:756c5e74cdb568c3248ba63fe82bfe8bbe64b928728720f7eaac64b3cf46f308", size = 25401, upload-time = "2025-07-04T10:40:32.329Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/d8/f2/9a86ce9bc48cf57dabb3a3160dfed26d8bbe5a2478a51f9d1dbf89f2f1fc/httpx_aiohttp-0.1.9.tar.gz", hash = "sha256:4ee8b22e6f2e7c80cd03be29eff98bfe7d89bd77f021ce0b578ee76b73b4bfe6", size = 206023, upload-time = "2025-10-15T08:52:57.475Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/54/7a/514c484b88cc4ebbcd2e27e92b86019c0c5bb920582f5fbb10b7e6c78574/httpx_aiohttp-0.1.8-py3-none-any.whl", hash = "sha256:b7bd958d1331f3759a38a0ba22ad29832cb63ca69498c17735228055bf78fa7e", size = 6180, upload-time = "2025-07-04T10:40:31.522Z" },
+ { url = "https://files.pythonhosted.org/packages/a1/db/5cfa8254a86c34a1ab7fe0dbec9f81bb5ebd831cbdd65aa4be4f37027804/httpx_aiohttp-0.1.9-py3-none-any.whl", hash = "sha256:3dc2845568b07742588710fcf3d72db2cbcdf2acc93376edf85f789c4d8e5fda", size = 6180, upload-time = "2025-10-15T08:52:56.521Z" },
]
[[package]]
name = "identify"
-version = "2.6.14"
+version = "2.6.15"
source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/52/c4/62963f25a678f6a050fb0505a65e9e726996171e6dbe1547f79619eefb15/identify-2.6.14.tar.gz", hash = "sha256:663494103b4f717cb26921c52f8751363dc89db64364cd836a9bf1535f53cd6a", size = 99283, upload-time = "2025-09-06T19:30:52.938Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/ff/e7/685de97986c916a6d93b3876139e00eef26ad5bbbd61925d670ae8013449/identify-2.6.15.tar.gz", hash = "sha256:e4f4864b96c6557ef2a1e1c951771838f4edc9df3a72ec7118b338801b11c7bf", size = 99311, upload-time = "2025-10-02T17:43:40.631Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/e5/ae/2ad30f4652712c82f1c23423d79136fbce338932ad166d70c1efb86a5998/identify-2.6.14-py2.py3-none-any.whl", hash = "sha256:11a073da82212c6646b1f39bb20d4483bfb9543bd5566fec60053c4bb309bf2e", size = 99172, upload-time = "2025-09-06T19:30:51.759Z" },
+ { url = "https://files.pythonhosted.org/packages/0f/1c/e5fd8f973d4f375adb21565739498e2e9a1e54c858a97b9a8ccfdc81da9b/identify-2.6.15-py2.py3-none-any.whl", hash = "sha256:1181ef7608e00704db228516541eb83a88a9f94433a8c80bb9b5bd54b1d81757", size = 99183, upload-time = "2025-10-02T17:43:39.137Z" },
]
[[package]]
@@ -424,7 +440,7 @@ wheels = [
[[package]]
name = "llama-stack-client"
-version = "0.2.23"
+version = "0.3.0"
source = { editable = "." }
dependencies = [
{ name = "anyio" },
@@ -482,7 +498,7 @@ requires-dist = [
{ name = "distro", specifier = ">=1.7.0,<2" },
{ name = "fire" },
{ name = "httpx", specifier = ">=0.23.0,<1" },
- { name = "httpx-aiohttp", marker = "extra == 'aiohttp'", specifier = ">=0.1.8" },
+ { name = "httpx-aiohttp", marker = "extra == 'aiohttp'", specifier = ">=0.1.9" },
{ name = "pandas" },
{ name = "prompt-toolkit" },
{ name = "pyaml" },
@@ -644,65 +660,65 @@ wheels = [
[[package]]
name = "numpy"
-version = "2.3.3"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/d0/19/95b3d357407220ed24c139018d2518fab0a61a948e68286a25f1a4d049ff/numpy-2.3.3.tar.gz", hash = "sha256:ddc7c39727ba62b80dfdbedf400d1c10ddfa8eefbd7ec8dcb118be8b56d31029", size = 20576648, upload-time = "2025-09-09T16:54:12.543Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/51/5d/bb7fc075b762c96329147799e1bcc9176ab07ca6375ea976c475482ad5b3/numpy-2.3.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:cfdd09f9c84a1a934cde1eec2267f0a43a7cd44b2cca4ff95b7c0d14d144b0bf", size = 20957014, upload-time = "2025-09-09T15:56:29.966Z" },
- { url = "https://files.pythonhosted.org/packages/6b/0e/c6211bb92af26517acd52125a237a92afe9c3124c6a68d3b9f81b62a0568/numpy-2.3.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cb32e3cf0f762aee47ad1ddc6672988f7f27045b0783c887190545baba73aa25", size = 14185220, upload-time = "2025-09-09T15:56:32.175Z" },
- { url = "https://files.pythonhosted.org/packages/22/f2/07bb754eb2ede9073f4054f7c0286b0d9d2e23982e090a80d478b26d35ca/numpy-2.3.3-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:396b254daeb0a57b1fe0ecb5e3cff6fa79a380fa97c8f7781a6d08cd429418fe", size = 5113918, upload-time = "2025-09-09T15:56:34.175Z" },
- { url = "https://files.pythonhosted.org/packages/81/0a/afa51697e9fb74642f231ea36aca80fa17c8fb89f7a82abd5174023c3960/numpy-2.3.3-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:067e3d7159a5d8f8a0b46ee11148fc35ca9b21f61e3c49fbd0a027450e65a33b", size = 6647922, upload-time = "2025-09-09T15:56:36.149Z" },
- { url = "https://files.pythonhosted.org/packages/5d/f5/122d9cdb3f51c520d150fef6e87df9279e33d19a9611a87c0d2cf78a89f4/numpy-2.3.3-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1c02d0629d25d426585fb2e45a66154081b9fa677bc92a881ff1d216bc9919a8", size = 14281991, upload-time = "2025-09-09T15:56:40.548Z" },
- { url = "https://files.pythonhosted.org/packages/51/64/7de3c91e821a2debf77c92962ea3fe6ac2bc45d0778c1cbe15d4fce2fd94/numpy-2.3.3-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d9192da52b9745f7f0766531dcfa978b7763916f158bb63bdb8a1eca0068ab20", size = 16641643, upload-time = "2025-09-09T15:56:43.343Z" },
- { url = "https://files.pythonhosted.org/packages/30/e4/961a5fa681502cd0d68907818b69f67542695b74e3ceaa513918103b7e80/numpy-2.3.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:cd7de500a5b66319db419dc3c345244404a164beae0d0937283b907d8152e6ea", size = 16056787, upload-time = "2025-09-09T15:56:46.141Z" },
- { url = "https://files.pythonhosted.org/packages/99/26/92c912b966e47fbbdf2ad556cb17e3a3088e2e1292b9833be1dfa5361a1a/numpy-2.3.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:93d4962d8f82af58f0b2eb85daaf1b3ca23fe0a85d0be8f1f2b7bb46034e56d7", size = 18579598, upload-time = "2025-09-09T15:56:49.844Z" },
- { url = "https://files.pythonhosted.org/packages/17/b6/fc8f82cb3520768718834f310c37d96380d9dc61bfdaf05fe5c0b7653e01/numpy-2.3.3-cp312-cp312-win32.whl", hash = "sha256:5534ed6b92f9b7dca6c0a19d6df12d41c68b991cef051d108f6dbff3babc4ebf", size = 6320800, upload-time = "2025-09-09T15:56:52.499Z" },
- { url = "https://files.pythonhosted.org/packages/32/ee/de999f2625b80d043d6d2d628c07d0d5555a677a3cf78fdf868d409b8766/numpy-2.3.3-cp312-cp312-win_amd64.whl", hash = "sha256:497d7cad08e7092dba36e3d296fe4c97708c93daf26643a1ae4b03f6294d30eb", size = 12786615, upload-time = "2025-09-09T15:56:54.422Z" },
- { url = "https://files.pythonhosted.org/packages/49/6e/b479032f8a43559c383acb20816644f5f91c88f633d9271ee84f3b3a996c/numpy-2.3.3-cp312-cp312-win_arm64.whl", hash = "sha256:ca0309a18d4dfea6fc6262a66d06c26cfe4640c3926ceec90e57791a82b6eee5", size = 10195936, upload-time = "2025-09-09T15:56:56.541Z" },
- { url = "https://files.pythonhosted.org/packages/7d/b9/984c2b1ee61a8b803bf63582b4ac4242cf76e2dbd663efeafcb620cc0ccb/numpy-2.3.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f5415fb78995644253370985342cd03572ef8620b934da27d77377a2285955bf", size = 20949588, upload-time = "2025-09-09T15:56:59.087Z" },
- { url = "https://files.pythonhosted.org/packages/a6/e4/07970e3bed0b1384d22af1e9912527ecbeb47d3b26e9b6a3bced068b3bea/numpy-2.3.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d00de139a3324e26ed5b95870ce63be7ec7352171bc69a4cf1f157a48e3eb6b7", size = 14177802, upload-time = "2025-09-09T15:57:01.73Z" },
- { url = "https://files.pythonhosted.org/packages/35/c7/477a83887f9de61f1203bad89cf208b7c19cc9fef0cebef65d5a1a0619f2/numpy-2.3.3-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:9dc13c6a5829610cc07422bc74d3ac083bd8323f14e2827d992f9e52e22cd6a6", size = 5106537, upload-time = "2025-09-09T15:57:03.765Z" },
- { url = "https://files.pythonhosted.org/packages/52/47/93b953bd5866a6f6986344d045a207d3f1cfbad99db29f534ea9cee5108c/numpy-2.3.3-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:d79715d95f1894771eb4e60fb23f065663b2298f7d22945d66877aadf33d00c7", size = 6640743, upload-time = "2025-09-09T15:57:07.921Z" },
- { url = "https://files.pythonhosted.org/packages/23/83/377f84aaeb800b64c0ef4de58b08769e782edcefa4fea712910b6f0afd3c/numpy-2.3.3-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:952cfd0748514ea7c3afc729a0fc639e61655ce4c55ab9acfab14bda4f402b4c", size = 14278881, upload-time = "2025-09-09T15:57:11.349Z" },
- { url = "https://files.pythonhosted.org/packages/9a/a5/bf3db6e66c4b160d6ea10b534c381a1955dfab34cb1017ea93aa33c70ed3/numpy-2.3.3-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5b83648633d46f77039c29078751f80da65aa64d5622a3cd62aaef9d835b6c93", size = 16636301, upload-time = "2025-09-09T15:57:14.245Z" },
- { url = "https://files.pythonhosted.org/packages/a2/59/1287924242eb4fa3f9b3a2c30400f2e17eb2707020d1c5e3086fe7330717/numpy-2.3.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b001bae8cea1c7dfdb2ae2b017ed0a6f2102d7a70059df1e338e307a4c78a8ae", size = 16053645, upload-time = "2025-09-09T15:57:16.534Z" },
- { url = "https://files.pythonhosted.org/packages/e6/93/b3d47ed882027c35e94ac2320c37e452a549f582a5e801f2d34b56973c97/numpy-2.3.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8e9aced64054739037d42fb84c54dd38b81ee238816c948c8f3ed134665dcd86", size = 18578179, upload-time = "2025-09-09T15:57:18.883Z" },
- { url = "https://files.pythonhosted.org/packages/20/d9/487a2bccbf7cc9d4bfc5f0f197761a5ef27ba870f1e3bbb9afc4bbe3fcc2/numpy-2.3.3-cp313-cp313-win32.whl", hash = "sha256:9591e1221db3f37751e6442850429b3aabf7026d3b05542d102944ca7f00c8a8", size = 6312250, upload-time = "2025-09-09T15:57:21.296Z" },
- { url = "https://files.pythonhosted.org/packages/1b/b5/263ebbbbcede85028f30047eab3d58028d7ebe389d6493fc95ae66c636ab/numpy-2.3.3-cp313-cp313-win_amd64.whl", hash = "sha256:f0dadeb302887f07431910f67a14d57209ed91130be0adea2f9793f1a4f817cf", size = 12783269, upload-time = "2025-09-09T15:57:23.034Z" },
- { url = "https://files.pythonhosted.org/packages/fa/75/67b8ca554bbeaaeb3fac2e8bce46967a5a06544c9108ec0cf5cece559b6c/numpy-2.3.3-cp313-cp313-win_arm64.whl", hash = "sha256:3c7cf302ac6e0b76a64c4aecf1a09e51abd9b01fc7feee80f6c43e3ab1b1dbc5", size = 10195314, upload-time = "2025-09-09T15:57:25.045Z" },
- { url = "https://files.pythonhosted.org/packages/11/d0/0d1ddec56b162042ddfafeeb293bac672de9b0cfd688383590090963720a/numpy-2.3.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:eda59e44957d272846bb407aad19f89dc6f58fecf3504bd144f4c5cf81a7eacc", size = 21048025, upload-time = "2025-09-09T15:57:27.257Z" },
- { url = "https://files.pythonhosted.org/packages/36/9e/1996ca6b6d00415b6acbdd3c42f7f03ea256e2c3f158f80bd7436a8a19f3/numpy-2.3.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:823d04112bc85ef5c4fda73ba24e6096c8f869931405a80aa8b0e604510a26bc", size = 14301053, upload-time = "2025-09-09T15:57:30.077Z" },
- { url = "https://files.pythonhosted.org/packages/05/24/43da09aa764c68694b76e84b3d3f0c44cb7c18cdc1ba80e48b0ac1d2cd39/numpy-2.3.3-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:40051003e03db4041aa325da2a0971ba41cf65714e65d296397cc0e32de6018b", size = 5229444, upload-time = "2025-09-09T15:57:32.733Z" },
- { url = "https://files.pythonhosted.org/packages/bc/14/50ffb0f22f7218ef8af28dd089f79f68289a7a05a208db9a2c5dcbe123c1/numpy-2.3.3-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:6ee9086235dd6ab7ae75aba5662f582a81ced49f0f1c6de4260a78d8f2d91a19", size = 6738039, upload-time = "2025-09-09T15:57:34.328Z" },
- { url = "https://files.pythonhosted.org/packages/55/52/af46ac0795e09657d45a7f4db961917314377edecf66db0e39fa7ab5c3d3/numpy-2.3.3-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:94fcaa68757c3e2e668ddadeaa86ab05499a70725811e582b6a9858dd472fb30", size = 14352314, upload-time = "2025-09-09T15:57:36.255Z" },
- { url = "https://files.pythonhosted.org/packages/a7/b1/dc226b4c90eb9f07a3fff95c2f0db3268e2e54e5cce97c4ac91518aee71b/numpy-2.3.3-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:da1a74b90e7483d6ce5244053399a614b1d6b7bc30a60d2f570e5071f8959d3e", size = 16701722, upload-time = "2025-09-09T15:57:38.622Z" },
- { url = "https://files.pythonhosted.org/packages/9d/9d/9d8d358f2eb5eced14dba99f110d83b5cd9a4460895230f3b396ad19a323/numpy-2.3.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:2990adf06d1ecee3b3dcbb4977dfab6e9f09807598d647f04d385d29e7a3c3d3", size = 16132755, upload-time = "2025-09-09T15:57:41.16Z" },
- { url = "https://files.pythonhosted.org/packages/b6/27/b3922660c45513f9377b3fb42240bec63f203c71416093476ec9aa0719dc/numpy-2.3.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:ed635ff692483b8e3f0fcaa8e7eb8a75ee71aa6d975388224f70821421800cea", size = 18651560, upload-time = "2025-09-09T15:57:43.459Z" },
- { url = "https://files.pythonhosted.org/packages/5b/8e/3ab61a730bdbbc201bb245a71102aa609f0008b9ed15255500a99cd7f780/numpy-2.3.3-cp313-cp313t-win32.whl", hash = "sha256:a333b4ed33d8dc2b373cc955ca57babc00cd6f9009991d9edc5ddbc1bac36bcd", size = 6442776, upload-time = "2025-09-09T15:57:45.793Z" },
- { url = "https://files.pythonhosted.org/packages/1c/3a/e22b766b11f6030dc2decdeff5c2fb1610768055603f9f3be88b6d192fb2/numpy-2.3.3-cp313-cp313t-win_amd64.whl", hash = "sha256:4384a169c4d8f97195980815d6fcad04933a7e1ab3b530921c3fef7a1c63426d", size = 12927281, upload-time = "2025-09-09T15:57:47.492Z" },
- { url = "https://files.pythonhosted.org/packages/7b/42/c2e2bc48c5e9b2a83423f99733950fbefd86f165b468a3d85d52b30bf782/numpy-2.3.3-cp313-cp313t-win_arm64.whl", hash = "sha256:75370986cc0bc66f4ce5110ad35aae6d182cc4ce6433c40ad151f53690130bf1", size = 10265275, upload-time = "2025-09-09T15:57:49.647Z" },
- { url = "https://files.pythonhosted.org/packages/6b/01/342ad585ad82419b99bcf7cebe99e61da6bedb89e213c5fd71acc467faee/numpy-2.3.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:cd052f1fa6a78dee696b58a914b7229ecfa41f0a6d96dc663c1220a55e137593", size = 20951527, upload-time = "2025-09-09T15:57:52.006Z" },
- { url = "https://files.pythonhosted.org/packages/ef/d8/204e0d73fc1b7a9ee80ab1fe1983dd33a4d64a4e30a05364b0208e9a241a/numpy-2.3.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:414a97499480067d305fcac9716c29cf4d0d76db6ebf0bf3cbce666677f12652", size = 14186159, upload-time = "2025-09-09T15:57:54.407Z" },
- { url = "https://files.pythonhosted.org/packages/22/af/f11c916d08f3a18fb8ba81ab72b5b74a6e42ead4c2846d270eb19845bf74/numpy-2.3.3-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:50a5fe69f135f88a2be9b6ca0481a68a136f6febe1916e4920e12f1a34e708a7", size = 5114624, upload-time = "2025-09-09T15:57:56.5Z" },
- { url = "https://files.pythonhosted.org/packages/fb/11/0ed919c8381ac9d2ffacd63fd1f0c34d27e99cab650f0eb6f110e6ae4858/numpy-2.3.3-cp314-cp314-macosx_14_0_x86_64.whl", hash = "sha256:b912f2ed2b67a129e6a601e9d93d4fa37bef67e54cac442a2f588a54afe5c67a", size = 6642627, upload-time = "2025-09-09T15:57:58.206Z" },
- { url = "https://files.pythonhosted.org/packages/ee/83/deb5f77cb0f7ba6cb52b91ed388b47f8f3c2e9930d4665c600408d9b90b9/numpy-2.3.3-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9e318ee0596d76d4cb3d78535dc005fa60e5ea348cd131a51e99d0bdbe0b54fe", size = 14296926, upload-time = "2025-09-09T15:58:00.035Z" },
- { url = "https://files.pythonhosted.org/packages/77/cc/70e59dcb84f2b005d4f306310ff0a892518cc0c8000a33d0e6faf7ca8d80/numpy-2.3.3-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ce020080e4a52426202bdb6f7691c65bb55e49f261f31a8f506c9f6bc7450421", size = 16638958, upload-time = "2025-09-09T15:58:02.738Z" },
- { url = "https://files.pythonhosted.org/packages/b6/5a/b2ab6c18b4257e099587d5b7f903317bd7115333ad8d4ec4874278eafa61/numpy-2.3.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:e6687dc183aa55dae4a705b35f9c0f8cb178bcaa2f029b241ac5356221d5c021", size = 16071920, upload-time = "2025-09-09T15:58:05.029Z" },
- { url = "https://files.pythonhosted.org/packages/b8/f1/8b3fdc44324a259298520dd82147ff648979bed085feeacc1250ef1656c0/numpy-2.3.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:d8f3b1080782469fdc1718c4ed1d22549b5fb12af0d57d35e992158a772a37cf", size = 18577076, upload-time = "2025-09-09T15:58:07.745Z" },
- { url = "https://files.pythonhosted.org/packages/f0/a1/b87a284fb15a42e9274e7fcea0dad259d12ddbf07c1595b26883151ca3b4/numpy-2.3.3-cp314-cp314-win32.whl", hash = "sha256:cb248499b0bc3be66ebd6578b83e5acacf1d6cb2a77f2248ce0e40fbec5a76d0", size = 6366952, upload-time = "2025-09-09T15:58:10.096Z" },
- { url = "https://files.pythonhosted.org/packages/70/5f/1816f4d08f3b8f66576d8433a66f8fa35a5acfb3bbd0bf6c31183b003f3d/numpy-2.3.3-cp314-cp314-win_amd64.whl", hash = "sha256:691808c2b26b0f002a032c73255d0bd89751425f379f7bcd22d140db593a96e8", size = 12919322, upload-time = "2025-09-09T15:58:12.138Z" },
- { url = "https://files.pythonhosted.org/packages/8c/de/072420342e46a8ea41c324a555fa90fcc11637583fb8df722936aed1736d/numpy-2.3.3-cp314-cp314-win_arm64.whl", hash = "sha256:9ad12e976ca7b10f1774b03615a2a4bab8addce37ecc77394d8e986927dc0dfe", size = 10478630, upload-time = "2025-09-09T15:58:14.64Z" },
- { url = "https://files.pythonhosted.org/packages/d5/df/ee2f1c0a9de7347f14da5dd3cd3c3b034d1b8607ccb6883d7dd5c035d631/numpy-2.3.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:9cc48e09feb11e1db00b320e9d30a4151f7369afb96bd0e48d942d09da3a0d00", size = 21047987, upload-time = "2025-09-09T15:58:16.889Z" },
- { url = "https://files.pythonhosted.org/packages/d6/92/9453bdc5a4e9e69cf4358463f25e8260e2ffc126d52e10038b9077815989/numpy-2.3.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:901bf6123879b7f251d3631967fd574690734236075082078e0571977c6a8e6a", size = 14301076, upload-time = "2025-09-09T15:58:20.343Z" },
- { url = "https://files.pythonhosted.org/packages/13/77/1447b9eb500f028bb44253105bd67534af60499588a5149a94f18f2ca917/numpy-2.3.3-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:7f025652034199c301049296b59fa7d52c7e625017cae4c75d8662e377bf487d", size = 5229491, upload-time = "2025-09-09T15:58:22.481Z" },
- { url = "https://files.pythonhosted.org/packages/3d/f9/d72221b6ca205f9736cb4b2ce3b002f6e45cd67cd6a6d1c8af11a2f0b649/numpy-2.3.3-cp314-cp314t-macosx_14_0_x86_64.whl", hash = "sha256:533ca5f6d325c80b6007d4d7fb1984c303553534191024ec6a524a4c92a5935a", size = 6737913, upload-time = "2025-09-09T15:58:24.569Z" },
- { url = "https://files.pythonhosted.org/packages/3c/5f/d12834711962ad9c46af72f79bb31e73e416ee49d17f4c797f72c96b6ca5/numpy-2.3.3-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0edd58682a399824633b66885d699d7de982800053acf20be1eaa46d92009c54", size = 14352811, upload-time = "2025-09-09T15:58:26.416Z" },
- { url = "https://files.pythonhosted.org/packages/a1/0d/fdbec6629d97fd1bebed56cd742884e4eead593611bbe1abc3eb40d304b2/numpy-2.3.3-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:367ad5d8fbec5d9296d18478804a530f1191e24ab4d75ab408346ae88045d25e", size = 16702689, upload-time = "2025-09-09T15:58:28.831Z" },
- { url = "https://files.pythonhosted.org/packages/9b/09/0a35196dc5575adde1eb97ddfbc3e1687a814f905377621d18ca9bc2b7dd/numpy-2.3.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:8f6ac61a217437946a1fa48d24c47c91a0c4f725237871117dea264982128097", size = 16133855, upload-time = "2025-09-09T15:58:31.349Z" },
- { url = "https://files.pythonhosted.org/packages/7a/ca/c9de3ea397d576f1b6753eaa906d4cdef1bf97589a6d9825a349b4729cc2/numpy-2.3.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:179a42101b845a816d464b6fe9a845dfaf308fdfc7925387195570789bb2c970", size = 18652520, upload-time = "2025-09-09T15:58:33.762Z" },
- { url = "https://files.pythonhosted.org/packages/fd/c2/e5ed830e08cd0196351db55db82f65bc0ab05da6ef2b72a836dcf1936d2f/numpy-2.3.3-cp314-cp314t-win32.whl", hash = "sha256:1250c5d3d2562ec4174bce2e3a1523041595f9b651065e4a4473f5f48a6bc8a5", size = 6515371, upload-time = "2025-09-09T15:58:36.04Z" },
- { url = "https://files.pythonhosted.org/packages/47/c7/b0f6b5b67f6788a0725f744496badbb604d226bf233ba716683ebb47b570/numpy-2.3.3-cp314-cp314t-win_amd64.whl", hash = "sha256:b37a0b2e5935409daebe82c1e42274d30d9dd355852529eab91dab8dcca7419f", size = 13112576, upload-time = "2025-09-09T15:58:37.927Z" },
- { url = "https://files.pythonhosted.org/packages/06/b9/33bba5ff6fb679aa0b1f8a07e853f002a6b04b9394db3069a1270a7784ca/numpy-2.3.3-cp314-cp314t-win_arm64.whl", hash = "sha256:78c9f6560dc7e6b3990e32df7ea1a50bbd0e2a111e05209963f5ddcab7073b0b", size = 10545953, upload-time = "2025-09-09T15:58:40.576Z" },
+version = "2.3.4"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/b5/f4/098d2270d52b41f1bd7db9fc288aaa0400cb48c2a3e2af6fa365d9720947/numpy-2.3.4.tar.gz", hash = "sha256:a7d018bfedb375a8d979ac758b120ba846a7fe764911a64465fd87b8729f4a6a", size = 20582187, upload-time = "2025-10-15T16:18:11.77Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/96/7a/02420400b736f84317e759291b8edaeee9dc921f72b045475a9cbdb26b17/numpy-2.3.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:ef1b5a3e808bc40827b5fa2c8196151a4c5abe110e1726949d7abddfe5c7ae11", size = 20957727, upload-time = "2025-10-15T16:15:44.9Z" },
+ { url = "https://files.pythonhosted.org/packages/18/90/a014805d627aa5750f6f0e878172afb6454552da929144b3c07fcae1bb13/numpy-2.3.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c2f91f496a87235c6aaf6d3f3d89b17dba64996abadccb289f48456cff931ca9", size = 14187262, upload-time = "2025-10-15T16:15:47.761Z" },
+ { url = "https://files.pythonhosted.org/packages/c7/e4/0a94b09abe89e500dc748e7515f21a13e30c5c3fe3396e6d4ac108c25fca/numpy-2.3.4-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:f77e5b3d3da652b474cc80a14084927a5e86a5eccf54ca8ca5cbd697bf7f2667", size = 5115992, upload-time = "2025-10-15T16:15:50.144Z" },
+ { url = "https://files.pythonhosted.org/packages/88/dd/db77c75b055c6157cbd4f9c92c4458daef0dd9cbe6d8d2fe7f803cb64c37/numpy-2.3.4-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:8ab1c5f5ee40d6e01cbe96de5863e39b215a4d24e7d007cad56c7184fdf4aeef", size = 6648672, upload-time = "2025-10-15T16:15:52.442Z" },
+ { url = "https://files.pythonhosted.org/packages/e1/e6/e31b0d713719610e406c0ea3ae0d90760465b086da8783e2fd835ad59027/numpy-2.3.4-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:77b84453f3adcb994ddbd0d1c5d11db2d6bda1a2b7fd5ac5bd4649d6f5dc682e", size = 14284156, upload-time = "2025-10-15T16:15:54.351Z" },
+ { url = "https://files.pythonhosted.org/packages/f9/58/30a85127bfee6f108282107caf8e06a1f0cc997cb6b52cdee699276fcce4/numpy-2.3.4-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4121c5beb58a7f9e6dfdee612cb24f4df5cd4db6e8261d7f4d7450a997a65d6a", size = 16641271, upload-time = "2025-10-15T16:15:56.67Z" },
+ { url = "https://files.pythonhosted.org/packages/06/f2/2e06a0f2adf23e3ae29283ad96959267938d0efd20a2e25353b70065bfec/numpy-2.3.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:65611ecbb00ac9846efe04db15cbe6186f562f6bb7e5e05f077e53a599225d16", size = 16059531, upload-time = "2025-10-15T16:15:59.412Z" },
+ { url = "https://files.pythonhosted.org/packages/b0/e7/b106253c7c0d5dc352b9c8fab91afd76a93950998167fa3e5afe4ef3a18f/numpy-2.3.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:dabc42f9c6577bcc13001b8810d300fe814b4cfbe8a92c873f269484594f9786", size = 18578983, upload-time = "2025-10-15T16:16:01.804Z" },
+ { url = "https://files.pythonhosted.org/packages/73/e3/04ecc41e71462276ee867ccbef26a4448638eadecf1bc56772c9ed6d0255/numpy-2.3.4-cp312-cp312-win32.whl", hash = "sha256:a49d797192a8d950ca59ee2d0337a4d804f713bb5c3c50e8db26d49666e351dc", size = 6291380, upload-time = "2025-10-15T16:16:03.938Z" },
+ { url = "https://files.pythonhosted.org/packages/3d/a8/566578b10d8d0e9955b1b6cd5db4e9d4592dd0026a941ff7994cedda030a/numpy-2.3.4-cp312-cp312-win_amd64.whl", hash = "sha256:985f1e46358f06c2a09921e8921e2c98168ed4ae12ccd6e5e87a4f1857923f32", size = 12787999, upload-time = "2025-10-15T16:16:05.801Z" },
+ { url = "https://files.pythonhosted.org/packages/58/22/9c903a957d0a8071b607f5b1bff0761d6e608b9a965945411f867d515db1/numpy-2.3.4-cp312-cp312-win_arm64.whl", hash = "sha256:4635239814149e06e2cb9db3dd584b2fa64316c96f10656983b8026a82e6e4db", size = 10197412, upload-time = "2025-10-15T16:16:07.854Z" },
+ { url = "https://files.pythonhosted.org/packages/57/7e/b72610cc91edf138bc588df5150957a4937221ca6058b825b4725c27be62/numpy-2.3.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:c090d4860032b857d94144d1a9976b8e36709e40386db289aaf6672de2a81966", size = 20950335, upload-time = "2025-10-15T16:16:10.304Z" },
+ { url = "https://files.pythonhosted.org/packages/3e/46/bdd3370dcea2f95ef14af79dbf81e6927102ddf1cc54adc0024d61252fd9/numpy-2.3.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a13fc473b6db0be619e45f11f9e81260f7302f8d180c49a22b6e6120022596b3", size = 14179878, upload-time = "2025-10-15T16:16:12.595Z" },
+ { url = "https://files.pythonhosted.org/packages/ac/01/5a67cb785bda60f45415d09c2bc245433f1c68dd82eef9c9002c508b5a65/numpy-2.3.4-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:3634093d0b428e6c32c3a69b78e554f0cd20ee420dcad5a9f3b2a63762ce4197", size = 5108673, upload-time = "2025-10-15T16:16:14.877Z" },
+ { url = "https://files.pythonhosted.org/packages/c2/cd/8428e23a9fcebd33988f4cb61208fda832800ca03781f471f3727a820704/numpy-2.3.4-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:043885b4f7e6e232d7df4f51ffdef8c36320ee9d5f227b380ea636722c7ed12e", size = 6641438, upload-time = "2025-10-15T16:16:16.805Z" },
+ { url = "https://files.pythonhosted.org/packages/3e/d1/913fe563820f3c6b079f992458f7331278dcd7ba8427e8e745af37ddb44f/numpy-2.3.4-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4ee6a571d1e4f0ea6d5f22d6e5fbd6ed1dc2b18542848e1e7301bd190500c9d7", size = 14281290, upload-time = "2025-10-15T16:16:18.764Z" },
+ { url = "https://files.pythonhosted.org/packages/9e/7e/7d306ff7cb143e6d975cfa7eb98a93e73495c4deabb7d1b5ecf09ea0fd69/numpy-2.3.4-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fc8a63918b04b8571789688b2780ab2b4a33ab44bfe8ccea36d3eba51228c953", size = 16636543, upload-time = "2025-10-15T16:16:21.072Z" },
+ { url = "https://files.pythonhosted.org/packages/47/6a/8cfc486237e56ccfb0db234945552a557ca266f022d281a2f577b98e955c/numpy-2.3.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:40cc556d5abbc54aabe2b1ae287042d7bdb80c08edede19f0c0afb36ae586f37", size = 16056117, upload-time = "2025-10-15T16:16:23.369Z" },
+ { url = "https://files.pythonhosted.org/packages/b1/0e/42cb5e69ea901e06ce24bfcc4b5664a56f950a70efdcf221f30d9615f3f3/numpy-2.3.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ecb63014bb7f4ce653f8be7f1df8cbc6093a5a2811211770f6606cc92b5a78fd", size = 18577788, upload-time = "2025-10-15T16:16:27.496Z" },
+ { url = "https://files.pythonhosted.org/packages/86/92/41c3d5157d3177559ef0a35da50f0cda7fa071f4ba2306dd36818591a5bc/numpy-2.3.4-cp313-cp313-win32.whl", hash = "sha256:e8370eb6925bb8c1c4264fec52b0384b44f675f191df91cbe0140ec9f0955646", size = 6282620, upload-time = "2025-10-15T16:16:29.811Z" },
+ { url = "https://files.pythonhosted.org/packages/09/97/fd421e8bc50766665ad35536c2bb4ef916533ba1fdd053a62d96cc7c8b95/numpy-2.3.4-cp313-cp313-win_amd64.whl", hash = "sha256:56209416e81a7893036eea03abcb91c130643eb14233b2515c90dcac963fe99d", size = 12784672, upload-time = "2025-10-15T16:16:31.589Z" },
+ { url = "https://files.pythonhosted.org/packages/ad/df/5474fb2f74970ca8eb978093969b125a84cc3d30e47f82191f981f13a8a0/numpy-2.3.4-cp313-cp313-win_arm64.whl", hash = "sha256:a700a4031bc0fd6936e78a752eefb79092cecad2599ea9c8039c548bc097f9bc", size = 10196702, upload-time = "2025-10-15T16:16:33.902Z" },
+ { url = "https://files.pythonhosted.org/packages/11/83/66ac031464ec1767ea3ed48ce40f615eb441072945e98693bec0bcd056cc/numpy-2.3.4-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:86966db35c4040fdca64f0816a1c1dd8dbd027d90fca5a57e00e1ca4cd41b879", size = 21049003, upload-time = "2025-10-15T16:16:36.101Z" },
+ { url = "https://files.pythonhosted.org/packages/5f/99/5b14e0e686e61371659a1d5bebd04596b1d72227ce36eed121bb0aeab798/numpy-2.3.4-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:838f045478638b26c375ee96ea89464d38428c69170360b23a1a50fa4baa3562", size = 14302980, upload-time = "2025-10-15T16:16:39.124Z" },
+ { url = "https://files.pythonhosted.org/packages/2c/44/e9486649cd087d9fc6920e3fc3ac2aba10838d10804b1e179fb7cbc4e634/numpy-2.3.4-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:d7315ed1dab0286adca467377c8381cd748f3dc92235f22a7dfc42745644a96a", size = 5231472, upload-time = "2025-10-15T16:16:41.168Z" },
+ { url = "https://files.pythonhosted.org/packages/3e/51/902b24fa8887e5fe2063fd61b1895a476d0bbf46811ab0c7fdf4bd127345/numpy-2.3.4-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:84f01a4d18b2cc4ade1814a08e5f3c907b079c847051d720fad15ce37aa930b6", size = 6739342, upload-time = "2025-10-15T16:16:43.777Z" },
+ { url = "https://files.pythonhosted.org/packages/34/f1/4de9586d05b1962acdcdb1dc4af6646361a643f8c864cef7c852bf509740/numpy-2.3.4-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:817e719a868f0dacde4abdfc5c1910b301877970195db9ab6a5e2c4bd5b121f7", size = 14354338, upload-time = "2025-10-15T16:16:46.081Z" },
+ { url = "https://files.pythonhosted.org/packages/1f/06/1c16103b425de7969d5a76bdf5ada0804b476fed05d5f9e17b777f1cbefd/numpy-2.3.4-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:85e071da78d92a214212cacea81c6da557cab307f2c34b5f85b628e94803f9c0", size = 16702392, upload-time = "2025-10-15T16:16:48.455Z" },
+ { url = "https://files.pythonhosted.org/packages/34/b2/65f4dc1b89b5322093572b6e55161bb42e3e0487067af73627f795cc9d47/numpy-2.3.4-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:2ec646892819370cf3558f518797f16597b4e4669894a2ba712caccc9da53f1f", size = 16134998, upload-time = "2025-10-15T16:16:51.114Z" },
+ { url = "https://files.pythonhosted.org/packages/d4/11/94ec578896cdb973aaf56425d6c7f2aff4186a5c00fac15ff2ec46998b46/numpy-2.3.4-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:035796aaaddfe2f9664b9a9372f089cfc88bd795a67bd1bfe15e6e770934cf64", size = 18651574, upload-time = "2025-10-15T16:16:53.429Z" },
+ { url = "https://files.pythonhosted.org/packages/62/b7/7efa763ab33dbccf56dade36938a77345ce8e8192d6b39e470ca25ff3cd0/numpy-2.3.4-cp313-cp313t-win32.whl", hash = "sha256:fea80f4f4cf83b54c3a051f2f727870ee51e22f0248d3114b8e755d160b38cfb", size = 6413135, upload-time = "2025-10-15T16:16:55.992Z" },
+ { url = "https://files.pythonhosted.org/packages/43/70/aba4c38e8400abcc2f345e13d972fb36c26409b3e644366db7649015f291/numpy-2.3.4-cp313-cp313t-win_amd64.whl", hash = "sha256:15eea9f306b98e0be91eb344a94c0e630689ef302e10c2ce5f7e11905c704f9c", size = 12928582, upload-time = "2025-10-15T16:16:57.943Z" },
+ { url = "https://files.pythonhosted.org/packages/67/63/871fad5f0073fc00fbbdd7232962ea1ac40eeaae2bba66c76214f7954236/numpy-2.3.4-cp313-cp313t-win_arm64.whl", hash = "sha256:b6c231c9c2fadbae4011ca5e7e83e12dc4a5072f1a1d85a0a7b3ed754d145a40", size = 10266691, upload-time = "2025-10-15T16:17:00.048Z" },
+ { url = "https://files.pythonhosted.org/packages/72/71/ae6170143c115732470ae3a2d01512870dd16e0953f8a6dc89525696069b/numpy-2.3.4-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:81c3e6d8c97295a7360d367f9f8553973651b76907988bb6066376bc2252f24e", size = 20955580, upload-time = "2025-10-15T16:17:02.509Z" },
+ { url = "https://files.pythonhosted.org/packages/af/39/4be9222ffd6ca8a30eda033d5f753276a9c3426c397bb137d8e19dedd200/numpy-2.3.4-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:7c26b0b2bf58009ed1f38a641f3db4be8d960a417ca96d14e5b06df1506d41ff", size = 14188056, upload-time = "2025-10-15T16:17:04.873Z" },
+ { url = "https://files.pythonhosted.org/packages/6c/3d/d85f6700d0a4aa4f9491030e1021c2b2b7421b2b38d01acd16734a2bfdc7/numpy-2.3.4-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:62b2198c438058a20b6704351b35a1d7db881812d8512d67a69c9de1f18ca05f", size = 5116555, upload-time = "2025-10-15T16:17:07.499Z" },
+ { url = "https://files.pythonhosted.org/packages/bf/04/82c1467d86f47eee8a19a464c92f90a9bb68ccf14a54c5224d7031241ffb/numpy-2.3.4-cp314-cp314-macosx_14_0_x86_64.whl", hash = "sha256:9d729d60f8d53a7361707f4b68a9663c968882dd4f09e0d58c044c8bf5faee7b", size = 6643581, upload-time = "2025-10-15T16:17:09.774Z" },
+ { url = "https://files.pythonhosted.org/packages/0c/d3/c79841741b837e293f48bd7db89d0ac7a4f2503b382b78a790ef1dc778a5/numpy-2.3.4-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bd0c630cf256b0a7fd9d0a11c9413b42fef5101219ce6ed5a09624f5a65392c7", size = 14299186, upload-time = "2025-10-15T16:17:11.937Z" },
+ { url = "https://files.pythonhosted.org/packages/e8/7e/4a14a769741fbf237eec5a12a2cbc7a4c4e061852b6533bcb9e9a796c908/numpy-2.3.4-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d5e081bc082825f8b139f9e9fe42942cb4054524598aaeb177ff476cc76d09d2", size = 16638601, upload-time = "2025-10-15T16:17:14.391Z" },
+ { url = "https://files.pythonhosted.org/packages/93/87/1c1de269f002ff0a41173fe01dcc925f4ecff59264cd8f96cf3b60d12c9b/numpy-2.3.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:15fb27364ed84114438fff8aaf998c9e19adbeba08c0b75409f8c452a8692c52", size = 16074219, upload-time = "2025-10-15T16:17:17.058Z" },
+ { url = "https://files.pythonhosted.org/packages/cd/28/18f72ee77408e40a76d691001ae599e712ca2a47ddd2c4f695b16c65f077/numpy-2.3.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:85d9fb2d8cd998c84d13a79a09cc0c1091648e848e4e6249b0ccd7f6b487fa26", size = 18576702, upload-time = "2025-10-15T16:17:19.379Z" },
+ { url = "https://files.pythonhosted.org/packages/c3/76/95650169b465ececa8cf4b2e8f6df255d4bf662775e797ade2025cc51ae6/numpy-2.3.4-cp314-cp314-win32.whl", hash = "sha256:e73d63fd04e3a9d6bc187f5455d81abfad05660b212c8804bf3b407e984cd2bc", size = 6337136, upload-time = "2025-10-15T16:17:22.886Z" },
+ { url = "https://files.pythonhosted.org/packages/dc/89/a231a5c43ede5d6f77ba4a91e915a87dea4aeea76560ba4d2bf185c683f0/numpy-2.3.4-cp314-cp314-win_amd64.whl", hash = "sha256:3da3491cee49cf16157e70f607c03a217ea6647b1cea4819c4f48e53d49139b9", size = 12920542, upload-time = "2025-10-15T16:17:24.783Z" },
+ { url = "https://files.pythonhosted.org/packages/0d/0c/ae9434a888f717c5ed2ff2393b3f344f0ff6f1c793519fa0c540461dc530/numpy-2.3.4-cp314-cp314-win_arm64.whl", hash = "sha256:6d9cd732068e8288dbe2717177320723ccec4fb064123f0caf9bbd90ab5be868", size = 10480213, upload-time = "2025-10-15T16:17:26.935Z" },
+ { url = "https://files.pythonhosted.org/packages/83/4b/c4a5f0841f92536f6b9592694a5b5f68c9ab37b775ff342649eadf9055d3/numpy-2.3.4-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:22758999b256b595cf0b1d102b133bb61866ba5ceecf15f759623b64c020c9ec", size = 21052280, upload-time = "2025-10-15T16:17:29.638Z" },
+ { url = "https://files.pythonhosted.org/packages/3e/80/90308845fc93b984d2cc96d83e2324ce8ad1fd6efea81b324cba4b673854/numpy-2.3.4-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:9cb177bc55b010b19798dc5497d540dea67fd13a8d9e882b2dae71de0cf09eb3", size = 14302930, upload-time = "2025-10-15T16:17:32.384Z" },
+ { url = "https://files.pythonhosted.org/packages/3d/4e/07439f22f2a3b247cec4d63a713faae55e1141a36e77fb212881f7cda3fb/numpy-2.3.4-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:0f2bcc76f1e05e5ab58893407c63d90b2029908fa41f9f1cc51eecce936c3365", size = 5231504, upload-time = "2025-10-15T16:17:34.515Z" },
+ { url = "https://files.pythonhosted.org/packages/ab/de/1e11f2547e2fe3d00482b19721855348b94ada8359aef5d40dd57bfae9df/numpy-2.3.4-cp314-cp314t-macosx_14_0_x86_64.whl", hash = "sha256:8dc20bde86802df2ed8397a08d793da0ad7a5fd4ea3ac85d757bf5dd4ad7c252", size = 6739405, upload-time = "2025-10-15T16:17:36.128Z" },
+ { url = "https://files.pythonhosted.org/packages/3b/40/8cd57393a26cebe2e923005db5134a946c62fa56a1087dc7c478f3e30837/numpy-2.3.4-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5e199c087e2aa71c8f9ce1cb7a8e10677dc12457e7cc1be4798632da37c3e86e", size = 14354866, upload-time = "2025-10-15T16:17:38.884Z" },
+ { url = "https://files.pythonhosted.org/packages/93/39/5b3510f023f96874ee6fea2e40dfa99313a00bf3ab779f3c92978f34aace/numpy-2.3.4-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:85597b2d25ddf655495e2363fe044b0ae999b75bc4d630dc0d886484b03a5eb0", size = 16703296, upload-time = "2025-10-15T16:17:41.564Z" },
+ { url = "https://files.pythonhosted.org/packages/41/0d/19bb163617c8045209c1996c4e427bccbc4bbff1e2c711f39203c8ddbb4a/numpy-2.3.4-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:04a69abe45b49c5955923cf2c407843d1c85013b424ae8a560bba16c92fe44a0", size = 16136046, upload-time = "2025-10-15T16:17:43.901Z" },
+ { url = "https://files.pythonhosted.org/packages/e2/c1/6dba12fdf68b02a21ac411c9df19afa66bed2540f467150ca64d246b463d/numpy-2.3.4-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:e1708fac43ef8b419c975926ce1eaf793b0c13b7356cfab6ab0dc34c0a02ac0f", size = 18652691, upload-time = "2025-10-15T16:17:46.247Z" },
+ { url = "https://files.pythonhosted.org/packages/f8/73/f85056701dbbbb910c51d846c58d29fd46b30eecd2b6ba760fc8b8a1641b/numpy-2.3.4-cp314-cp314t-win32.whl", hash = "sha256:863e3b5f4d9915aaf1b8ec79ae560ad21f0b8d5e3adc31e73126491bb86dee1d", size = 6485782, upload-time = "2025-10-15T16:17:48.872Z" },
+ { url = "https://files.pythonhosted.org/packages/17/90/28fa6f9865181cb817c2471ee65678afa8a7e2a1fb16141473d5fa6bacc3/numpy-2.3.4-cp314-cp314t-win_amd64.whl", hash = "sha256:962064de37b9aef801d33bc579690f8bfe6c5e70e29b61783f60bcba838a14d6", size = 13113301, upload-time = "2025-10-15T16:17:50.938Z" },
+ { url = "https://files.pythonhosted.org/packages/54/23/08c002201a8e7e1f9afba93b97deceb813252d9cfd0d3351caed123dcf97/numpy-2.3.4-cp314-cp314t-win_arm64.whl", hash = "sha256:8b5a9a39c45d852b62693d9b3f3e0fe052541f804296ff401a72a1b60edafb29", size = 10547532, upload-time = "2025-10-15T16:17:53.48Z" },
]
[[package]]
@@ -716,7 +732,7 @@ wheels = [
[[package]]
name = "pandas"
-version = "2.3.2"
+version = "2.3.3"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "numpy" },
@@ -724,28 +740,41 @@ dependencies = [
{ name = "pytz" },
{ name = "tzdata" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/79/8e/0e90233ac205ad182bd6b422532695d2b9414944a280488105d598c70023/pandas-2.3.2.tar.gz", hash = "sha256:ab7b58f8f82706890924ccdfb5f48002b83d2b5a3845976a9fb705d36c34dcdb", size = 4488684, upload-time = "2025-08-21T10:28:29.257Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/ec/db/614c20fb7a85a14828edd23f1c02db58a30abf3ce76f38806155d160313c/pandas-2.3.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:3fbb977f802156e7a3f829e9d1d5398f6192375a3e2d1a9ee0803e35fe70a2b9", size = 11587652, upload-time = "2025-08-21T10:27:15.888Z" },
- { url = "https://files.pythonhosted.org/packages/99/b0/756e52f6582cade5e746f19bad0517ff27ba9c73404607c0306585c201b3/pandas-2.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1b9b52693123dd234b7c985c68b709b0b009f4521000d0525f2b95c22f15944b", size = 10717686, upload-time = "2025-08-21T10:27:18.486Z" },
- { url = "https://files.pythonhosted.org/packages/37/4c/dd5ccc1e357abfeee8353123282de17997f90ff67855f86154e5a13b81e5/pandas-2.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0bd281310d4f412733f319a5bc552f86d62cddc5f51d2e392c8787335c994175", size = 11278722, upload-time = "2025-08-21T10:27:21.149Z" },
- { url = "https://files.pythonhosted.org/packages/d3/a4/f7edcfa47e0a88cda0be8b068a5bae710bf264f867edfdf7b71584ace362/pandas-2.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96d31a6b4354e3b9b8a2c848af75d31da390657e3ac6f30c05c82068b9ed79b9", size = 11987803, upload-time = "2025-08-21T10:27:23.767Z" },
- { url = "https://files.pythonhosted.org/packages/f6/61/1bce4129f93ab66f1c68b7ed1c12bac6a70b1b56c5dab359c6bbcd480b52/pandas-2.3.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:df4df0b9d02bb873a106971bb85d448378ef14b86ba96f035f50bbd3688456b4", size = 12766345, upload-time = "2025-08-21T10:27:26.6Z" },
- { url = "https://files.pythonhosted.org/packages/8e/46/80d53de70fee835531da3a1dae827a1e76e77a43ad22a8cd0f8142b61587/pandas-2.3.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:213a5adf93d020b74327cb2c1b842884dbdd37f895f42dcc2f09d451d949f811", size = 13439314, upload-time = "2025-08-21T10:27:29.213Z" },
- { url = "https://files.pythonhosted.org/packages/28/30/8114832daff7489f179971dbc1d854109b7f4365a546e3ea75b6516cea95/pandas-2.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:8c13b81a9347eb8c7548f53fd9a4f08d4dfe996836543f805c987bafa03317ae", size = 10983326, upload-time = "2025-08-21T10:27:31.901Z" },
- { url = "https://files.pythonhosted.org/packages/27/64/a2f7bf678af502e16b472527735d168b22b7824e45a4d7e96a4fbb634b59/pandas-2.3.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:0c6ecbac99a354a051ef21c5307601093cb9e0f4b1855984a084bfec9302699e", size = 11531061, upload-time = "2025-08-21T10:27:34.647Z" },
- { url = "https://files.pythonhosted.org/packages/54/4c/c3d21b2b7769ef2f4c2b9299fcadd601efa6729f1357a8dbce8dd949ed70/pandas-2.3.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:c6f048aa0fd080d6a06cc7e7537c09b53be6642d330ac6f54a600c3ace857ee9", size = 10668666, upload-time = "2025-08-21T10:27:37.203Z" },
- { url = "https://files.pythonhosted.org/packages/50/e2/f775ba76ecfb3424d7f5862620841cf0edb592e9abd2d2a5387d305fe7a8/pandas-2.3.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0064187b80a5be6f2f9c9d6bdde29372468751dfa89f4211a3c5871854cfbf7a", size = 11332835, upload-time = "2025-08-21T10:27:40.188Z" },
- { url = "https://files.pythonhosted.org/packages/8f/52/0634adaace9be2d8cac9ef78f05c47f3a675882e068438b9d7ec7ef0c13f/pandas-2.3.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4ac8c320bded4718b298281339c1a50fb00a6ba78cb2a63521c39bec95b0209b", size = 12057211, upload-time = "2025-08-21T10:27:43.117Z" },
- { url = "https://files.pythonhosted.org/packages/0b/9d/2df913f14b2deb9c748975fdb2491da1a78773debb25abbc7cbc67c6b549/pandas-2.3.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:114c2fe4f4328cf98ce5716d1532f3ab79c5919f95a9cfee81d9140064a2e4d6", size = 12749277, upload-time = "2025-08-21T10:27:45.474Z" },
- { url = "https://files.pythonhosted.org/packages/87/af/da1a2417026bd14d98c236dba88e39837182459d29dcfcea510b2ac9e8a1/pandas-2.3.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:48fa91c4dfb3b2b9bfdb5c24cd3567575f4e13f9636810462ffed8925352be5a", size = 13415256, upload-time = "2025-08-21T10:27:49.885Z" },
- { url = "https://files.pythonhosted.org/packages/22/3c/f2af1ce8840ef648584a6156489636b5692c162771918aa95707c165ad2b/pandas-2.3.2-cp313-cp313-win_amd64.whl", hash = "sha256:12d039facec710f7ba305786837d0225a3444af7bbd9c15c32ca2d40d157ed8b", size = 10982579, upload-time = "2025-08-21T10:28:08.435Z" },
- { url = "https://files.pythonhosted.org/packages/f3/98/8df69c4097a6719e357dc249bf437b8efbde808038268e584421696cbddf/pandas-2.3.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:c624b615ce97864eb588779ed4046186f967374185c047070545253a52ab2d57", size = 12028163, upload-time = "2025-08-21T10:27:52.232Z" },
- { url = "https://files.pythonhosted.org/packages/0e/23/f95cbcbea319f349e10ff90db488b905c6883f03cbabd34f6b03cbc3c044/pandas-2.3.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:0cee69d583b9b128823d9514171cabb6861e09409af805b54459bd0c821a35c2", size = 11391860, upload-time = "2025-08-21T10:27:54.673Z" },
- { url = "https://files.pythonhosted.org/packages/ad/1b/6a984e98c4abee22058aa75bfb8eb90dce58cf8d7296f8bc56c14bc330b0/pandas-2.3.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2319656ed81124982900b4c37f0e0c58c015af9a7bbc62342ba5ad07ace82ba9", size = 11309830, upload-time = "2025-08-21T10:27:56.957Z" },
- { url = "https://files.pythonhosted.org/packages/15/d5/f0486090eb18dd8710bf60afeaf638ba6817047c0c8ae5c6a25598665609/pandas-2.3.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b37205ad6f00d52f16b6d09f406434ba928c1a1966e2771006a9033c736d30d2", size = 11883216, upload-time = "2025-08-21T10:27:59.302Z" },
- { url = "https://files.pythonhosted.org/packages/10/86/692050c119696da19e20245bbd650d8dfca6ceb577da027c3a73c62a047e/pandas-2.3.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:837248b4fc3a9b83b9c6214699a13f069dc13510a6a6d7f9ba33145d2841a012", size = 12699743, upload-time = "2025-08-21T10:28:02.447Z" },
- { url = "https://files.pythonhosted.org/packages/cd/d7/612123674d7b17cf345aad0a10289b2a384bff404e0463a83c4a3a59d205/pandas-2.3.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:d2c3554bd31b731cd6490d94a28f3abb8dd770634a9e06eb6d2911b9827db370", size = 13186141, upload-time = "2025-08-21T10:28:05.377Z" },
+sdist = { url = "https://files.pythonhosted.org/packages/33/01/d40b85317f86cf08d853a4f495195c73815fdf205eef3993821720274518/pandas-2.3.3.tar.gz", hash = "sha256:e05e1af93b977f7eafa636d043f9f94c7ee3ac81af99c13508215942e64c993b", size = 4495223, upload-time = "2025-09-29T23:34:51.853Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/9c/fb/231d89e8637c808b997d172b18e9d4a4bc7bf31296196c260526055d1ea0/pandas-2.3.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6d21f6d74eb1725c2efaa71a2bfc661a0689579b58e9c0ca58a739ff0b002b53", size = 11597846, upload-time = "2025-09-29T23:19:48.856Z" },
+ { url = "https://files.pythonhosted.org/packages/5c/bd/bf8064d9cfa214294356c2d6702b716d3cf3bb24be59287a6a21e24cae6b/pandas-2.3.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3fd2f887589c7aa868e02632612ba39acb0b8948faf5cc58f0850e165bd46f35", size = 10729618, upload-time = "2025-09-29T23:39:08.659Z" },
+ { url = "https://files.pythonhosted.org/packages/57/56/cf2dbe1a3f5271370669475ead12ce77c61726ffd19a35546e31aa8edf4e/pandas-2.3.3-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ecaf1e12bdc03c86ad4a7ea848d66c685cb6851d807a26aa245ca3d2017a1908", size = 11737212, upload-time = "2025-09-29T23:19:59.765Z" },
+ { url = "https://files.pythonhosted.org/packages/e5/63/cd7d615331b328e287d8233ba9fdf191a9c2d11b6af0c7a59cfcec23de68/pandas-2.3.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b3d11d2fda7eb164ef27ffc14b4fcab16a80e1ce67e9f57e19ec0afaf715ba89", size = 12362693, upload-time = "2025-09-29T23:20:14.098Z" },
+ { url = "https://files.pythonhosted.org/packages/a6/de/8b1895b107277d52f2b42d3a6806e69cfef0d5cf1d0ba343470b9d8e0a04/pandas-2.3.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a68e15f780eddf2b07d242e17a04aa187a7ee12b40b930bfdd78070556550e98", size = 12771002, upload-time = "2025-09-29T23:20:26.76Z" },
+ { url = "https://files.pythonhosted.org/packages/87/21/84072af3187a677c5893b170ba2c8fbe450a6ff911234916da889b698220/pandas-2.3.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:371a4ab48e950033bcf52b6527eccb564f52dc826c02afd9a1bc0ab731bba084", size = 13450971, upload-time = "2025-09-29T23:20:41.344Z" },
+ { url = "https://files.pythonhosted.org/packages/86/41/585a168330ff063014880a80d744219dbf1dd7a1c706e75ab3425a987384/pandas-2.3.3-cp312-cp312-win_amd64.whl", hash = "sha256:a16dcec078a01eeef8ee61bf64074b4e524a2a3f4b3be9326420cabe59c4778b", size = 10992722, upload-time = "2025-09-29T23:20:54.139Z" },
+ { url = "https://files.pythonhosted.org/packages/cd/4b/18b035ee18f97c1040d94debd8f2e737000ad70ccc8f5513f4eefad75f4b/pandas-2.3.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:56851a737e3470de7fa88e6131f41281ed440d29a9268dcbf0002da5ac366713", size = 11544671, upload-time = "2025-09-29T23:21:05.024Z" },
+ { url = "https://files.pythonhosted.org/packages/31/94/72fac03573102779920099bcac1c3b05975c2cb5f01eac609faf34bed1ca/pandas-2.3.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:bdcd9d1167f4885211e401b3036c0c8d9e274eee67ea8d0758a256d60704cfe8", size = 10680807, upload-time = "2025-09-29T23:21:15.979Z" },
+ { url = "https://files.pythonhosted.org/packages/16/87/9472cf4a487d848476865321de18cc8c920b8cab98453ab79dbbc98db63a/pandas-2.3.3-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e32e7cc9af0f1cc15548288a51a3b681cc2a219faa838e995f7dc53dbab1062d", size = 11709872, upload-time = "2025-09-29T23:21:27.165Z" },
+ { url = "https://files.pythonhosted.org/packages/15/07/284f757f63f8a8d69ed4472bfd85122bd086e637bf4ed09de572d575a693/pandas-2.3.3-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:318d77e0e42a628c04dc56bcef4b40de67918f7041c2b061af1da41dcff670ac", size = 12306371, upload-time = "2025-09-29T23:21:40.532Z" },
+ { url = "https://files.pythonhosted.org/packages/33/81/a3afc88fca4aa925804a27d2676d22dcd2031c2ebe08aabd0ae55b9ff282/pandas-2.3.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4e0a175408804d566144e170d0476b15d78458795bb18f1304fb94160cabf40c", size = 12765333, upload-time = "2025-09-29T23:21:55.77Z" },
+ { url = "https://files.pythonhosted.org/packages/8d/0f/b4d4ae743a83742f1153464cf1a8ecfafc3ac59722a0b5c8602310cb7158/pandas-2.3.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:93c2d9ab0fc11822b5eece72ec9587e172f63cff87c00b062f6e37448ced4493", size = 13418120, upload-time = "2025-09-29T23:22:10.109Z" },
+ { url = "https://files.pythonhosted.org/packages/4f/c7/e54682c96a895d0c808453269e0b5928a07a127a15704fedb643e9b0a4c8/pandas-2.3.3-cp313-cp313-win_amd64.whl", hash = "sha256:f8bfc0e12dc78f777f323f55c58649591b2cd0c43534e8355c51d3fede5f4dee", size = 10993991, upload-time = "2025-09-29T23:25:04.889Z" },
+ { url = "https://files.pythonhosted.org/packages/f9/ca/3f8d4f49740799189e1395812f3bf23b5e8fc7c190827d55a610da72ce55/pandas-2.3.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:75ea25f9529fdec2d2e93a42c523962261e567d250b0013b16210e1d40d7c2e5", size = 12048227, upload-time = "2025-09-29T23:22:24.343Z" },
+ { url = "https://files.pythonhosted.org/packages/0e/5a/f43efec3e8c0cc92c4663ccad372dbdff72b60bdb56b2749f04aa1d07d7e/pandas-2.3.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:74ecdf1d301e812db96a465a525952f4dde225fdb6d8e5a521d47e1f42041e21", size = 11411056, upload-time = "2025-09-29T23:22:37.762Z" },
+ { url = "https://files.pythonhosted.org/packages/46/b1/85331edfc591208c9d1a63a06baa67b21d332e63b7a591a5ba42a10bb507/pandas-2.3.3-cp313-cp313t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6435cb949cb34ec11cc9860246ccb2fdc9ecd742c12d3304989017d53f039a78", size = 11645189, upload-time = "2025-09-29T23:22:51.688Z" },
+ { url = "https://files.pythonhosted.org/packages/44/23/78d645adc35d94d1ac4f2a3c4112ab6f5b8999f4898b8cdf01252f8df4a9/pandas-2.3.3-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:900f47d8f20860de523a1ac881c4c36d65efcb2eb850e6948140fa781736e110", size = 12121912, upload-time = "2025-09-29T23:23:05.042Z" },
+ { url = "https://files.pythonhosted.org/packages/53/da/d10013df5e6aaef6b425aa0c32e1fc1f3e431e4bcabd420517dceadce354/pandas-2.3.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:a45c765238e2ed7d7c608fc5bc4a6f88b642f2f01e70c0c23d2224dd21829d86", size = 12712160, upload-time = "2025-09-29T23:23:28.57Z" },
+ { url = "https://files.pythonhosted.org/packages/bd/17/e756653095a083d8a37cbd816cb87148debcfcd920129b25f99dd8d04271/pandas-2.3.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:c4fc4c21971a1a9f4bdb4c73978c7f7256caa3e62b323f70d6cb80db583350bc", size = 13199233, upload-time = "2025-09-29T23:24:24.876Z" },
+ { url = "https://files.pythonhosted.org/packages/04/fd/74903979833db8390b73b3a8a7d30d146d710bd32703724dd9083950386f/pandas-2.3.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:ee15f284898e7b246df8087fc82b87b01686f98ee67d85a17b7ab44143a3a9a0", size = 11540635, upload-time = "2025-09-29T23:25:52.486Z" },
+ { url = "https://files.pythonhosted.org/packages/21/00/266d6b357ad5e6d3ad55093a7e8efc7dd245f5a842b584db9f30b0f0a287/pandas-2.3.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1611aedd912e1ff81ff41c745822980c49ce4a7907537be8692c8dbc31924593", size = 10759079, upload-time = "2025-09-29T23:26:33.204Z" },
+ { url = "https://files.pythonhosted.org/packages/ca/05/d01ef80a7a3a12b2f8bbf16daba1e17c98a2f039cbc8e2f77a2c5a63d382/pandas-2.3.3-cp314-cp314-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6d2cefc361461662ac48810cb14365a365ce864afe85ef1f447ff5a1e99ea81c", size = 11814049, upload-time = "2025-09-29T23:27:15.384Z" },
+ { url = "https://files.pythonhosted.org/packages/15/b2/0e62f78c0c5ba7e3d2c5945a82456f4fac76c480940f805e0b97fcbc2f65/pandas-2.3.3-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ee67acbbf05014ea6c763beb097e03cd629961c8a632075eeb34247120abcb4b", size = 12332638, upload-time = "2025-09-29T23:27:51.625Z" },
+ { url = "https://files.pythonhosted.org/packages/c5/33/dd70400631b62b9b29c3c93d2feee1d0964dc2bae2e5ad7a6c73a7f25325/pandas-2.3.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:c46467899aaa4da076d5abc11084634e2d197e9460643dd455ac3db5856b24d6", size = 12886834, upload-time = "2025-09-29T23:28:21.289Z" },
+ { url = "https://files.pythonhosted.org/packages/d3/18/b5d48f55821228d0d2692b34fd5034bb185e854bdb592e9c640f6290e012/pandas-2.3.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:6253c72c6a1d990a410bc7de641d34053364ef8bcd3126f7e7450125887dffe3", size = 13409925, upload-time = "2025-09-29T23:28:58.261Z" },
+ { url = "https://files.pythonhosted.org/packages/a6/3d/124ac75fcd0ecc09b8fdccb0246ef65e35b012030defb0e0eba2cbbbe948/pandas-2.3.3-cp314-cp314-win_amd64.whl", hash = "sha256:1b07204a219b3b7350abaae088f451860223a52cfb8a6c53358e7948735158e5", size = 11109071, upload-time = "2025-09-29T23:32:27.484Z" },
+ { url = "https://files.pythonhosted.org/packages/89/9c/0e21c895c38a157e0faa1fb64587a9226d6dd46452cac4532d80c3c4a244/pandas-2.3.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:2462b1a365b6109d275250baaae7b760fd25c726aaca0054649286bcfbb3e8ec", size = 12048504, upload-time = "2025-09-29T23:29:31.47Z" },
+ { url = "https://files.pythonhosted.org/packages/d7/82/b69a1c95df796858777b68fbe6a81d37443a33319761d7c652ce77797475/pandas-2.3.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:0242fe9a49aa8b4d78a4fa03acb397a58833ef6199e9aa40a95f027bb3a1b6e7", size = 11410702, upload-time = "2025-09-29T23:29:54.591Z" },
+ { url = "https://files.pythonhosted.org/packages/f9/88/702bde3ba0a94b8c73a0181e05144b10f13f29ebfc2150c3a79062a8195d/pandas-2.3.3-cp314-cp314t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a21d830e78df0a515db2b3d2f5570610f5e6bd2e27749770e8bb7b524b89b450", size = 11634535, upload-time = "2025-09-29T23:30:21.003Z" },
+ { url = "https://files.pythonhosted.org/packages/a4/1e/1bac1a839d12e6a82ec6cb40cda2edde64a2013a66963293696bbf31fbbb/pandas-2.3.3-cp314-cp314t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2e3ebdb170b5ef78f19bfb71b0dc5dc58775032361fa188e814959b74d726dd5", size = 12121582, upload-time = "2025-09-29T23:30:43.391Z" },
+ { url = "https://files.pythonhosted.org/packages/44/91/483de934193e12a3b1d6ae7c8645d083ff88dec75f46e827562f1e4b4da6/pandas-2.3.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:d051c0e065b94b7a3cea50eb1ec32e912cd96dba41647eb24104b6c6c14c5788", size = 12699963, upload-time = "2025-09-29T23:31:10.009Z" },
+ { url = "https://files.pythonhosted.org/packages/70/44/5191d2e4026f86a2a109053e194d3ba7a31a2d10a9c2348368c63ed4e85a/pandas-2.3.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:3869faf4bd07b3b66a9f462417d0ca3a9df29a9f6abd5d0d0dbab15dac7abe87", size = 13202175, upload-time = "2025-09-29T23:31:59.173Z" },
]
[[package]]
@@ -759,11 +788,11 @@ wheels = [
[[package]]
name = "platformdirs"
-version = "4.4.0"
+version = "4.5.0"
source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/23/e8/21db9c9987b0e728855bd57bff6984f67952bea55d6f75e055c46b5383e8/platformdirs-4.4.0.tar.gz", hash = "sha256:ca753cf4d81dc309bc67b0ea38fd15dc97bc30ce419a7f58d13eb3bf14c4febf", size = 21634, upload-time = "2025-08-26T14:32:04.268Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/61/33/9611380c2bdb1225fdef633e2a9610622310fed35ab11dac9620972ee088/platformdirs-4.5.0.tar.gz", hash = "sha256:70ddccdd7c99fc5942e9fc25636a8b34d04c24b335100223152c2803e4063312", size = 21632, upload-time = "2025-10-08T17:44:48.791Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/40/4b/2028861e724d3bd36227adfa20d3fd24c3fc6d52032f4a93c133be5d17ce/platformdirs-4.4.0-py3-none-any.whl", hash = "sha256:abd01743f24e5287cd7a5db3752faf1a2d65353f38ec26d98e25a6db65958c85", size = 18654, upload-time = "2025-08-26T14:32:02.735Z" },
+ { url = "https://files.pythonhosted.org/packages/73/cb/ac7874b3e5d58441674fb70742e6c374b28b0c7cb988d37d991cde47166c/platformdirs-4.5.0-py3-none-any.whl", hash = "sha256:e578a81bb873cbb89a41fcc904c7ef523cc18284b7e3b3ccf06aca1403b7ebd3", size = 18651, upload-time = "2025-10-08T17:44:47.223Z" },
]
[[package]]
@@ -1030,6 +1059,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892, upload-time = "2024-03-01T18:36:18.57Z" },
]
+[[package]]
+name = "pytokens"
+version = "0.2.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/d4/c2/dbadcdddb412a267585459142bfd7cc241e6276db69339353ae6e241ab2b/pytokens-0.2.0.tar.gz", hash = "sha256:532d6421364e5869ea57a9523bf385f02586d4662acbcc0342afd69511b4dd43", size = 15368, upload-time = "2025-10-15T08:02:42.738Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/89/5a/c269ea6b348b6f2c32686635df89f32dbe05df1088dd4579302a6f8f99af/pytokens-0.2.0-py3-none-any.whl", hash = "sha256:74d4b318c67f4295c13782ddd9abcb7e297ec5630ad060eb90abf7ebbefe59f8", size = 12038, upload-time = "2025-10-15T08:02:41.694Z" },
+]
+
[[package]]
name = "pytz"
version = "2024.2"
@@ -1041,28 +1079,48 @@ wheels = [
[[package]]
name = "pyyaml"
-version = "6.0.2"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/54/ed/79a089b6be93607fa5cdaedf301d7dfb23af5f25c398d5ead2525b063e17/pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e", size = 130631, upload-time = "2024-08-06T20:33:50.674Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/86/0c/c581167fc46d6d6d7ddcfb8c843a4de25bdd27e4466938109ca68492292c/PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab", size = 183873, upload-time = "2024-08-06T20:32:25.131Z" },
- { url = "https://files.pythonhosted.org/packages/a8/0c/38374f5bb272c051e2a69281d71cba6fdb983413e6758b84482905e29a5d/PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725", size = 173302, upload-time = "2024-08-06T20:32:26.511Z" },
- { url = "https://files.pythonhosted.org/packages/c3/93/9916574aa8c00aa06bbac729972eb1071d002b8e158bd0e83a3b9a20a1f7/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5", size = 739154, upload-time = "2024-08-06T20:32:28.363Z" },
- { url = "https://files.pythonhosted.org/packages/95/0f/b8938f1cbd09739c6da569d172531567dbcc9789e0029aa070856f123984/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425", size = 766223, upload-time = "2024-08-06T20:32:30.058Z" },
- { url = "https://files.pythonhosted.org/packages/b9/2b/614b4752f2e127db5cc206abc23a8c19678e92b23c3db30fc86ab731d3bd/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476", size = 767542, upload-time = "2024-08-06T20:32:31.881Z" },
- { url = "https://files.pythonhosted.org/packages/d4/00/dd137d5bcc7efea1836d6264f049359861cf548469d18da90cd8216cf05f/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48", size = 731164, upload-time = "2024-08-06T20:32:37.083Z" },
- { url = "https://files.pythonhosted.org/packages/c9/1f/4f998c900485e5c0ef43838363ba4a9723ac0ad73a9dc42068b12aaba4e4/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b", size = 756611, upload-time = "2024-08-06T20:32:38.898Z" },
- { url = "https://files.pythonhosted.org/packages/df/d1/f5a275fdb252768b7a11ec63585bc38d0e87c9e05668a139fea92b80634c/PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4", size = 140591, upload-time = "2024-08-06T20:32:40.241Z" },
- { url = "https://files.pythonhosted.org/packages/0c/e8/4f648c598b17c3d06e8753d7d13d57542b30d56e6c2dedf9c331ae56312e/PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8", size = 156338, upload-time = "2024-08-06T20:32:41.93Z" },
- { url = "https://files.pythonhosted.org/packages/ef/e3/3af305b830494fa85d95f6d95ef7fa73f2ee1cc8ef5b495c7c3269fb835f/PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba", size = 181309, upload-time = "2024-08-06T20:32:43.4Z" },
- { url = "https://files.pythonhosted.org/packages/45/9f/3b1c20a0b7a3200524eb0076cc027a970d320bd3a6592873c85c92a08731/PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1", size = 171679, upload-time = "2024-08-06T20:32:44.801Z" },
- { url = "https://files.pythonhosted.org/packages/7c/9a/337322f27005c33bcb656c655fa78325b730324c78620e8328ae28b64d0c/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133", size = 733428, upload-time = "2024-08-06T20:32:46.432Z" },
- { url = "https://files.pythonhosted.org/packages/a3/69/864fbe19e6c18ea3cc196cbe5d392175b4cf3d5d0ac1403ec3f2d237ebb5/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484", size = 763361, upload-time = "2024-08-06T20:32:51.188Z" },
- { url = "https://files.pythonhosted.org/packages/04/24/b7721e4845c2f162d26f50521b825fb061bc0a5afcf9a386840f23ea19fa/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5", size = 759523, upload-time = "2024-08-06T20:32:53.019Z" },
- { url = "https://files.pythonhosted.org/packages/2b/b2/e3234f59ba06559c6ff63c4e10baea10e5e7df868092bf9ab40e5b9c56b6/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc", size = 726660, upload-time = "2024-08-06T20:32:54.708Z" },
- { url = "https://files.pythonhosted.org/packages/fe/0f/25911a9f080464c59fab9027482f822b86bf0608957a5fcc6eaac85aa515/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652", size = 751597, upload-time = "2024-08-06T20:32:56.985Z" },
- { url = "https://files.pythonhosted.org/packages/14/0d/e2c3b43bbce3cf6bd97c840b46088a3031085179e596d4929729d8d68270/PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183", size = 140527, upload-time = "2024-08-06T20:33:03.001Z" },
- { url = "https://files.pythonhosted.org/packages/fa/de/02b54f42487e3d3c6efb3f89428677074ca7bf43aae402517bc7cca949f3/PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563", size = 156446, upload-time = "2024-08-06T20:33:04.33Z" },
+version = "6.0.3"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/05/8e/961c0007c59b8dd7729d542c61a4d537767a59645b82a0b521206e1e25c2/pyyaml-6.0.3.tar.gz", hash = "sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f", size = 130960, upload-time = "2025-09-25T21:33:16.546Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/d1/33/422b98d2195232ca1826284a76852ad5a86fe23e31b009c9886b2d0fb8b2/pyyaml-6.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7f047e29dcae44602496db43be01ad42fc6f1cc0d8cd6c83d342306c32270196", size = 182063, upload-time = "2025-09-25T21:32:11.445Z" },
+ { url = "https://files.pythonhosted.org/packages/89/a0/6cf41a19a1f2f3feab0e9c0b74134aa2ce6849093d5517a0c550fe37a648/pyyaml-6.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fc09d0aa354569bc501d4e787133afc08552722d3ab34836a80547331bb5d4a0", size = 173973, upload-time = "2025-09-25T21:32:12.492Z" },
+ { url = "https://files.pythonhosted.org/packages/ed/23/7a778b6bd0b9a8039df8b1b1d80e2e2ad78aa04171592c8a5c43a56a6af4/pyyaml-6.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9149cad251584d5fb4981be1ecde53a1ca46c891a79788c0df828d2f166bda28", size = 775116, upload-time = "2025-09-25T21:32:13.652Z" },
+ { url = "https://files.pythonhosted.org/packages/65/30/d7353c338e12baef4ecc1b09e877c1970bd3382789c159b4f89d6a70dc09/pyyaml-6.0.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5fdec68f91a0c6739b380c83b951e2c72ac0197ace422360e6d5a959d8d97b2c", size = 844011, upload-time = "2025-09-25T21:32:15.21Z" },
+ { url = "https://files.pythonhosted.org/packages/8b/9d/b3589d3877982d4f2329302ef98a8026e7f4443c765c46cfecc8858c6b4b/pyyaml-6.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ba1cc08a7ccde2d2ec775841541641e4548226580ab850948cbfda66a1befcdc", size = 807870, upload-time = "2025-09-25T21:32:16.431Z" },
+ { url = "https://files.pythonhosted.org/packages/05/c0/b3be26a015601b822b97d9149ff8cb5ead58c66f981e04fedf4e762f4bd4/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8dc52c23056b9ddd46818a57b78404882310fb473d63f17b07d5c40421e47f8e", size = 761089, upload-time = "2025-09-25T21:32:17.56Z" },
+ { url = "https://files.pythonhosted.org/packages/be/8e/98435a21d1d4b46590d5459a22d88128103f8da4c2d4cb8f14f2a96504e1/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:41715c910c881bc081f1e8872880d3c650acf13dfa8214bad49ed4cede7c34ea", size = 790181, upload-time = "2025-09-25T21:32:18.834Z" },
+ { url = "https://files.pythonhosted.org/packages/74/93/7baea19427dcfbe1e5a372d81473250b379f04b1bd3c4c5ff825e2327202/pyyaml-6.0.3-cp312-cp312-win32.whl", hash = "sha256:96b533f0e99f6579b3d4d4995707cf36df9100d67e0c8303a0c55b27b5f99bc5", size = 137658, upload-time = "2025-09-25T21:32:20.209Z" },
+ { url = "https://files.pythonhosted.org/packages/86/bf/899e81e4cce32febab4fb42bb97dcdf66bc135272882d1987881a4b519e9/pyyaml-6.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:5fcd34e47f6e0b794d17de1b4ff496c00986e1c83f7ab2fb8fcfe9616ff7477b", size = 154003, upload-time = "2025-09-25T21:32:21.167Z" },
+ { url = "https://files.pythonhosted.org/packages/1a/08/67bd04656199bbb51dbed1439b7f27601dfb576fb864099c7ef0c3e55531/pyyaml-6.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:64386e5e707d03a7e172c0701abfb7e10f0fb753ee1d773128192742712a98fd", size = 140344, upload-time = "2025-09-25T21:32:22.617Z" },
+ { url = "https://files.pythonhosted.org/packages/d1/11/0fd08f8192109f7169db964b5707a2f1e8b745d4e239b784a5a1dd80d1db/pyyaml-6.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8da9669d359f02c0b91ccc01cac4a67f16afec0dac22c2ad09f46bee0697eba8", size = 181669, upload-time = "2025-09-25T21:32:23.673Z" },
+ { url = "https://files.pythonhosted.org/packages/b1/16/95309993f1d3748cd644e02e38b75d50cbc0d9561d21f390a76242ce073f/pyyaml-6.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2283a07e2c21a2aa78d9c4442724ec1eb15f5e42a723b99cb3d822d48f5f7ad1", size = 173252, upload-time = "2025-09-25T21:32:25.149Z" },
+ { url = "https://files.pythonhosted.org/packages/50/31/b20f376d3f810b9b2371e72ef5adb33879b25edb7a6d072cb7ca0c486398/pyyaml-6.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee2922902c45ae8ccada2c5b501ab86c36525b883eff4255313a253a3160861c", size = 767081, upload-time = "2025-09-25T21:32:26.575Z" },
+ { url = "https://files.pythonhosted.org/packages/49/1e/a55ca81e949270d5d4432fbbd19dfea5321eda7c41a849d443dc92fd1ff7/pyyaml-6.0.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a33284e20b78bd4a18c8c2282d549d10bc8408a2a7ff57653c0cf0b9be0afce5", size = 841159, upload-time = "2025-09-25T21:32:27.727Z" },
+ { url = "https://files.pythonhosted.org/packages/74/27/e5b8f34d02d9995b80abcef563ea1f8b56d20134d8f4e5e81733b1feceb2/pyyaml-6.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0f29edc409a6392443abf94b9cf89ce99889a1dd5376d94316ae5145dfedd5d6", size = 801626, upload-time = "2025-09-25T21:32:28.878Z" },
+ { url = "https://files.pythonhosted.org/packages/f9/11/ba845c23988798f40e52ba45f34849aa8a1f2d4af4b798588010792ebad6/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f7057c9a337546edc7973c0d3ba84ddcdf0daa14533c2065749c9075001090e6", size = 753613, upload-time = "2025-09-25T21:32:30.178Z" },
+ { url = "https://files.pythonhosted.org/packages/3d/e0/7966e1a7bfc0a45bf0a7fb6b98ea03fc9b8d84fa7f2229e9659680b69ee3/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:eda16858a3cab07b80edaf74336ece1f986ba330fdb8ee0d6c0d68fe82bc96be", size = 794115, upload-time = "2025-09-25T21:32:31.353Z" },
+ { url = "https://files.pythonhosted.org/packages/de/94/980b50a6531b3019e45ddeada0626d45fa85cbe22300844a7983285bed3b/pyyaml-6.0.3-cp313-cp313-win32.whl", hash = "sha256:d0eae10f8159e8fdad514efdc92d74fd8d682c933a6dd088030f3834bc8e6b26", size = 137427, upload-time = "2025-09-25T21:32:32.58Z" },
+ { url = "https://files.pythonhosted.org/packages/97/c9/39d5b874e8b28845e4ec2202b5da735d0199dbe5b8fb85f91398814a9a46/pyyaml-6.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:79005a0d97d5ddabfeeea4cf676af11e647e41d81c9a7722a193022accdb6b7c", size = 154090, upload-time = "2025-09-25T21:32:33.659Z" },
+ { url = "https://files.pythonhosted.org/packages/73/e8/2bdf3ca2090f68bb3d75b44da7bbc71843b19c9f2b9cb9b0f4ab7a5a4329/pyyaml-6.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:5498cd1645aa724a7c71c8f378eb29ebe23da2fc0d7a08071d89469bf1d2defb", size = 140246, upload-time = "2025-09-25T21:32:34.663Z" },
+ { url = "https://files.pythonhosted.org/packages/9d/8c/f4bd7f6465179953d3ac9bc44ac1a8a3e6122cf8ada906b4f96c60172d43/pyyaml-6.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:8d1fab6bb153a416f9aeb4b8763bc0f22a5586065f86f7664fc23339fc1c1fac", size = 181814, upload-time = "2025-09-25T21:32:35.712Z" },
+ { url = "https://files.pythonhosted.org/packages/bd/9c/4d95bb87eb2063d20db7b60faa3840c1b18025517ae857371c4dd55a6b3a/pyyaml-6.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:34d5fcd24b8445fadc33f9cf348c1047101756fd760b4dacb5c3e99755703310", size = 173809, upload-time = "2025-09-25T21:32:36.789Z" },
+ { url = "https://files.pythonhosted.org/packages/92/b5/47e807c2623074914e29dabd16cbbdd4bf5e9b2db9f8090fa64411fc5382/pyyaml-6.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:501a031947e3a9025ed4405a168e6ef5ae3126c59f90ce0cd6f2bfc477be31b7", size = 766454, upload-time = "2025-09-25T21:32:37.966Z" },
+ { url = "https://files.pythonhosted.org/packages/02/9e/e5e9b168be58564121efb3de6859c452fccde0ab093d8438905899a3a483/pyyaml-6.0.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b3bc83488de33889877a0f2543ade9f70c67d66d9ebb4ac959502e12de895788", size = 836355, upload-time = "2025-09-25T21:32:39.178Z" },
+ { url = "https://files.pythonhosted.org/packages/88/f9/16491d7ed2a919954993e48aa941b200f38040928474c9e85ea9e64222c3/pyyaml-6.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c458b6d084f9b935061bc36216e8a69a7e293a2f1e68bf956dcd9e6cbcd143f5", size = 794175, upload-time = "2025-09-25T21:32:40.865Z" },
+ { url = "https://files.pythonhosted.org/packages/dd/3f/5989debef34dc6397317802b527dbbafb2b4760878a53d4166579111411e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7c6610def4f163542a622a73fb39f534f8c101d690126992300bf3207eab9764", size = 755228, upload-time = "2025-09-25T21:32:42.084Z" },
+ { url = "https://files.pythonhosted.org/packages/d7/ce/af88a49043cd2e265be63d083fc75b27b6ed062f5f9fd6cdc223ad62f03e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:5190d403f121660ce8d1d2c1bb2ef1bd05b5f68533fc5c2ea899bd15f4399b35", size = 789194, upload-time = "2025-09-25T21:32:43.362Z" },
+ { url = "https://files.pythonhosted.org/packages/23/20/bb6982b26a40bb43951265ba29d4c246ef0ff59c9fdcdf0ed04e0687de4d/pyyaml-6.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:4a2e8cebe2ff6ab7d1050ecd59c25d4c8bd7e6f400f5f82b96557ac0abafd0ac", size = 156429, upload-time = "2025-09-25T21:32:57.844Z" },
+ { url = "https://files.pythonhosted.org/packages/f4/f4/a4541072bb9422c8a883ab55255f918fa378ecf083f5b85e87fc2b4eda1b/pyyaml-6.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:93dda82c9c22deb0a405ea4dc5f2d0cda384168e466364dec6255b293923b2f3", size = 143912, upload-time = "2025-09-25T21:32:59.247Z" },
+ { url = "https://files.pythonhosted.org/packages/7c/f9/07dd09ae774e4616edf6cda684ee78f97777bdd15847253637a6f052a62f/pyyaml-6.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:02893d100e99e03eda1c8fd5c441d8c60103fd175728e23e431db1b589cf5ab3", size = 189108, upload-time = "2025-09-25T21:32:44.377Z" },
+ { url = "https://files.pythonhosted.org/packages/4e/78/8d08c9fb7ce09ad8c38ad533c1191cf27f7ae1effe5bb9400a46d9437fcf/pyyaml-6.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c1ff362665ae507275af2853520967820d9124984e0f7466736aea23d8611fba", size = 183641, upload-time = "2025-09-25T21:32:45.407Z" },
+ { url = "https://files.pythonhosted.org/packages/7b/5b/3babb19104a46945cf816d047db2788bcaf8c94527a805610b0289a01c6b/pyyaml-6.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6adc77889b628398debc7b65c073bcb99c4a0237b248cacaf3fe8a557563ef6c", size = 831901, upload-time = "2025-09-25T21:32:48.83Z" },
+ { url = "https://files.pythonhosted.org/packages/8b/cc/dff0684d8dc44da4d22a13f35f073d558c268780ce3c6ba1b87055bb0b87/pyyaml-6.0.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a80cb027f6b349846a3bf6d73b5e95e782175e52f22108cfa17876aaeff93702", size = 861132, upload-time = "2025-09-25T21:32:50.149Z" },
+ { url = "https://files.pythonhosted.org/packages/b1/5e/f77dc6b9036943e285ba76b49e118d9ea929885becb0a29ba8a7c75e29fe/pyyaml-6.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:00c4bdeba853cc34e7dd471f16b4114f4162dc03e6b7afcc2128711f0eca823c", size = 839261, upload-time = "2025-09-25T21:32:51.808Z" },
+ { url = "https://files.pythonhosted.org/packages/ce/88/a9db1376aa2a228197c58b37302f284b5617f56a5d959fd1763fb1675ce6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:66e1674c3ef6f541c35191caae2d429b967b99e02040f5ba928632d9a7f0f065", size = 805272, upload-time = "2025-09-25T21:32:52.941Z" },
+ { url = "https://files.pythonhosted.org/packages/da/92/1446574745d74df0c92e6aa4a7b0b3130706a4142b2d1a5869f2eaa423c6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:16249ee61e95f858e83976573de0f5b2893b3677ba71c9dd36b9cf8be9ac6d65", size = 829923, upload-time = "2025-09-25T21:32:54.537Z" },
+ { url = "https://files.pythonhosted.org/packages/f0/7a/1c7270340330e575b92f397352af856a8c06f230aa3e76f86b39d01b416a/pyyaml-6.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4ad1906908f2f5ae4e5a8ddfce73c320c2a1429ec52eafd27138b7f1cbe341c9", size = 174062, upload-time = "2025-09-25T21:32:55.767Z" },
+ { url = "https://files.pythonhosted.org/packages/f1/12/de94a39c2ef588c7e6455cfbe7343d3b2dc9d6b6b2f40c4c6565744c873d/pyyaml-6.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:ebc55a14a21cb14062aa4162f906cd962b28e2e9ea38f9b4391244cd8de4ae0b", size = 149341, upload-time = "2025-09-25T21:32:56.828Z" },
]
[[package]]
@@ -1243,25 +1301,25 @@ wheels = [
[[package]]
name = "virtualenv"
-version = "20.34.0"
+version = "20.35.3"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "distlib" },
{ name = "filelock" },
{ name = "platformdirs" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/1c/14/37fcdba2808a6c615681cd216fecae00413c9dab44fb2e57805ecf3eaee3/virtualenv-20.34.0.tar.gz", hash = "sha256:44815b2c9dee7ed86e387b842a84f20b93f7f417f95886ca1996a72a4138eb1a", size = 6003808, upload-time = "2025-08-13T14:24:07.464Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/a4/d5/b0ccd381d55c8f45d46f77df6ae59fbc23d19e901e2d523395598e5f4c93/virtualenv-20.35.3.tar.gz", hash = "sha256:4f1a845d131133bdff10590489610c98c168ff99dc75d6c96853801f7f67af44", size = 6002907, upload-time = "2025-10-10T21:23:33.178Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/76/06/04c8e804f813cf972e3262f3f8584c232de64f0cde9f703b46cf53a45090/virtualenv-20.34.0-py3-none-any.whl", hash = "sha256:341f5afa7eee943e4984a9207c025feedd768baff6753cd660c857ceb3e36026", size = 5983279, upload-time = "2025-08-13T14:24:05.111Z" },
+ { url = "https://files.pythonhosted.org/packages/27/73/d9a94da0e9d470a543c1b9d3ccbceb0f59455983088e727b8a1824ed90fb/virtualenv-20.35.3-py3-none-any.whl", hash = "sha256:63d106565078d8c8d0b206d48080f938a8b25361e19432d2c9db40d2899c810a", size = 5981061, upload-time = "2025-10-10T21:23:30.433Z" },
]
[[package]]
name = "wcwidth"
-version = "0.2.13"
+version = "0.2.14"
source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/6c/63/53559446a878410fc5a5974feb13d31d78d752eb18aeba59c7fef1af7598/wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5", size = 101301, upload-time = "2024-01-06T02:10:57.829Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/24/30/6b0809f4510673dc723187aeaf24c7f5459922d01e2f794277a3dfb90345/wcwidth-0.2.14.tar.gz", hash = "sha256:4d478375d31bc5395a3c55c40ccdf3354688364cd61c4f6adacaa9215d0b3605", size = 102293, upload-time = "2025-09-22T16:29:53.023Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/fd/84/fd2ba7aafacbad3c4201d395674fc6348826569da3c0937e75505ead3528/wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859", size = 34166, upload-time = "2024-01-06T02:10:55.763Z" },
+ { url = "https://files.pythonhosted.org/packages/af/b5/123f13c975e9f27ab9c0770f514345bd406d0e8d3b7a0723af9d43f710af/wcwidth-0.2.14-py2.py3-none-any.whl", hash = "sha256:a7bb560c8aee30f9957e5f9895805edd20602f2d7f720186dfd906e82b4982e1", size = 37286, upload-time = "2025-09-22T16:29:51.641Z" },
]
[[package]]