diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 0000000..047f115 --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,38 @@ +name: Publish Package to PyPI +on: + push: + tags: + - 'v*' +jobs: + build-n-publish: + name: Build and Publish + runs-on: ubuntu-latest + environment: + name: pypi + url: https://pypi.org/p/typesense + permissions: + id-token: write + contents: read + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: "3.9" + cache: pip + + - name: Install build dependencies + run: | + python -m pip install --upgrade pip + pip install build + - name: Build package + run: | + rm -rf dist/ + python -m build + - name: Publish to PyPI + uses: pypa/gh-action-pypi-publish@release/v1 + with: + attestations: true diff --git a/.github/workflows/test-and-lint.yml b/.github/workflows/test-and-lint.yml new file mode 100644 index 0000000..8101806 --- /dev/null +++ b/.github/workflows/test-and-lint.yml @@ -0,0 +1,61 @@ +name: Test and Lint + +on: + push: + branches: [ master ] + pull_request: + branches: [ master ] + +jobs: + quality: + runs-on: ubuntu-latest + strategy: + matrix: + python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"] + + steps: + - name: Start Typesense + run: | + docker run -d \ + -p 8108:8108 \ + --name typesense \ + -v /tmp/typesense-data:/data \ + -v /tmp/typesense-analytics-data:/analytics-data \ + typesense/typesense:30.0.alpha1 \ + --api-key=xyz \ + --data-dir=/data \ + --enable-search-analytics=true \ + --analytics-dir=/analytics-data \ + --analytics-flush-interval=60 \ + --analytics-minute-rate-limit=50 \ + --enable-cors + + - name: Wait for Typesense + run: | + timeout 20 bash -c 'while [[ "$(curl -s -o /dev/null -w ''%{http_code}'' localhost:8108/health)" != "200" ]]; do sleep 1; done' || false + + - uses: actions/checkout@v4 + + - name: Install uv and set the python version + uses: 
astral-sh/setup-uv@v5 + with: + python-version: ${{ matrix.python-version }} + + - name: Install the project + run: uv sync --locked --all-extras --dev + + - name: Check sync generation + run: uv run python utils/run-unasync.py --check + + - name: Lint with Ruff + run: | + uv run ruff check src/typesense + uv run ruff format src/typesense + + - name: Check types with mypy + run: | + uv run mypy src/typesense + + - name: Run tests and coverage (excluding OpenAI) + run: | + uv run coverage run -m pytest -m "not open_ai" diff --git a/.gitignore b/.gitignore index 62a81ee..aa32048 100644 --- a/.gitignore +++ b/.gitignore @@ -9,4 +9,7 @@ build/ *.swp *.egg* env/ -.cache/ \ No newline at end of file +.cache/ +.idea +venv +sample \ No newline at end of file diff --git a/README.md b/README.md index 30a5855..9537087 100644 --- a/README.md +++ b/README.md @@ -16,22 +16,56 @@ You can find some examples [here](https://github.com/typesense/typesense-python/ See detailed [API documentation](https://typesense.org/api). +## Async usage + +Use `AsyncClient` when working in an async runtime: + +```python +import asyncio +import typesense + + +async def main() -> None: + client = typesense.AsyncClient({ + "api_key": "abcd", + "nodes": [{"host": "localhost", "port": "8108", "protocol": "http"}], + "connection_timeout_seconds": 2, + }) + + print(await client.collections.retrieve()) + await client.api_call.aclose() + + +if __name__ == "__main__": + asyncio.run(main()) +``` + +See `examples/async_collection_operations.py` for a fuller async walkthrough. 
+ ## Compatibility | Typesense Server | typesense-python | -|------------------|----------------| -| \>= v0.19.0 | \>= v0.10.0 | -| \>= v0.17.0 | \>= v0.9.0 | -| \>= v0.16.0 | \>= v0.8.0 | -| \>= v0.15.0 | \>= v0.7.0 | +|------------------|------------------| +| \>= v30.0 | \>= v2.0.0 | +| \>= v28.0 | \>= v1.0.0 | +| \>= v26.0 | \>= v0.20.0 | +| \>= v0.25.0 | \>= v0.16.0 | +| \>= v0.23.0 | \>= v0.14.0 | +| \>= v0.21.0 | \>= v0.13.0 | +| \>= v0.20.0 | \>= v0.11.0 | +| \>= v0.19.0 | \>= v0.10.0 | +| \>= v0.17.0 | \>= v0.9.0 | +| \>= v0.16.0 | \>= v0.8.0 | +| \>= v0.15.0 | \>= v0.7.0 | ## Contributing +> [!NOTE] +> Development happens in async-only code; sync code is generated automatically via `utils/run-unasync.py`. + Bug reports and pull requests are welcome on GitHub at [https://github.com/typesense/typesense-python]. +If you change any part of the client's source code, run `uv run utils/run-unasync.py` before opening a PR to keep the generated sync files in sync. ## License `typesense-python` is distributed under the Apache 2 license. 
- - - diff --git a/examples/analytics_operations.py b/examples/analytics_operations.py new file mode 100644 index 0000000..6593baf --- /dev/null +++ b/examples/analytics_operations.py @@ -0,0 +1,58 @@ +import typesense + +client = typesense.Client({ + 'api_key': 'abcd', + 'nodes': [{ + 'host': 'localhost', + 'port': '8108', + 'protocol': 'http' + }], + 'connection_timeout_seconds': 2 +}) + +# Drop pre-existing rule if any +try: + client.analyticsV1.rules['top_queries'].delete() +except Exception as e: + pass + +# Create a new rule +create_response = client.analyticsV1.rules.create({ + "name": "top_queries", + "type": "popular_queries", + "params": { + "source": { + "collections": ["products"] + }, + "destination": { + "collection": "top_queries" + }, + "limit": 1000 + } +}) +print(create_response) + +# Try to fetch it back +print(client.analyticsV1.rules['top_queries'].retrieve()) + +# Update the rule +update_response = client.analyticsV1.rules.upsert('top_queries', { + "name": "top_queries", + "type": "popular_queries", + "params": { + "source": { + "collections": ["products"] + }, + "destination": { + "collection": "top_queries" + }, + "limit": 100 + } +}) +print(update_response) + +# List all rules +print(client.analyticsV1.rules.retrieve()) + +# Delete the rule +print(client.analyticsV1.rules['top_queries'].delete()) diff --git a/examples/async_collection_operations.py b/examples/async_collection_operations.py new file mode 100644 index 0000000..c4d8635 --- /dev/null +++ b/examples/async_collection_operations.py @@ -0,0 +1,196 @@ +import asyncio +import json +import os +import sys + +curr_dir = os.path.dirname(os.path.realpath(__file__)) +repo_root = os.path.abspath(os.path.join(curr_dir, os.pardir)) +sys.path.insert(1, os.path.join(repo_root, "src")) + +import typesense +from typesense.exceptions import TypesenseClientError + + +async def main() -> None: + client = typesense.AsyncClient( + { + "api_key": "xyz", + "nodes": [ + { + "host": "localhost", + "port": 
"8108", + "protocol": "http", + } + ], + "connection_timeout_seconds": 2, + } + ) + + try: + # Drop pre-existing collection if any + try: + await client.collections["books"].delete() + except Exception: + pass + + # Create a collection + create_response = await client.collections.create( + { + "name": "books", + "fields": [ + {"name": "title", "type": "string"}, + {"name": "authors", "type": "string[]", "facet": True}, + {"name": "publication_year", "type": "int32", "facet": True}, + {"name": "ratings_count", "type": "int32"}, + {"name": "average_rating", "type": "float"}, + {"name": "image_url", "type": "string"}, + ], + "default_sorting_field": "ratings_count", + } + ) + + print(create_response) + + # Retrieve the collection we just created + retrieve_response = await client.collections["books"].retrieve() + print(retrieve_response) + + # Try retrieving all collections + retrieve_all_response = await client.collections.retrieve() + print(retrieve_all_response) + + # Add a book + hunger_games_book = { + "id": "1", + "authors": ["Suzanne Collins"], + "average_rating": 4.34, + "publication_year": 2008, + "title": "The Hunger Games", + "image_url": "https://images.gr-assets.com/books/1447303603m/2767052.jpg", + "ratings_count": 4780653, + } + + await client.collections["books"].documents.create(hunger_games_book) + + # Upsert the same document + print(await client.collections["books"].documents.upsert(hunger_games_book)) + + # Or update it + hunger_games_book_updated = {"id": "1", "average_rating": 4.45} + print( + await client.collections["books"] + .documents["1"] + .update(hunger_games_book_updated) + ) + + # Try updating with bad data (with coercion enabled) + hunger_games_book_updated = {"id": "1", "average_rating": "4.55"} + print( + await client.collections["books"] + .documents["1"] + .update(hunger_games_book_updated, {"dirty_values": "coerce_or_reject"}) + ) + + # Export the documents from a collection + export_output = await 
client.collections["books"].documents.export() + print(export_output) + + # Fetch a document in a collection + print(await client.collections["books"].documents["1"].retrieve()) + + # Search for documents in a collection + print( + await client.collections["books"].documents.search( + { + "q": "hunger", + "query_by": "title", + "sort_by": "ratings_count:desc", + } + ) + ) + + # Make multiple search requests at the same time + print( + await client.multi_search.perform( + { + "searches": [ + { + "q": "hunger", + "query_by": "title", + }, + { + "q": "suzanne", + "query_by": "authors", + }, + ] + }, + {"collection": "books", "sort_by": "ratings_count:desc"}, + ) + ) + + # Remove a document from a collection + print(await client.collections["books"].documents["1"].delete()) + + # Import documents into a collection + docs_to_import = [] + for exported_doc_str in export_output.split("\n"): + docs_to_import.append(json.loads(exported_doc_str)) + + import_results = await client.collections["books"].documents.import_( + docs_to_import + ) + print(import_results) + + # Upserting documents + import_results = await client.collections["books"].documents.import_( + docs_to_import, + { + "action": "upsert", + "return_id": True, + }, + ) + print(import_results) + + # Schema change: add optional field + schema_change = { + "fields": [{"name": "in_stock", "optional": True, "type": "bool"}] + } + print(await client.collections["books"].update(schema_change)) + + # Update value matching a filter + updated_doc = {"publication_year": 2009} + print( + await client.collections["books"].documents.update( + updated_doc, {"filter_by": "publication_year: 2008"} + ) + ) + + # Drop the field + schema_change = {"fields": [{"name": "in_stock", "drop": True}]} + print(await client.collections["books"].update(schema_change)) + + # Deleting documents matching a filter query + print( + await client.collections["books"].documents.delete( + {"filter_by": "ratings_count: 4780653"} + ) + ) + + # Try 
importing empty list + try: + import_results = await client.collections["books"].documents.import_( + [], {"action": "upsert"} + ) + print(import_results) + except TypesenseClientError: + print("Detected import of empty document list.") + + # Drop the collection + drop_response = await client.collections["books"].delete() + print(drop_response) + finally: + await client.api_call.aclose() + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/examples/collection_operations.py b/examples/collection_operations.py index f0c2e54..c55d5f0 100644 --- a/examples/collection_operations.py +++ b/examples/collection_operations.py @@ -2,7 +2,7 @@ import os import sys import typesense - +from typesense.exceptions import TypesenseClientError curr_dir = os.path.dirname(os.path.realpath(__file__)) sys.path.insert(1, os.path.abspath(os.path.join(curr_dir, os.pardir))) @@ -53,7 +53,7 @@ # Add a book hunger_games_book = { - 'id': '1', 'original_publication_year': 2008, 'authors': ['Suzanne Collins'], 'average_rating': 4.34, + 'id': '1', 'authors': ['Suzanne Collins'], 'average_rating': 4.34, 'publication_year': 2008, 'title': 'The Hunger Games', 'image_url': 'https://images.gr-assets.com/books/1447303603m/2767052.jpg', 'ratings_count': 4780653 @@ -68,6 +68,10 @@ hunger_games_book_updated= {'id': '1', 'average_rating': 4.45} print(client.collections['books'].documents['1'].update(hunger_games_book_updated)) +# Try updating with bad data (with coercion enabled) +hunger_games_book_updated= {'id': '1', 'average_rating': '4.55'} +print(client.collections['books'].documents['1'].update(hunger_games_book_updated, {'dirty_values': 'coerce_or_reject'})) + # Export the documents from a collection export_output = client.collections['books'].documents.export() @@ -113,13 +117,32 @@ # Upserting documents import_results = client.collections['books'].documents.import_(docs_to_import, { 'action': 'upsert', + 'return_id': True }) print(import_results) +# Schema change: add optional field 
+schema_change = {"fields": [{"name": "in_stock", "optional": True, "type": "bool"}]} +print(client.collections['books'].update(schema_change)) + +# Update value matching a filter +updated_doc = {'publication_year': 2009} +print(client.collections['books'].documents.update(updated_doc, {'filter_by': 'publication_year: 2008'})) + +# Drop the field +schema_change = {"fields": [{"name": "in_stock", "drop": True}]} +print(client.collections['books'].update(schema_change)) + # Deleting documents matching a filter query print(client.collections['books'].documents.delete({'filter_by': 'ratings_count: 4780653'})) -# Drop the collection +# Try importing empty list +try: + import_results = client.collections['books'].documents.import_([], {"action": "upsert"}) + print(import_results) +except TypesenseClientError as e: + print("Detected import of empty document list.") +# Drop the collection drop_response = client.collections['books'].delete() print(drop_response) diff --git a/examples/index_and_search.py b/examples/index_and_search.py index e1422cf..219c0a0 100644 --- a/examples/index_and_search.py +++ b/examples/index_and_search.py @@ -67,6 +67,9 @@ res = client.collections['books'].documents.search({ 'q': 'the', 'query_by': 'title', - 'sort_by': 'ratings_count:desc' + 'sort_by': 'ratings_count:desc', + 'page': i, + 'per_page': 10, }) print(res['found']) + i += 1 diff --git a/examples/is_healthy.py b/examples/is_healthy.py new file mode 100644 index 0000000..c6a0fd0 --- /dev/null +++ b/examples/is_healthy.py @@ -0,0 +1,23 @@ +import typesense +import time + +client = typesense.Client({ + 'api_key': 'abcd', + 'nodes': [{ + 'host': 'localhost', + 'port': '8108', + 'protocol': 'http' + }], + 'connection_timeout_seconds': 2 +}) + + +# Wait until the Typesense cluster reports healthy + +while not client.operations.is_healthy(): + print("cluster is unhealthy, retrying...") + time.sleep(1) +print("cluster is healthy") + + + diff --git a/publish.sh b/publish.sh deleted file mode 100755 index 
04180af..0000000 --- a/publish.sh +++ /dev/null @@ -1,4 +0,0 @@ -#!/usr/bin/env bash -rm -rf dist/* -python setup.py bdist_wheel --universal -twine upload dist/* \ No newline at end of file diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..d918e31 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,73 @@ +[project] +name = "typesense" +description = "Python client for Typesense, an open source and typo tolerant search engine." +authors = [{ name = "Typesense", email = "contact@typesense.org" }] +requires-python = ">=3.9" +readme = "README.md" +license = { text = "Apache 2.0" } +keywords = [ + "search", + "typesense", +] +classifiers = [ + "Development Status :: 5 - Production/Stable", + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", +] +dependencies = [ + "httpx>=0.28.1", + "typing-extensions", +] +dynamic = ["version"] + +[project.urls] +Documentation = "https://typesense.org/" +Source = "https://github.com/typesense/typesense-python" +Tracker = "https://github.com/typesense/typesense-python/issues" + +[build-system] +requires = ["setuptools"] +build-backend = "setuptools.build_meta" + +[dependency-groups] +dev = [ + "mypy>=1.19.0", + "pytest", + "pytest-asyncio", + "coverage", + "pytest-mock", + "python-dotenv", + "faker", + "ruff>=0.11.11", + "isort>=6.0.1", + "respx>=0.22.0", + "requests", + "unasync>=0.6.0", +] + +[tool.uv] +package = false + +[[tool.uv.index]] +name = "pypi" +url = "https://pypi.org/simple" + +[tool.setuptools.dynamic] +version = {attr = "typesense.__version__"} + +[tool.setuptools.packages.find] +where = ["src"] + +[tool.coverage.run] +source = ["."] +omit = 
["examples/*.py","./venv/*","tests/*/*.py","*__init__.py","*/*test.py", "./src/typesense/types/*.py"] + +[tool.coverage.report] +omit = ["examples/*.py","./venv/*","tests/*.py","*__init__.py","*/*test.py"] diff --git a/pytest.ini b/pytest.ini new file mode 100644 index 0000000..c7da18e --- /dev/null +++ b/pytest.ini @@ -0,0 +1,5 @@ +[pytest] +pythonpath = src +asyncio_mode = auto +markers = + open_ai diff --git a/requirements-dev.txt b/requirements-dev.txt new file mode 100644 index 0000000..034bdec --- /dev/null +++ b/requirements-dev.txt @@ -0,0 +1,48 @@ +# This file was autogenerated by uv via the following command: +# uv pip compile pyproject.toml --group dev -o requirements-dev.txt +certifi==2025.4.26 + # via requests +charset-normalizer==3.4.2 + # via requests +coverage==7.8.2 + # via typesense (pyproject.toml:dev) +faker==37.3.0 + # via typesense (pyproject.toml:dev) +idna==3.10 + # via requests +iniconfig==2.1.0 + # via pytest +mypy==1.15.0 + # via typesense (pyproject.toml:dev) +mypy-extensions==1.1.0 + # via mypy +packaging==25.0 + # via pytest +pluggy==1.6.0 + # via pytest +pytest==8.3.5 + # via + # typesense (pyproject.toml:dev) + # pytest-mock +pytest-mock==3.14.1 + # via typesense (pyproject.toml:dev) +python-dotenv==1.1.0 + # via typesense (pyproject.toml:dev) +requests==2.32.3 + # via + # typesense (pyproject.toml) + # requests-mock +requests-mock==1.12.1 + # via typesense (pyproject.toml:dev) +ruff==0.11.11 + # via typesense (pyproject.toml:dev) +types-requests==2.32.0.20250515 + # via typesense (pyproject.toml:dev) +typing-extensions==4.13.2 + # via mypy +tzdata==2025.2 + # via faker +urllib3==2.4.0 + # via + # requests + # types-requests diff --git a/requirements.txt b/requirements.txt index 566083c..d18b8a6 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1 +1,12 @@ -requests==2.22.0 +# This file was autogenerated by uv via the following command: +# uv pip compile pyproject.toml -o requirements.txt +certifi==2024.8.30 + # via requests 
+charset-normalizer==3.3.2 + # via requests +idna==3.8 + # via requests +requests==2.32.3 + # via typesense (pyproject.toml) +urllib3==2.2.2 + # via requests diff --git a/ruff.toml b/ruff.toml new file mode 100644 index 0000000..25a54ba --- /dev/null +++ b/ruff.toml @@ -0,0 +1,67 @@ +exclude = [ + ".bzr", + ".direnv", + ".eggs", + ".git", + ".git-rewrite", + ".hg", + ".ipynb_checkpoints", + ".mypy_cache", + ".nox", + ".pants.d", + ".pyenv", + ".pytest_cache", + ".pytype", + ".ruff_cache", + ".svn", + ".tox", + ".venv", + ".vscode", + "__pypackages__", + "_build", + "buck-out", + "build", + "dist", + "node_modules", + "site-packages", + "venv", +] + +line-length = 88 +indent-width = 4 + +target-version = "py39" + +[lint] +select = ["E4", "E7", "E9", "F", "B"] + +ignore = ["E501"] + +fixable = ["ALL"] +unfixable = ["B"] + +# Allow unused variables when underscore-prefixed. +dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$" + +[format] +quote-style = "double" + +indent-style = "space" + +skip-magic-trailing-comma = false + +line-ending = "auto" + +# Enable auto-formatting of code examples in docstrings. Markdown, +# reStructuredText code/literal blocks and doctests are all supported. +# +# This is currently disabled by default, but it is planned for this +# to be opt-out in the future. +docstring-code-format = true + +# Set the line length limit used when formatting code snippets in +# docstrings. +# +# This only has an effect when the `docstring-code-format` setting is +# enabled. 
+docstring-code-line-length = "dynamic" diff --git a/setup.cfg b/setup.cfg index 41a61e6..088736f 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,7 +1,61 @@ [flake8] -max-line-length = 160 +# flake8 configuration: +# https://flake8.pycqa.org/en/latest/user/configuration.html +format = wemake +show-source = true +statistics = false +doctests = true +enable-extensions = G +max-line-length = 88 +extend-select = B950 +extend-ignore = E203,E501,E701 + +# darglint configuration: +# https://github.com/terrencepreilly/darglint +strictness = long +docstring-style = sphinx + +# Flake plugins: +max-complexity = 6 + +# # Excluding some directories: +exclude = .git,__pycache__,venv,.eggs,*.egg,src/typesense/__init__.py +ignore = Q000, WPS602, WPS432, WPS305, WPS221, WPS230, WPS234, WPS433, WPS440, W503, WPS331, WPS306, WPS237, WPS202, RST301, RST306, WPS214, WPS235, WPS226, WPS337, WPS320, F821, WPS201 +per-file-ignores = + tests/*.py: S101, WPS226, WPS118, WPS202, WPS204, WPS218, WPS211, WPS604, WPS431, WPS210, WPS201, WPS437 + src/typesense/types/*.py: B950, WPS215, WPS111, WPS462, WPS322, WPS428, WPS114, WPS110, WPS202, WPS115 + src/typesense/documents.py: WPS320, E704, D102, WPS428, WPS220 + src/typesense/stemming_dictionaries.py: WPS320, E704, D102, WPS428, WPS220 + src/typesense/api_call.py: WPS110, WPS211 + src/typesense/request_handler.py: WPS110, WPS211 + [metadata] license_file = LICENSE -[isort] \ No newline at end of file +[isort] +# isort configuration: +# https://github.com/PyCQA/isort/wiki/isort-Settings +multi_line_output = 3 +include_trailing_comma = True +force_grid_wrap = 0 +use_parentheses = True +ensure_newline_before_comments = True +line_length = 88 + +[mypy] +# Mypy configuration: +# https://mypy.readthedocs.io/en/latest/config_file.html +enable_error_code = + truthy-bool, + truthy-iterable, + redundant-expr, + unused-awaitable, + ignore-without-code, + possibly-undefined, + redundant-self, + +explicit_package_bases = true +ignore_missing_imports = true 
+strict = true +warn_unreachable = true diff --git a/setup.py b/setup.py deleted file mode 100644 index 8fa04c4..0000000 --- a/setup.py +++ /dev/null @@ -1,19 +0,0 @@ -from setuptools import setup - -from m2r import parse_from_file - -long_description = parse_from_file('README.md') - -setup( - name='typesense', - python_requires='>=3', - version='0.10.0', - packages=['examples', 'typesense'], - install_requires=['requests'], - url='https://typesense.org', - license='Apache 2.0', - author='Typesense', - author_email='contact@typesense.org', - description='Python client for Typesense, an open source and typo tolerant search engine.', - long_description=long_description, -) diff --git a/src/typesense/__init__.py b/src/typesense/__init__.py new file mode 100644 index 0000000..96654ba --- /dev/null +++ b/src/typesense/__init__.py @@ -0,0 +1,6 @@ +from .sync.client import Client # NOQA +from .async_.client import AsyncClient # NOQA + + +__version__ = "2.0.0" +__all__ = ["Client", "AsyncClient"] diff --git a/src/typesense/async_/__init__.py b/src/typesense/async_/__init__.py new file mode 100644 index 0000000..897c2c3 --- /dev/null +++ b/src/typesense/async_/__init__.py @@ -0,0 +1,3 @@ +from .client import AsyncClient # NOQA + +__all__ = ["AsyncClient"] diff --git a/src/typesense/async_/alias.py b/src/typesense/async_/alias.py new file mode 100644 index 0000000..189c014 --- /dev/null +++ b/src/typesense/async_/alias.py @@ -0,0 +1,80 @@ +""" +This module provides async functionality for managing individual aliases in Typesense. + +It contains the AsyncAlias class, which allows for retrieving and deleting +aliases asynchronously. + +Classes: + AsyncAlias: Manages async operations on a single alias in the Typesense API. + +Dependencies: + - typesense.async_api_call: Provides the AsyncApiCall class for making async API requests. + - typesense.types.alias: Provides AliasSchema type. 
+ +Note: This module uses conditional imports to support both Python 3.11+ and earlier versions. +""" + +from .api_call import AsyncApiCall +from typesense.types.alias import AliasSchema + + +class AsyncAlias: + """ + Manages async operations on a single alias in the Typesense API. + + This class provides async methods to retrieve and delete an alias. + + Attributes: + api_call (AsyncApiCall): The AsyncApiCall instance for making async API requests. + name (str): The name of the alias. + """ + + def __init__(self, api_call: AsyncApiCall, name: str): + """ + Initialize the AsyncAlias instance. + + Args: + api_call (AsyncApiCall): The AsyncApiCall instance for making async API requests. + name (str): The name of the alias. + """ + self.api_call = api_call + self.name = name + + async def retrieve(self) -> AliasSchema: + """ + Retrieve this specific alias. + + Returns: + AliasSchema: The schema containing the alias details. + """ + response: AliasSchema = await self.api_call.get( + self._endpoint_path, + entity_type=AliasSchema, + as_json=True, + ) + return response + + async def delete(self) -> AliasSchema: + """ + Delete this specific alias. + + Returns: + AliasSchema: The schema containing the deletion response. + """ + response: AliasSchema = await self.api_call.delete( + self._endpoint_path, + entity_type=AliasSchema, + ) + return response + + @property + def _endpoint_path(self) -> str: + """ + Construct the API endpoint path for this specific alias. + + Returns: + str: The constructed endpoint path. + """ + from .aliases import AsyncAliases + + return "/".join([AsyncAliases.resource_path, self.name]) diff --git a/src/typesense/async_/aliases.py b/src/typesense/async_/aliases.py new file mode 100644 index 0000000..4e3172a --- /dev/null +++ b/src/typesense/async_/aliases.py @@ -0,0 +1,129 @@ +""" +This module provides async functionality for managing aliases in Typesense. 
+ +It contains the AsyncAliases class, which allows for creating, updating, retrieving, and +accessing individual aliases asynchronously. + +Classes: + AsyncAliases: Manages aliases in the Typesense API (async). + +Dependencies: + - typesense.async_api_call: Provides the AsyncApiCall class for making async API requests. + - typesense.async_alias: Provides the AsyncAlias class for individual alias operations. + - typesense.types.alias: Provides AliasCreateSchema, AliasSchema, and AliasesResponseSchema types. + +Note: This module uses conditional imports to support both Python 3.11+ and earlier versions. +""" + +import sys + +from .api_call import AsyncApiCall +from .alias import AsyncAlias +from typesense.types.alias import AliasCreateSchema, AliasSchema, AliasesResponseSchema + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + + +class AsyncAliases: + """ + Manages aliases in the Typesense API (async). + + This class provides async methods to create, update, retrieve, and access individual aliases. + + Attributes: + resource_path (str): The API endpoint path for alias operations. + api_call (AsyncApiCall): The AsyncApiCall instance for making async API requests. + aliases (Dict[str, AsyncAlias]): A dictionary of AsyncAlias instances, keyed by alias name. + """ + + resource_path: typing.Final[str] = "/aliases" + + def __init__(self, api_call: AsyncApiCall): + """ + Initialize the AsyncAliases instance. + + Args: + api_call (AsyncApiCall): The AsyncApiCall instance for making async API requests. + """ + self.api_call = api_call + self.aliases: typing.Dict[str, AsyncAlias] = {} + + def __getitem__(self, name: str) -> AsyncAlias: + """ + Get or create an AsyncAlias instance for a given alias name. + + This method allows accessing aliases using dictionary-like syntax. + If the AsyncAlias instance doesn't exist, it creates a new one. + + Args: + name (str): The name of the alias. 
+ + Returns: + AsyncAlias: The AsyncAlias instance for the specified alias name. + + Example: + >>> aliases = AsyncAliases(async_api_call) + >>> company_alias = aliases["company_alias"] + """ + if not self.aliases.get(name): + self.aliases[name] = AsyncAlias(self.api_call, name) + return self.aliases[name] + + async def upsert(self, name: str, mapping: AliasCreateSchema) -> AliasSchema: + """ + Create or update an alias. + + Args: + name (str): The name of the alias. + mapping (AliasCreateSchema): The schema for creating or updating the alias. + + Returns: + AliasSchema: The created or updated alias. + + Example: + >>> aliases = AsyncAliases(async_api_call) + >>> alias = await aliases.upsert( + ... "company_alias", {"collection_name": "companies"} + ... ) + """ + response: AliasSchema = await self.api_call.put( + self._endpoint_path(name), + body=mapping, + entity_type=AliasSchema, + ) + return response + + async def retrieve(self) -> AliasesResponseSchema: + """ + Retrieve all aliases. + + Returns: + AliasesResponseSchema: The schema containing all aliases. + + Example: + >>> aliases = AsyncAliases(async_api_call) + >>> all_aliases = await aliases.retrieve() + >>> for alias in all_aliases["aliases"]: + ... print(alias["name"]) + """ + response: AliasesResponseSchema = await self.api_call.get( + AsyncAliases.resource_path, + as_json=True, + entity_type=AliasesResponseSchema, + ) + return response + + def _endpoint_path(self, alias_name: str) -> str: + """ + Construct the API endpoint path for alias operations. + + Args: + alias_name (str): The name of the alias. + + Returns: + str: The constructed endpoint path. 
+ """ + return "/".join([AsyncAliases.resource_path, alias_name]) diff --git a/src/typesense/async_/analytics.py b/src/typesense/async_/analytics.py new file mode 100644 index 0000000..127d58c --- /dev/null +++ b/src/typesense/async_/analytics.py @@ -0,0 +1,14 @@ +"""Client for Typesense Analytics module (async).""" + +from .analytics_events import AsyncAnalyticsEvents +from .analytics_rules import AsyncAnalyticsRules +from .api_call import AsyncApiCall + + +class AsyncAnalytics: + """Client for v30 Analytics endpoints (async).""" + + def __init__(self, api_call: AsyncApiCall) -> None: + self.api_call = api_call + self.rules = AsyncAnalyticsRules(api_call) + self.events = AsyncAnalyticsEvents(api_call) diff --git a/src/typesense/async_/analytics_events.py b/src/typesense/async_/analytics_events.py new file mode 100644 index 0000000..7540873 --- /dev/null +++ b/src/typesense/async_/analytics_events.py @@ -0,0 +1,71 @@ +"""Client for Analytics events and status operations (async).""" + +import sys + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + +from .api_call import AsyncApiCall +from typesense.types.analytics import ( + AnalyticsEvent as AnalyticsEventSchema, + AnalyticsEventCreateResponse, + AnalyticsEventsResponse, + AnalyticsStatus, +) + + +class AsyncAnalyticsEvents: + events_path: typing.Final[str] = "/analytics/events" + flush_path: typing.Final[str] = "/analytics/flush" + status_path: typing.Final[str] = "/analytics/status" + + def __init__(self, api_call: AsyncApiCall) -> None: + self.api_call = api_call + + async def create(self, event: AnalyticsEventSchema) -> AnalyticsEventCreateResponse: + response: AnalyticsEventCreateResponse = await self.api_call.post( + AsyncAnalyticsEvents.events_path, + body=event, + as_json=True, + entity_type=AnalyticsEventCreateResponse, + ) + return response + + async def retrieve( + self, + *, + user_id: str, + name: str, + n: int, + ) -> AnalyticsEventsResponse: + params: 
typing.Dict[str, typing.Union[str, int]] = { + "user_id": user_id, + "name": name, + "n": n, + } + response: AnalyticsEventsResponse = await self.api_call.get( + AsyncAnalyticsEvents.events_path, + params=params, + as_json=True, + entity_type=AnalyticsEventsResponse, + ) + return response + + async def flush(self) -> AnalyticsEventCreateResponse: + response: AnalyticsEventCreateResponse = await self.api_call.post( + AsyncAnalyticsEvents.flush_path, + body={}, + as_json=True, + entity_type=AnalyticsEventCreateResponse, + ) + return response + + async def status(self) -> AnalyticsStatus: + response: AnalyticsStatus = await self.api_call.get( + AsyncAnalyticsEvents.status_path, + as_json=True, + entity_type=AnalyticsStatus, + ) + return response diff --git a/src/typesense/async_/analytics_rule.py b/src/typesense/async_/analytics_rule.py new file mode 100644 index 0000000..6233b7c --- /dev/null +++ b/src/typesense/async_/analytics_rule.py @@ -0,0 +1,31 @@ +"""Per-rule client for Analytics rules operations (async).""" + +from .api_call import AsyncApiCall +from typesense.types.analytics import AnalyticsRuleSchema + + +class AsyncAnalyticsRule: + def __init__(self, api_call: AsyncApiCall, rule_name: str) -> None: + self.api_call = api_call + self.rule_name = rule_name + + @property + def _endpoint_path(self) -> str: + from .analytics_rules import AsyncAnalyticsRules + + return "/".join([AsyncAnalyticsRules.resource_path, self.rule_name]) + + async def retrieve(self) -> AnalyticsRuleSchema: + response: AnalyticsRuleSchema = await self.api_call.get( + self._endpoint_path, + as_json=True, + entity_type=AnalyticsRuleSchema, + ) + return response + + async def delete(self) -> AnalyticsRuleSchema: + response: AnalyticsRuleSchema = await self.api_call.delete( + self._endpoint_path, + entity_type=AnalyticsRuleSchema, + ) + return response diff --git a/src/typesense/async_/analytics_rule_v1.py b/src/typesense/async_/analytics_rule_v1.py new file mode 100644 index 0000000..d640853 
--- /dev/null +++ b/src/typesense/async_/analytics_rule_v1.py @@ -0,0 +1,117 @@ +""" +This module provides async functionality for managing individual analytics rules in Typesense (V1). + +Classes: + - AsyncAnalyticsRuleV1: Handles async operations related to a specific analytics rule. + +Methods: + - __init__: Initializes the AsyncAnalyticsRuleV1 object. + - _endpoint_path: Constructs the API endpoint path for this specific analytics rule. + - retrieve: Retrieves the details of this specific analytics rule. + - delete: Deletes this specific analytics rule. + +The AsyncAnalyticsRuleV1 class interacts with the Typesense API to manage operations on a +specific analytics rule. It provides methods to retrieve and delete individual rules. + +For more information on analytics, refer to the Analytics & Query Suggestion +[documentation](https://typesense.org/docs/27.0/api/analytics-query-suggestions.html) + +This module uses type hinting and is compatible with Python 3.11+ as well as earlier +versions through the use of the typing_extensions library. +""" + +import sys + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + +from typing_extensions import deprecated + +from .api_call import AsyncApiCall +from typesense.logger import warn_deprecation +from typesense.types.analytics_rule_v1 import ( + RuleDeleteSchema, + RuleSchemaForCounters, + RuleSchemaForQueries, +) + + +@deprecated( + "AsyncAnalyticsRuleV1 is deprecated on v30+. Use client.analytics.rules[rule_id] instead." +) +class AsyncAnalyticsRuleV1: + """ + Class for managing individual analytics rules in Typesense (V1) (async). + + This class provides methods to interact with a specific analytics rule, + including retrieving and deleting it. + + Attributes: + api_call (AsyncApiCall): The API call object for making requests. + rule_id (str): The ID of the analytics rule. 
+ """ + + def __init__(self, api_call: AsyncApiCall, rule_id: str): + """ + Initialize the AsyncAnalyticsRuleV1 object. + + Args: + api_call (AsyncApiCall): The API call object for making requests. + rule_id (str): The ID of the analytics rule. + """ + self.api_call = api_call + self.rule_id = rule_id + + async def retrieve( + self, + ) -> typing.Union[RuleSchemaForQueries, RuleSchemaForCounters]: + """ + Retrieve this specific analytics rule. + + Returns: + Union[RuleSchemaForQueries, RuleSchemaForCounters]: + The schema containing the rule details. + """ + response: typing.Union[ + RuleSchemaForQueries, RuleSchemaForCounters + ] = await self.api_call.get( + self._endpoint_path, + entity_type=dict, + as_json=True, + ) + return typing.cast( + typing.Union[RuleSchemaForQueries, RuleSchemaForCounters], + response, + ) + + async def delete(self) -> RuleDeleteSchema: + """ + Delete this specific analytics rule. + + Returns: + RuleDeleteSchema: The schema containing the deletion response. + """ + response: RuleDeleteSchema = await self.api_call.delete( + self._endpoint_path, + entity_type=RuleDeleteSchema, + ) + + return response + + @property + @warn_deprecation( # type: ignore[untyped-decorator] + "AsyncAnalyticsRuleV1 is deprecated on v30+. Use client.analytics.rules[rule_id] instead.", + flag_name="analytics_rules_v1_deprecation", + ) + def _endpoint_path(self) -> str: + """ + Construct the API endpoint path for this specific analytics rule. + + Returns: + str: The constructed endpoint path. 
+ """ + from .analytics_rules_v1 import AsyncAnalyticsRulesV1 + + return "/".join([AsyncAnalyticsRulesV1.resource_path, self.rule_id]) diff --git a/src/typesense/async_/analytics_rules.py b/src/typesense/async_/analytics_rules.py new file mode 100644 index 0000000..c7fa933 --- /dev/null +++ b/src/typesense/async_/analytics_rules.py @@ -0,0 +1,62 @@ +"""Client for Analytics rules collection operations (async).""" + +import sys + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + +from .analytics_rule import AsyncAnalyticsRule +from .api_call import AsyncApiCall +from typesense.types.analytics import ( + AnalyticsRuleCreate, + AnalyticsRuleSchema, + AnalyticsRuleUpdate, +) + + +class AsyncAnalyticsRules(object): + resource_path: typing.Final[str] = "/analytics/rules" + + def __init__(self, api_call: AsyncApiCall) -> None: + self.api_call = api_call + self.rules: typing.Dict[str, AsyncAnalyticsRule] = {} + + def __getitem__(self, rule_name: str) -> AsyncAnalyticsRule: + if rule_name not in self.rules: + self.rules[rule_name] = AsyncAnalyticsRule(self.api_call, rule_name) + return self.rules[rule_name] + + async def create(self, rule: AnalyticsRuleCreate) -> AnalyticsRuleSchema: + response: AnalyticsRuleSchema = await self.api_call.post( + AsyncAnalyticsRules.resource_path, + body=rule, + as_json=True, + entity_type=AnalyticsRuleSchema, + ) + return response + + async def retrieve( + self, *, rule_tag: typing.Union[str, None] = None + ) -> typing.List[AnalyticsRuleSchema]: + params: typing.Dict[str, str] = {} + if rule_tag: + params["rule_tag"] = rule_tag + response: typing.List[AnalyticsRuleSchema] = await self.api_call.get( + AsyncAnalyticsRules.resource_path, + params=params if params else None, + as_json=True, + entity_type=typing.List[AnalyticsRuleSchema], + ) + return response + + async def upsert( + self, rule_name: str, update: AnalyticsRuleUpdate + ) -> AnalyticsRuleSchema: + response: AnalyticsRuleSchema = await 
self.api_call.put( + "/".join([AsyncAnalyticsRules.resource_path, rule_name]), + body=update, + entity_type=AnalyticsRuleSchema, + ) + return response diff --git a/src/typesense/async_/analytics_rules_v1.py b/src/typesense/async_/analytics_rules_v1.py new file mode 100644 index 0000000..1aac207 --- /dev/null +++ b/src/typesense/async_/analytics_rules_v1.py @@ -0,0 +1,179 @@ +""" +This module provides async functionality for managing analytics rules in Typesense (V1). + +Classes: + - AsyncAnalyticsRulesV1: Handles async operations related to analytics rules. + +Methods: + - __init__: Initializes the AsyncAnalyticsRulesV1 object. + - __getitem__: Retrieves or creates an AsyncAnalyticsRuleV1 object for a given rule_id. + - create: Creates a new analytics rule. + - upsert: Creates or updates an analytics rule. + - retrieve: Retrieves all analytics rules. + +Attributes: + - resource_path: The API resource path for analytics rules. + +The AsyncAnalyticsRulesV1 class interacts with the Typesense API to manage analytics rule operations. +It provides methods to create, update, and retrieve analytics rules, as well as access +individual AsyncAnalyticsRuleV1 objects. + +For more information on analytics, refer to the Analytics & Query Suggestion +[documentation](https://typesense.org/docs/27.0/api/analytics-query-suggestions.html) + +This module uses type hinting and is compatible with Python 3.11+ as well as earlier +versions through the use of the typing_extensions library. 
+""" + +import sys + +from typesense.logger import warn_deprecation + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + +from .analytics_rule_v1 import AsyncAnalyticsRuleV1 +from .api_call import AsyncApiCall +from typesense.types.analytics_rule_v1 import ( + RuleCreateSchemaForCounters, + RuleCreateSchemaForQueries, + RuleSchemaForCounters, + RuleSchemaForQueries, + RulesRetrieveSchema, +) + +_RuleParams = typing.Union[ + typing.Dict[str, typing.Union[str, int, bool]], + None, +] + + +class AsyncAnalyticsRulesV1(object): + """ + Class for managing analytics rules in Typesense (V1) (async). + + This class provides methods to interact with analytics rules, including + creating, updating, and retrieving them. + + Attributes: + resource_path (str): The API resource path for analytics rules. + api_call (AsyncApiCall): The API call object for making requests. + rules (Dict[str, AsyncAnalyticsRuleV1]): A dictionary of AsyncAnalyticsRuleV1 objects. + """ + + resource_path: typing.Final[str] = "/analytics/rules" + + def __init__(self, api_call: AsyncApiCall): + """ + Initialize the AsyncAnalyticsRulesV1 object. + + Args: + api_call (AsyncApiCall): The API call object for making requests. + """ + self.api_call = api_call + self.rules: typing.Dict[str, AsyncAnalyticsRuleV1] = {} + + def __getitem__(self, rule_id: str) -> AsyncAnalyticsRuleV1: + """ + Get or create an AsyncAnalyticsRuleV1 object for a given rule_id. + + Args: + rule_id (str): The ID of the analytics rule. + + Returns: + AsyncAnalyticsRuleV1: The AsyncAnalyticsRuleV1 object for the given ID. + """ + if not self.rules.get(rule_id): + self.rules[rule_id] = AsyncAnalyticsRuleV1(self.api_call, rule_id) + return self.rules[rule_id] + + @warn_deprecation( # type: ignore[untyped-decorator] + "AsyncAnalyticsRulesV1 is deprecated on v30+. 
Use client.analytics instead.", + flag_name="analytics_rules_v1_deprecation", + ) + async def create( + self, + rule: typing.Union[RuleCreateSchemaForCounters, RuleCreateSchemaForQueries], + rule_parameters: _RuleParams = None, + ) -> typing.Union[RuleSchemaForCounters, RuleSchemaForQueries]: + """ + Create a new analytics rule. + + This method can create both counter rules and query rules. + + Args: + rule (Union[RuleCreateSchemaForCounters, RuleCreateSchemaForQueries]): + The rule schema. Use RuleCreateSchemaForCounters for counter rules + and RuleCreateSchemaForQueries for query rules. + + rule_parameters (_RuleParams, optional): Additional rule parameters. + + Returns: + Union[RuleSchemaForCounters, RuleSchemaForQueries]: + The created rule. Returns RuleSchemaForCounters for counter rules + and RuleSchemaForQueries for query rules. + """ + response: typing.Union[ + RuleSchemaForCounters, RuleSchemaForQueries + ] = await self.api_call.post( + AsyncAnalyticsRulesV1.resource_path, + body=rule, + params=rule_parameters, + as_json=True, + entity_type=dict, + ) + return typing.cast( + typing.Union[RuleSchemaForCounters, RuleSchemaForQueries], + response, + ) + + @warn_deprecation( # type: ignore[untyped-decorator] + "AsyncAnalyticsRulesV1 is deprecated on v30+. Use client.analytics instead.", + flag_name="analytics_rules_v1_deprecation", + ) + async def upsert( + self, + rule_id: str, + rule: typing.Union[RuleCreateSchemaForQueries, RuleSchemaForCounters], + ) -> typing.Union[RuleSchemaForCounters, RuleCreateSchemaForQueries]: + """ + Create or update an analytics rule. + + Args: + rule_id (str): The ID of the rule to upsert. + rule (Union[RuleCreateSchemaForQueries, RuleSchemaForCounters]): The rule schema. + + Returns: + Union[RuleSchemaForCounters, RuleCreateSchemaForQueries]: The upserted rule. 
+ """ + response: typing.Union[ + RuleSchemaForCounters, RuleCreateSchemaForQueries + ] = await self.api_call.put( + "/".join([self.resource_path, rule_id]), + body=rule, + entity_type=dict, + ) + return typing.cast( + typing.Union[RuleSchemaForCounters, RuleCreateSchemaForQueries], + response, + ) + + @warn_deprecation( # type: ignore[untyped-decorator] + "AsyncAnalyticsRulesV1 is deprecated on v30+. Use client.analytics instead.", + flag_name="analytics_rules_v1_deprecation", + ) + async def retrieve(self) -> RulesRetrieveSchema: + """ + Retrieve all analytics rules. + + Returns: + RulesRetrieveSchema: The schema containing all analytics rules. + """ + response: RulesRetrieveSchema = await self.api_call.get( + AsyncAnalyticsRulesV1.resource_path, + as_json=True, + entity_type=RulesRetrieveSchema, + ) + return response diff --git a/src/typesense/async_/analytics_v1.py b/src/typesense/async_/analytics_v1.py new file mode 100644 index 0000000..796944b --- /dev/null +++ b/src/typesense/async_/analytics_v1.py @@ -0,0 +1,49 @@ +""" +This module provides async functionality for managing analytics (V1) in Typesense. + +Classes: + - AsyncAnalyticsV1: Handles async operations related to analytics, including access to analytics rules. + +Methods: + - __init__: Initializes the AsyncAnalyticsV1 object. + +The AsyncAnalyticsV1 class serves as an entry point for analytics-related operations in Typesense, +currently providing access to AsyncAnalyticsRulesV1. + +For more information on analytics, refer to the Analytics & Query Suggestion +[documentation](https://typesense.org/docs/27.0/api/analytics-query-suggestions.html) + +This module uses type hinting and is compatible with Python 3.11+ as well as earlier +versions through the use of the typing_extensions library. +""" + +from typing_extensions import deprecated + +from .analytics_rules_v1 import AsyncAnalyticsRulesV1 +from .api_call import AsyncApiCall + + +@deprecated("AsyncAnalyticsV1 is deprecated on v30+. 
Use client.analytics instead.") +class AsyncAnalyticsV1(object): + """ + Class for managing analytics in Typesense (V1) (async). + + This class provides access to analytics-related functionalities, + currently including operations on analytics rules. + + Attributes: + rules (AsyncAnalyticsRulesV1): An instance of AsyncAnalyticsRulesV1 for managing analytics rules. + """ + + def __init__(self, api_call: AsyncApiCall) -> None: + """ + Initialize the AsyncAnalyticsV1 object. + + Args: + api_call (AsyncApiCall): The API call object for making requests. + """ + self._rules = AsyncAnalyticsRulesV1(api_call) + + @property + def rules(self) -> AsyncAnalyticsRulesV1: + return self._rules diff --git a/src/typesense/async_/api_call.py b/src/typesense/async_/api_call.py new file mode 100644 index 0000000..0953310 --- /dev/null +++ b/src/typesense/async_/api_call.py @@ -0,0 +1,541 @@ +""" +This module provides async functionality for making API calls to a Typesense server. + +It contains the AsyncApiCall class, which is responsible for executing async HTTP requests +to the Typesense API, handling retries, and managing node health. + +Key features: +- Support for GET, POST, PUT, PATCH, and DELETE HTTP methods (async) +- Automatic retries on server errors +- Node health management +- Type-safe request execution with overloaded methods + +Classes: + AsyncApiCall: Manages async API calls to the Typesense server. + +Dependencies: + - httpx: For making async HTTP requests + - typesense.configuration: Provides Configuration and Node classes + - typesense.exceptions: Custom exception classes + - typesense.node_manager: Provides NodeManager class + +Usage: + from typesense.configuration import Configuration + from .api_call import AsyncApiCall + + config = Configuration(...) 
+ api_call = AsyncApiCall(config) + response = await api_call.get("/collections", SomeEntityType) + +Note: This module is part of the Typesense Python client library and is used internally +by other components of the library. +""" + +import sys +from types import MappingProxyType, TracebackType + +import httpx + +from typesense.configuration import Configuration, Node +from typesense.exceptions import ( + HTTPStatus0Error, + ObjectAlreadyExists, + ObjectNotFound, + ObjectUnprocessable, + RequestForbidden, + RequestMalformed, + RequestUnauthorized, + ServerError, + ServiceUnavailable, + TypesenseClientError, +) +from typesense.node_manager import NodeManager +from typesense.request_handler import RequestHandler + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + +TEntityDict = typing.TypeVar("TEntityDict") +TParams = typing.TypeVar("TParams", bound=typing.Dict[str, typing.Any]) +TBody = typing.TypeVar( + "TBody", bound=typing.Union[str, bytes, typing.Mapping[str, typing.Any]] +) + + +class SessionFunctionKwargs(typing.Generic[TParams, TBody], typing.TypedDict): + """ + Type definition for keyword arguments used in request functions. + + This is an internal abstraction that gets converted to httpx's request parameters. + The `data` field is converted to `content` when passed to httpx. + + Note: `verify` and `timeout` are set on the httpx client, not in request kwargs. + However, we include them here for compatibility with the existing API. + + Attributes: + params (Optional[Union[TParams, None]]): Query parameters for the request. + Passed as `params` to httpx. + + data (Optional[Union[TBody, str, None]]): Body of the request. + Converted to `content` (JSON string) when passed to httpx. + + headers (Optional[Dict[str, str]]): Headers for the request. + Passed as `headers` to httpx. + + timeout (float): Timeout for the request in seconds. + Set on the httpx client, not in request kwargs. 
+ + verify (bool): Whether to verify SSL certificates. + Set on the httpx client, not in request kwargs. + """ + + params: typing.NotRequired[typing.Union[TParams, None]] + data: typing.NotRequired[typing.Union[TBody, None]] + content: typing.NotRequired[typing.Union[TBody, str, None]] + headers: typing.NotRequired[typing.Dict[str, str]] + timeout: typing.NotRequired[float] + + +_ERROR_CODE_MAP: typing.Final[ + typing.Mapping[str, typing.Type[TypesenseClientError]] +] = MappingProxyType( + { + "0": HTTPStatus0Error, + "400": RequestMalformed, + "401": RequestUnauthorized, + "403": RequestForbidden, + "404": ObjectNotFound, + "409": ObjectAlreadyExists, + "422": ObjectUnprocessable, + "500": ServerError, + "503": ServiceUnavailable, + }, +) + +_SERVER_ERRORS: typing.Final[ + typing.Tuple[ + typing.Type[httpx.TimeoutException], + typing.Type[httpx.ConnectError], + typing.Type[httpx.HTTPError], + typing.Type[httpx.RequestError], + typing.Type[HTTPStatus0Error], + typing.Type[ServerError], + typing.Type[ServiceUnavailable], + ] +] = ( + httpx.TimeoutException, + httpx.ConnectError, + httpx.HTTPError, + httpx.RequestError, + HTTPStatus0Error, + ServerError, + ServiceUnavailable, +) + + +class AsyncApiCall: + """ + Manages async API calls to the Typesense server. + + This class handles the execution of async HTTP requests to the Typesense API, + including retries, node health management, and error handling. + + Attributes: + config (Configuration): The configuration object for the Typesense client. + node_manager (NodeManager): Manages the nodes in the Typesense cluster. + _client (httpx.AsyncClient): The httpx async client for making requests. + """ + + def __init__(self, config: Configuration): + """ + Initialize the AsyncApiCall instance. + + Args: + config (Configuration): The configuration object for the Typesense client. 
+ """ + self.config = config + self.node_manager = NodeManager(config) + self.request_handler = RequestHandler(config) + self._client = httpx.AsyncClient( + timeout=config.connection_timeout_seconds, + ) + + async def __aenter__(self) -> "AsyncApiCall": + """Async context manager entry.""" + return self + + async def __aexit__( + self, + exc_type: typing.Optional[typing.Type[BaseException]], + exc_val: typing.Optional[BaseException], + exc_tb: typing.Optional[TracebackType], + ) -> None: + """Async context manager exit.""" + await self._client.aclose() + + async def aclose(self) -> None: + """Close the httpx client.""" + await self._client.aclose() + + @typing.overload + async def get( + self, + endpoint: str, + entity_type: typing.Type[TEntityDict], + as_json: typing.Literal[False], + params: typing.Union[TParams, None] = None, + ) -> str: + """ + Execute an async GET request to the Typesense API. + + Args: + endpoint (str): The API endpoint to call. + entity_type (Type[TEntityDict]): The expected type of the response entity. + as_json (False): Whether to return the response as JSON. Defaults to True. + params (Union[TParams, None], optional): Query parameters for the request. + + Returns: + str: The response, as a string. + """ + + @typing.overload + async def get( + self, + endpoint: str, + entity_type: typing.Type[TEntityDict], + as_json: typing.Literal[True] = True, + params: typing.Union[TParams, None] = None, + ) -> TEntityDict: + """ + Execute an async GET request to the Typesense API. + + Args: + endpoint (str): The API endpoint to call. + entity_type (Type[TEntityDict]): The expected type of the response entity. + as_json (True): Whether to return the response as JSON. Defaults to True. + params (Union[TParams, None], optional): Query parameters for the request. + + Returns: + EntityDict: The response, as a JSON object. 
+ """ + + async def get( + self, + endpoint: str, + entity_type: typing.Type[TEntityDict], + as_json: typing.Union[typing.Literal[True], typing.Literal[False]] = True, + params: typing.Union[TParams, None] = None, + ) -> typing.Union[TEntityDict, str]: + """ + Execute an async GET request to the Typesense API. + + Args: + endpoint (str): The API endpoint to call. + entity_type (Type[TEntityDict]): The expected type of the response entity. + as_json (bool): Whether to return the response as JSON. Defaults to True. + params (Union[TParams, None], optional): Query parameters for the request. + + Returns: + Union[TEntityDict, str]: The response, either as a JSON object or a string. + """ + return await self._execute_request( + "GET", + endpoint, + entity_type, + as_json, + params=params, + ) + + @typing.overload + async def post( + self, + endpoint: str, + entity_type: typing.Type[TEntityDict], + as_json: typing.Literal[False], + params: typing.Union[TParams, None] = None, + body: typing.Union[TBody, None] = None, + ) -> str: + """ + Execute an async POST request to the Typesense API. + + Args: + endpoint (str): The API endpoint to call. + entity_type (Type[TEntityDict]): The expected type of the response entity. + as_json (False): Whether to return the response as JSON. Defaults to True. + params (Union[TParams, None], optional): Query parameters for the request. + body (Union[TBody, None], optional): Request body. + + Returns: + str: The response, as a string. + """ + + @typing.overload + async def post( + self, + endpoint: str, + entity_type: typing.Type[TEntityDict], + as_json: typing.Literal[True] = True, + params: typing.Union[TParams, None] = None, + body: typing.Union[TBody, None] = None, + ) -> TEntityDict: + """ + Execute an async POST request to the Typesense API. + + Args: + endpoint (str): The API endpoint to call. + entity_type (Type[TEntityDict]): The expected type of the response entity. + as_json (True): Whether to return the response as JSON. 
Defaults to True. + params (Union[TParams, None], optional): Query parameters for the request. + body (Union[TBody, None], optional): Request body. + + Returns: + EntityDict: The response, as a JSON object. + """ + + async def post( + self, + endpoint: str, + entity_type: typing.Type[TEntityDict], + as_json: typing.Union[typing.Literal[True], typing.Literal[False]] = True, + params: typing.Union[TParams, None] = None, + body: typing.Union[TBody, None] = None, + ) -> typing.Union[str, TEntityDict]: + """ + Execute an async POST request to the Typesense API. + + Args: + endpoint (str): The API endpoint to call. + entity_type (Type[TEntityDict]): The expected type of the response entity. + as_json (bool): Whether to return the response as JSON. Defaults to True. + params (Union[TParams, None], optional): Query parameters for the request. + body (Union[TBody, None], optional): Request body. + + Returns: + Union[TEntityDict, str]: The response, either as a JSON object or a string. + """ + return await self._execute_request( + "POST", + endpoint, + entity_type, + as_json, + params=params, + data=body, + ) + + async def put( + self, + endpoint: str, + entity_type: typing.Type[TEntityDict], + body: TBody, + params: typing.Union[TParams, None] = None, + ) -> TEntityDict: + """ + Execute an async PUT request to the Typesense API. + + Args: + endpoint (str): The API endpoint to call. + entity_type (Type[TEntityDict]): The expected type of the response entity. + params (Union[TParams, None], optional): Query parameters for the request. + body (TBody): Request body. + + Returns: + EntityDict: The response, as a JSON object. + """ + return await self._execute_request( + "PUT", + endpoint, + entity_type, + as_json=True, + params=params, + data=body, + ) + + async def patch( + self, + endpoint: str, + entity_type: typing.Type[TEntityDict], + body: TBody, + params: typing.Union[TParams, None] = None, + ) -> TEntityDict: + """ + Execute an async PATCH request to the Typesense API. 
+ + Args: + endpoint (str): The API endpoint to call. + entity_type (Type[TEntityDict]): The expected type of the response entity. + params (Union[TParams, None], optional): Query parameters for the request. + body (TBody): Request body. + + Returns: + EntityDict: The response, as a JSON object. + """ + return await self._execute_request( + "PATCH", + endpoint, + entity_type, + as_json=True, + params=params, + data=body, + ) + + async def delete( + self, + endpoint: str, + entity_type: typing.Type[TEntityDict], + params: typing.Union[TParams, None] = None, + ) -> TEntityDict: + """ + Execute an async DELETE request to the Typesense API. + + Args: + endpoint (str): The API endpoint to call. + entity_type (Type[TEntityDict]): The expected type of the response entity. + params (Union[TParams, None], optional): Query parameters for the request. + + Returns: + EntityDict: The response, as a JSON object. + """ + return await self._execute_request( + "DELETE", + endpoint, + entity_type, + as_json=True, + params=params, + ) + + @typing.overload + async def _execute_request( + self, + method: str, + endpoint: str, + entity_type: typing.Type[TEntityDict], + as_json: typing.Literal[True], + last_exception: typing.Union[None, Exception] = None, + num_retries: int = 0, + **kwargs: typing.Unpack[SessionFunctionKwargs[TParams, TBody]], + ) -> TEntityDict: + """Execute an async request with retry logic.""" + + @typing.overload + async def _execute_request( + self, + method: str, + endpoint: str, + entity_type: typing.Type[TEntityDict], + as_json: typing.Literal[False], + last_exception: typing.Union[None, Exception] = None, + num_retries: int = 0, + **kwargs: typing.Unpack[SessionFunctionKwargs[TParams, TBody]], + ) -> str: + """Execute an async request with retry logic.""" + + async def _execute_request( + self, + method: str, + endpoint: str, + entity_type: typing.Type[TEntityDict], + as_json: typing.Union[typing.Literal[True], typing.Literal[False]] = True, + last_exception: 
typing.Union[None, Exception] = None, + num_retries: int = 0, + **kwargs: typing.Unpack[SessionFunctionKwargs[TParams, TBody]], + ) -> typing.Union[TEntityDict, str]: + """ + Execute an async request to the Typesense API with retry logic. + + This method handles the actual execution of the request, including + node selection, error handling, and retries. + + Args: + method (str): The HTTP method to use (GET, POST, PUT, PATCH, DELETE). + endpoint (str): The API endpoint to call. + entity_type (Type[TEntityDict]): The expected type of the response entity. + as_json (bool): Whether to return the response as JSON. Defaults to True. + last_exception (Union[None, Exception], optional): The last exception encountered. + num_retries (int): The current number of retries attempted. + kwargs: Additional keyword arguments for the request. + + Returns: + Union[TEntityDict, str]: The response, either as a JSON object or a string. + + Raises: + TypesenseClientError: If all nodes are unhealthy or max retries are exceeded. 
+ """ + if num_retries > self.config.num_retries: + if last_exception: + raise last_exception + raise TypesenseClientError("All nodes are unhealthy") + + node, url, request_kwargs = self._prepare_request_params(endpoint, **kwargs) + + try: + return await self._make_request_and_process_response( + method, + url, + entity_type, + as_json, + **request_kwargs, + ) + except _SERVER_ERRORS as server_error: + self.node_manager.set_node_health(node, is_healthy=False) + return await self._execute_request( + method, + endpoint, + entity_type, + as_json, + last_exception=server_error, + num_retries=num_retries + 1, + **kwargs, + ) + + async def _make_request_and_process_response( + self, + method: str, + url: str, + entity_type: typing.Type[TEntityDict], + as_json: bool, + **kwargs: typing.Unpack[SessionFunctionKwargs[TParams, TBody]], + ) -> typing.Union[TEntityDict, str]: + """Make the async API request and process the response.""" + request_response = await self.request_handler.make_request( + method=method, + url=url, + as_json=as_json, + entity_type=entity_type, + client=self._client, + **kwargs, + ) + self.node_manager.set_node_health( + self.node_manager.get_node(), + is_healthy=True, + ) + return ( + typing.cast(TEntityDict, request_response) + if as_json + else typing.cast(str, request_response) + ) + + def _prepare_request_params( + self, + endpoint: str, + **kwargs: typing.Unpack[SessionFunctionKwargs[TParams, TBody]], + ) -> typing.Tuple[Node, str, SessionFunctionKwargs[TParams, TBody]]: + """ + Prepare request parameters including node selection and URL construction. + + Args: + endpoint: The API endpoint path. + **kwargs: Request parameters following SessionFunctionKwargs structure. + + Returns: + Tuple of (node, full_url, kwargs_dict) where kwargs_dict contains + the request parameters as a regular dict for further processing. 
+ """ + node = self.node_manager.get_node() + url = node.url() + endpoint + + if params := kwargs.get("params"): + self.request_handler.normalize_params(params) + + return node, url, kwargs diff --git a/src/typesense/async_/client.py b/src/typesense/async_/client.py new file mode 100644 index 0000000..1ecb807 --- /dev/null +++ b/src/typesense/async_/client.py @@ -0,0 +1,168 @@ +""" +This module provides the main async client interface for interacting with the Typesense API. + +It contains the AsyncClient class, which serves as the entry point for all Typesense operations, +integrating various components like collections, multi-search, keys, aliases, analytics, etc. + +Classes: + AsyncClient: The main async client class for interacting with Typesense. + +Dependencies: + - typesense.aliases: Provides the AsyncAliases class. + - typesense.analytics: Provides the AsyncAnalytics class. + - typesense.api_call: Provides the AsyncApiCall class for making API requests. + - typesense.collection: Provides the AsyncCollection class. + - typesense.collections: Provides the AsyncCollections class. + - typesense.configuration: Provides AsyncConfiguration and ConfigDict types. + - typesense.conversations_models: Provides the AsyncConversationsModels class. + - typesense.debug: Provides the AsyncDebug class. + - typesense.keys: Provides the AsyncKeys class. + - typesense.metrics: Provides the AsyncMetrics class. + - typesense.multi_search: Provides the AsyncMultiSearch class. + - typesense.operations: Provides the AsyncOperations class. + - typesense.stopwords: Provides the AsyncStopwords class. + - typesense.types.document: Provides the AsyncDocumentSchema type. + +Note: This module uses conditional imports to support both Python 3.11+ and earlier versions. 
+""" + +import sys + +from typing_extensions import deprecated + +from typesense.types.document import DocumentSchema + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + +from .aliases import AsyncAliases +from .analytics import AsyncAnalytics +from .analytics_v1 import AsyncAnalyticsV1 +from .api_call import AsyncApiCall +from .collection import AsyncCollection +from .collections import AsyncCollections +from .conversations_models import AsyncConversationsModels +from .curation_sets import AsyncCurationSets +from .debug import AsyncDebug +from .keys import AsyncKeys +from .metrics import AsyncMetrics +from .multi_search import AsyncMultiSearch +from .nl_search_models import AsyncNLSearchModels +from .operations import AsyncOperations +from .stemming import AsyncStemming +from .stopwords import AsyncStopwords +from .synonym_sets import AsyncSynonymSets +from typesense.configuration import ConfigDict, Configuration + +TDoc = typing.TypeVar("TDoc", bound=DocumentSchema) + + +class AsyncClient: + """ + The main client class for interacting with Typesense. + + This class serves as the entry point for all Typesense operations. It initializes + and provides access to various components of the Typesense SDK, such as collections, + multi-search, keys, aliases, analytics, stemming, operations, debug, stopwords, + and conversation models. + + Attributes: + config (Configuration): The configuration object for the Typesense client. + api_call (ApiCall): The ApiCall instance for making API requests. + collections (Collections[DocumentSchema]): Instance for managing collections. + multi_search (MultiSearch): Instance for performing multi-search operations. + keys (Keys): Instance for managing API keys. + aliases (Aliases): Instance for managing collection aliases. + analyticsV1 (AnalyticsV1): Instance for analytics operations (V1). + analytics (Analytics): Instance for analytics operations (v30). 
+ curation_sets (CurationSets): Instance for Curation Sets (v30+) + stemming (Stemming): Instance for stemming dictionary operations. + operations (Operations): Instance for various Typesense operations. + debug (Debug): Instance for debug operations. + stopwords (Stopwords): Instance for managing stopwords. + metrics (Metrics): Instance for retrieving system and Typesense metrics. + conversations_models (ConversationsModels): Instance for managing conversation models. + """ + + def __init__(self, config_dict: ConfigDict) -> None: + """ + Initialize the Client instance. + + Args: + config_dict (ConfigDict): + A dictionary containing the configuration for the Typesense client. + + Example: + >>> config = { + ... "api_key": "your_api_key", + ... "nodes": [ + ... {"host": "localhost", "port": "8108", "protocol": "http"} + ... ], + ... "connection_timeout_seconds": 2, + ... } + >>> client = Client(config) + """ + self.config = Configuration(config_dict) + self.api_call = AsyncApiCall(self.config) + self.collections: AsyncCollections[DocumentSchema] = AsyncCollections( + self.api_call + ) + self.multi_search = AsyncMultiSearch(self.api_call) + self.keys = AsyncKeys(self.api_call) + self.aliases = AsyncAliases(self.api_call) + self._analyticsV1 = AsyncAnalyticsV1(self.api_call) + self.analytics = AsyncAnalytics(self.api_call) + self.stemming = AsyncStemming(self.api_call) + self.curation_sets = AsyncCurationSets(self.api_call) + self.operations = AsyncOperations(self.api_call) + self.debug = AsyncDebug(self.api_call) + self.stopwords = AsyncStopwords(self.api_call) + self.synonym_sets = AsyncSynonymSets(self.api_call) + self.metrics = AsyncMetrics(self.api_call) + self.conversations_models = AsyncConversationsModels(self.api_call) + self.nl_search_models = AsyncNLSearchModels(self.api_call) + + @property + @deprecated( + "AnalyticsV1 is deprecated on v30+. 
Use client.analytics instead.", + category=None, + ) + def analyticsV1(self) -> AsyncAnalyticsV1: + return self._analyticsV1 + + def typed_collection( + self, + *, + model: typing.Type[TDoc], + name: typing.Union[str, None] = None, + ) -> AsyncCollection[TDoc]: + """ + Get a AsyncCollection instance for a specific document model. + + This method allows retrieving a AsyncCollection instance typed to a specific document model. + If no name is provided, it uses the lowercase name of the model class as + the collection name. + + Args: + model (Type[TDoc]): The document model class. + name (Union[str, None], optional): + The name of the collection. If None, uses the lowercase model class name. + + Returns: + AsyncCollection[TDoc]: An AsyncCollection instance typed to the specified document model. + + Example: + >>> class Company(DocumentSchema): + ... name: str + ... num_employees: int + >>> client = Client(config) + >>> companies_collection = client.typed_collection(model=Company) + # This is equivalent to: + # companies_collection = client.typed_collection(model=Company, name="company") + """ + if name is None: + name = model.__name__.lower() + collection: AsyncCollection[TDoc] = self.collections[name] + return collection diff --git a/src/typesense/async_/collection.py b/src/typesense/async_/collection.py new file mode 100644 index 0000000..5851573 --- /dev/null +++ b/src/typesense/async_/collection.py @@ -0,0 +1,160 @@ +""" +This module provides async functionality for managing individual collections in the Typesense API. + +It contains the AsyncCollection class, which allows for retrieving, updating, and deleting +collections asynchronously. + +Classes: + AsyncCollection: Manages async operations on a single collection in the Typesense API. + +Dependencies: + - typesense.async_api_call: Provides the AsyncApiCall class for making async API requests. + - typesense.types.collection: Provides CollectionSchema and CollectionUpdateSchema types. 
+ - typesense.types.document: Provides DocumentSchema type. + +Note: This module uses conditional imports to support both Python 3.11+ and earlier versions. +""" + +import sys + +from typing_extensions import deprecated + +from typesense.types.collection import CollectionSchema, CollectionUpdateSchema + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + +from .api_call import AsyncApiCall +from .documents import AsyncDocuments +from .overrides import AsyncOverrides +from .synonyms import AsyncSynonyms +from typesense.types.document import DocumentSchema + +TDoc = typing.TypeVar("TDoc", bound=DocumentSchema, covariant=True) + + +class AsyncCollection(typing.Generic[TDoc]): + """ + Manages async operations on a single collection in the Typesense API. + + This class provides async methods to retrieve, update, and delete a collection. + It is generic over the document type TDoc, which should be a subtype of DocumentSchema. + + Attributes: + name (str): The name of the collection. + api_call (AsyncApiCall): The AsyncApiCall instance for making async API requests. + """ + + def __init__(self, api_call: AsyncApiCall, name: str): + """ + Initialize the AsyncCollection instance. + + Args: + api_call (AsyncApiCall): The AsyncApiCall instance for making async API requests. + name (str): The name of the collection. + """ + self.name = name + self.api_call = api_call + + self.documents: AsyncDocuments[TDoc] = AsyncDocuments(api_call, name) + self._overrides = AsyncOverrides(api_call, name) + self._synonyms = AsyncSynonyms(api_call, name) + + async def retrieve(self) -> CollectionSchema: + """ + Retrieve the schema of this collection from Typesense. + + Returns: + CollectionSchema: The schema of the collection. 
+ """ + response: CollectionSchema = await self.api_call.get( + endpoint=self._endpoint_path, + entity_type=CollectionSchema, + as_json=True, + ) + return response + + async def update( + self, schema_change: CollectionUpdateSchema + ) -> CollectionUpdateSchema: + """ + Update the schema of this collection in Typesense. + + Args: + schema_change (CollectionUpdateSchema): + The changes to apply to the collection schema. + + Returns: + CollectionUpdateSchema: The updated schema of the collection. + """ + response: CollectionUpdateSchema = await self.api_call.patch( + endpoint=self._endpoint_path, + body=schema_change, + entity_type=CollectionUpdateSchema, + ) + return response + + async def delete( + self, + delete_parameters: typing.Union[ + typing.Dict[str, typing.Union[str, bool]], + None, + ] = None, + ) -> CollectionSchema: + """ + Delete this collection from Typesense. + + Args: + delete_parameters (Union[Dict[str, Union[str, bool]], None], optional): + Additional parameters for the delete operation. Defaults to None. + + Returns: + CollectionSchema: The schema of the deleted collection. + """ + response: CollectionSchema = await self.api_call.delete( + self._endpoint_path, + entity_type=CollectionSchema, + params=delete_parameters, + ) + return response + + @property + @deprecated( + "Overrides is deprecated on v30+. Use client.curation_sets instead.", + category=None, + ) + def overrides(self) -> AsyncOverrides: + """Return the AsyncOverrides instance for this collection. + + Returns: + AsyncOverrides: The AsyncOverrides instance for this collection. + """ + return self._overrides + + @property + @deprecated( + "Synonyms is deprecated on v30+. Use client.synonym_sets instead.", + category=None, + ) + def synonyms(self) -> AsyncSynonyms: + """Return the AsyncSynonyms instance for this collection. + + Returns: + AsyncSynonyms: The AsyncSynonyms instance for this collection. 
+ """ + """Return the AsyncSynonyms instance for this collection.""" + return self._synonyms + + @property + def _endpoint_path(self) -> str: + """ + Get the API endpoint path for this collection. + + Returns: + str: The full endpoint path for the collection. + """ + from .collections import AsyncCollections + + return "/".join([AsyncCollections.resource_path, self.name]) diff --git a/src/typesense/async_/collections.py b/src/typesense/async_/collections.py new file mode 100644 index 0000000..79bd3ba --- /dev/null +++ b/src/typesense/async_/collections.py @@ -0,0 +1,163 @@ +""" +This module provides async functionality for managing collections in the Typesense API. + +It contains the AsyncCollections class, which allows for creating, retrieving, and +accessing individual collections asynchronously. + +Classes: + AsyncCollections: Manages collections in the Typesense API (async). + +Dependencies: + - typesense.async_api_call: Provides the AsyncApiCall class for making async API requests. + - typesense.async_collection: Provides the AsyncCollection class for individual collection operations. + - typesense.types.collection: Provides CollectionCreateSchema and CollectionSchema types. + - typesense.types.document: Provides DocumentSchema type. + +Note: This module uses conditional imports to support both Python 3.11+ and earlier versions. +""" + +import sys + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + +from .api_call import AsyncApiCall +from .collection import AsyncCollection +from typesense.types.collection import CollectionCreateSchema, CollectionSchema +from typesense.types.document import DocumentSchema + +TDoc = typing.TypeVar("TDoc", bound=DocumentSchema, covariant=True) + + +class AsyncCollections(typing.Generic[TDoc]): + """ + Manages collections in the Typesense API (async). + + This class provides async methods to create, retrieve, and access individual collections. 
+    It is generic over the document type TDoc, which should be a subtype of DocumentSchema.
+
+    Attributes:
+        resource_path (str): The API endpoint path for collections operations.
+        api_call (AsyncApiCall): The AsyncApiCall instance for making async API requests.
+        collections (Dict[str, AsyncCollection[TDoc]]):
+            A dictionary of AsyncCollection instances, keyed by collection name.
+    """
+
+    resource_path: typing.Final[str] = "/collections"
+
+    def __init__(self, api_call: AsyncApiCall):
+        """
+        Initialize the AsyncCollections instance.
+
+        Args:
+            api_call (AsyncApiCall): The AsyncApiCall instance for making async API requests.
+        """
+        self.api_call = api_call
+        self.collections: typing.Dict[str, AsyncCollection[TDoc]] = {}
+
+    async def __contains__(self, collection_name: str) -> bool:
+        """
+        Check whether a collection exists in Typesense (coroutine).
+
+        NOTE: the ``in`` operator does not await this coroutine (an unawaited
+        coroutine is always truthy); call ``await collections.__contains__(name)``.
+
+        Args:
+            collection_name (str): The name of the collection to check.
+
+        Returns:
+            bool: True if the collection exists, False otherwise.
+        """
+        if collection_name in self.collections:
+            try:
+                await self.collections[collection_name].retrieve()
+                return True
+            except Exception:
+                self.collections.pop(collection_name, None)
+                return False
+
+        try:
+            await AsyncCollection(self.api_call, collection_name).retrieve()
+            return True
+        except Exception:
+            return False
+
+    def __getitem__(self, collection_name: str) -> AsyncCollection[TDoc]:
+        """
+        Get or create an AsyncCollection instance for a given collection name.
+
+        This method allows accessing collections using dictionary-like syntax.
+        If the AsyncCollection instance doesn't exist, it creates a new one.
+
+        Args:
+            collection_name (str): The name of the collection to access.
+
+        Returns:
+            AsyncCollection[TDoc]: The AsyncCollection instance for the specified collection name.
+ + Example: + >>> collections = AsyncCollections(async_api_call) + >>> fruits_collection = collections["fruits"] + """ + if not self.collections.get(collection_name): + self.collections[collection_name] = AsyncCollection( + self.api_call, + collection_name, + ) + return self.collections[collection_name] + + async def create(self, schema: CollectionCreateSchema) -> CollectionSchema: + """ + Create a new collection in Typesense. + + Args: + schema (CollectionCreateSchema): + The schema defining the structure of the new collection. + + Returns: + CollectionSchema: + The schema of the created collection, as returned by the API. + + Example: + >>> collections = AsyncCollections(async_api_call) + >>> schema = { + ... "name": "companies", + ... "fields": [ + ... {"name": "company_name", "type": "string"}, + ... {"name": "num_employees", "type": "int32"}, + ... {"name": "country", "type": "string", "facet": True}, + ... ], + ... "default_sorting_field": "num_employees", + ... } + >>> created_schema = await collections.create(schema) + """ + call: CollectionSchema = await self.api_call.post( + endpoint=AsyncCollections.resource_path, + entity_type=CollectionSchema, + as_json=True, + body=schema, + ) + return call + + async def retrieve(self) -> typing.List[CollectionSchema]: + """ + Retrieve all collections from Typesense. + + Returns: + List[CollectionSchema]: + A list of schemas for all collections in the Typesense instance. + + Example: + >>> collections = AsyncCollections(async_api_call) + >>> all_collections = await collections.retrieve() + >>> for collection in all_collections: + ... 
print(collection["name"]) + """ + call: typing.List[CollectionSchema] = await self.api_call.get( + endpoint=AsyncCollections.resource_path, + as_json=True, + entity_type=typing.List[CollectionSchema], + ) + return call diff --git a/src/typesense/async_/conversation_model.py b/src/typesense/async_/conversation_model.py new file mode 100644 index 0000000..31cfbb1 --- /dev/null +++ b/src/typesense/async_/conversation_model.py @@ -0,0 +1,104 @@ +""" +This module provides async functionality for managing individual conversation models in Typesense. + +It contains the AsyncConversationModel class, which allows for retrieving, updating, and deleting +conversation models asynchronously. + +Classes: + AsyncConversationModel: Manages async operations on a single conversation model in the Typesense API. + +Dependencies: + - typesense.async_api_call: Provides the AsyncApiCall class for making async API requests. + - typesense.types.conversations_model: Provides ConversationModelCreateSchema, ConversationModelDeleteSchema, and ConversationModelSchema types. + +Note: This module uses conditional imports to support both Python 3.11+ and earlier versions. +""" + +from .api_call import AsyncApiCall +from typesense.types.conversations_model import ( + ConversationModelCreateSchema, + ConversationModelDeleteSchema, + ConversationModelSchema, +) + + +class AsyncConversationModel: + """ + Manages async operations on a single conversation model in the Typesense API. + + This class provides async methods to retrieve, update, and delete a conversation model. + + Attributes: + model_id (str): The ID of the conversation model. + api_call (AsyncApiCall): The AsyncApiCall instance for making async API requests. + """ + + def __init__(self, api_call: AsyncApiCall, model_id: str) -> None: + """ + Initialize the AsyncConversationModel instance. + + Args: + api_call (AsyncApiCall): The AsyncApiCall instance for making async API requests. + model_id (str): The ID of the conversation model. 
+ """ + self.model_id = model_id + self.api_call = api_call + + async def retrieve(self) -> ConversationModelSchema: + """ + Retrieve this specific conversation model. + + Returns: + ConversationModelSchema: The schema containing the conversation model details. + """ + response: ConversationModelSchema = await self.api_call.get( + self._endpoint_path, + as_json=True, + entity_type=ConversationModelSchema, + ) + return response + + async def update( + self, model: ConversationModelCreateSchema + ) -> ConversationModelSchema: + """ + Update this specific conversation model. + + Args: + model (ConversationModelCreateSchema): + The schema containing the updated model details. + + Returns: + ConversationModelSchema: The schema containing the updated conversation model. + """ + response: ConversationModelSchema = await self.api_call.put( + self._endpoint_path, + body=model, + entity_type=ConversationModelSchema, + ) + return response + + async def delete(self) -> ConversationModelDeleteSchema: + """ + Delete this specific conversation model. + + Returns: + ConversationModelDeleteSchema: The schema containing the deletion response. + """ + response: ConversationModelDeleteSchema = await self.api_call.delete( + self._endpoint_path, + entity_type=ConversationModelDeleteSchema, + ) + return response + + @property + def _endpoint_path(self) -> str: + """ + Construct the API endpoint path for this specific conversation model. + + Returns: + str: The constructed endpoint path. + """ + from .conversations_models import AsyncConversationsModels + + return "/".join([AsyncConversationsModels.resource_path, self.model_id]) diff --git a/src/typesense/async_/conversations_models.py b/src/typesense/async_/conversations_models.py new file mode 100644 index 0000000..6d36296 --- /dev/null +++ b/src/typesense/async_/conversations_models.py @@ -0,0 +1,131 @@ +""" +This module provides async functionality for managing conversation models in Typesense. 
+ +It contains the AsyncConversationsModels class, which allows for creating, retrieving, and +accessing individual conversation models asynchronously. + +Classes: + AsyncConversationsModels: Manages conversation models in the Typesense API (async). + +Dependencies: + - typesense.async_api_call: Provides the AsyncApiCall class for making async API requests. + - typesense.async_conversation_model: Provides the AsyncConversationModel class for individual conversation model operations. + - typesense.types.conversations_model: Provides ConversationModelCreateSchema and ConversationModelSchema types. + +Note: This module uses conditional imports to support both Python 3.11+ and earlier versions. +""" + +import sys + +from .api_call import AsyncApiCall +from .conversation_model import AsyncConversationModel +from typesense.types.conversations_model import ( + ConversationModelCreateSchema, + ConversationModelSchema, +) + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + + +class AsyncConversationsModels: + """ + Manages conversation models in the Typesense API (async). + + This class provides async methods to create, retrieve, and access individual conversation models. + + Attributes: + resource_path (str): The API endpoint path for conversation models operations. + api_call (AsyncApiCall): The AsyncApiCall instance for making async API requests. + conversations_models (Dict[str, AsyncConversationModel]): + A dictionary of AsyncConversationModel instances, keyed by model ID. + """ + + resource_path: typing.Final[str] = "/conversations/models" + + def __init__(self, api_call: AsyncApiCall) -> None: + """ + Initialize the AsyncConversationsModels instance. + + Args: + api_call (AsyncApiCall): The AsyncApiCall instance for making async API requests. 
+ """ + self.api_call = api_call + self.conversations_models: typing.Dict[str, AsyncConversationModel] = {} + + def __getitem__(self, model_id: str) -> AsyncConversationModel: + """ + Get or create an AsyncConversationModel instance for a given model ID. + + This method allows accessing conversation models using dictionary-like syntax. + If the AsyncConversationModel instance doesn't exist, it creates a new one. + + Args: + model_id (str): The ID of the conversation model. + + Returns: + AsyncConversationModel: The AsyncConversationModel instance for the specified model ID. + + Example: + >>> conversations_models = AsyncConversationsModels(async_api_call) + >>> model = conversations_models["model_id"] + """ + if model_id not in self.conversations_models: + self.conversations_models[model_id] = AsyncConversationModel( + self.api_call, + model_id, + ) + return self.conversations_models[model_id] + + async def create( + self, model: ConversationModelCreateSchema + ) -> ConversationModelSchema: + """ + Create a new conversation model. + + Args: + model (ConversationModelCreateSchema): + The schema for creating the conversation model. + + Returns: + ConversationModelSchema: The created conversation model. + + Example: + >>> conversations_models = AsyncConversationsModels(async_api_call) + >>> model = await conversations_models.create( + ... { + ... "api_key": "key", + ... "model_name": "openai/gpt-3.5-turbo", + ... "history_collection": "conversation_store", + ... } + ... ) + """ + response: ConversationModelSchema = await self.api_call.post( + endpoint=AsyncConversationsModels.resource_path, + entity_type=ConversationModelSchema, + as_json=True, + body=model, + ) + return response + + async def retrieve(self) -> typing.List[ConversationModelSchema]: + """ + Retrieve all conversation models. + + Returns: + List[ConversationModelSchema]: A list of all conversation models. 
+ + Example: + >>> conversations_models = AsyncConversationsModels(async_api_call) + >>> all_models = await conversations_models.retrieve() + >>> for model in all_models: + ... print(model["id"]) + """ + response: typing.List[ConversationModelSchema] = await self.api_call.get( + endpoint=AsyncConversationsModels.resource_path, + entity_type=typing.List[ConversationModelSchema], + as_json=True, + ) + return response diff --git a/src/typesense/async_/curation_set.py b/src/typesense/async_/curation_set.py new file mode 100644 index 0000000..b2ce069 --- /dev/null +++ b/src/typesense/async_/curation_set.py @@ -0,0 +1,211 @@ +""" +This module provides async functionality for managing individual curation sets in Typesense. + +It contains the AsyncCurationSet class, which allows for retrieving, updating, deleting, +and managing items within a curation set asynchronously. + +Classes: + AsyncCurationSet: Manages async operations on a single curation set in the Typesense API. + +Dependencies: + - typesense.async_api_call: Provides the AsyncApiCall class for making async API requests. + - typesense.types.curation_set: Provides various curation set schema types. + +Note: This module uses conditional imports to support both Python 3.11+ and earlier versions. +""" + +import sys + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + +from .api_call import AsyncApiCall +from typesense.types.curation_set import ( + CurationItemDeleteSchema, + CurationItemSchema, + CurationSetDeleteSchema, + CurationSetListItemResponseSchema, + CurationSetSchema, + CurationSetUpsertSchema, +) + + +class AsyncCurationSet: + """ + Manages async operations on a single curation set in the Typesense API. + + This class provides async methods to retrieve, update, and delete a curation set, + as well as manage items within the curation set. + + Attributes: + api_call (AsyncApiCall): The AsyncApiCall instance for making async API requests. 
+ name (str): The name of the curation set. + """ + + def __init__(self, api_call: AsyncApiCall, name: str) -> None: + """ + Initialize the AsyncCurationSet instance. + + Args: + api_call (AsyncApiCall): The AsyncApiCall instance for making async API requests. + name (str): The name of the curation set. + """ + self.api_call = api_call + self.name = name + + @property + def _endpoint_path(self) -> str: + """ + Get the API endpoint path for this curation set. + + Returns: + str: The full endpoint path for the curation set. + """ + from .curation_sets import AsyncCurationSets + + return "/".join([AsyncCurationSets.resource_path, self.name]) + + async def retrieve(self) -> CurationSetSchema: + """ + Retrieve this specific curation set. + + Returns: + CurationSetSchema: The schema containing the curation set details. + """ + response: CurationSetSchema = await self.api_call.get( + self._endpoint_path, + as_json=True, + entity_type=CurationSetSchema, + ) + return response + + async def delete(self) -> CurationSetDeleteSchema: + """ + Delete this specific curation set. + + Returns: + CurationSetDeleteSchema: The schema containing the deletion response. + """ + response: CurationSetDeleteSchema = await self.api_call.delete( + self._endpoint_path, + entity_type=CurationSetDeleteSchema, + ) + return response + + async def upsert( + self, + payload: CurationSetUpsertSchema, + ) -> CurationSetSchema: + """ + Create or update this curation set. + + Args: + payload (CurationSetUpsertSchema): The schema for creating or updating the curation set. + + Returns: + CurationSetSchema: The created or updated curation set. + """ + response: CurationSetSchema = await self.api_call.put( + "/".join([self._endpoint_path]), + body=payload, + entity_type=CurationSetSchema, + ) + return response + + # Items sub-resource + @property + def _items_path(self) -> str: + """ + Get the API endpoint path for items in this curation set. 
+ + Returns: + str: The full endpoint path for items (e.g., /curation_sets/{name}/items). + """ + return "/".join([self._endpoint_path, "items"]) + + async def list_items( + self, + *, + limit: typing.Union[int, None] = None, + offset: typing.Union[int, None] = None, + ) -> CurationSetListItemResponseSchema: + """ + List items in this curation set. + + Args: + limit (Union[int, None], optional): Maximum number of items to return. Defaults to None. + offset (Union[int, None], optional): Number of items to skip. Defaults to None. + + Returns: + CurationSetListItemResponseSchema: The list of items in the curation set. + """ + params: typing.Dict[str, typing.Union[int, None]] = { + "limit": limit, + "offset": offset, + } + # Filter out None values to avoid sending them + clean_params: typing.Dict[str, int] = { + k: v for k, v in params.items() if v is not None + } + response: CurationSetListItemResponseSchema = await self.api_call.get( + self._items_path, + as_json=True, + entity_type=CurationSetListItemResponseSchema, + params=clean_params or None, + ) + return response + + async def get_item(self, item_id: str) -> CurationItemSchema: + """ + Get a specific item from this curation set. + + Args: + item_id (str): The ID of the item to retrieve. + + Returns: + CurationItemSchema: The item schema. + """ + response: CurationItemSchema = await self.api_call.get( + "/".join([self._items_path, item_id]), + as_json=True, + entity_type=CurationItemSchema, + ) + return response + + async def upsert_item( + self, item_id: str, item: CurationItemSchema + ) -> CurationItemSchema: + """ + Create or update an item in this curation set. + + Args: + item_id (str): The ID of the item. + item (CurationItemSchema): The item schema. + + Returns: + CurationItemSchema: The created or updated item. 
+ """ + response: CurationItemSchema = await self.api_call.put( + "/".join([self._items_path, item_id]), + body=item, + entity_type=CurationItemSchema, + ) + return response + + async def delete_item(self, item_id: str) -> CurationItemDeleteSchema: + """ + Delete an item from this curation set. + + Args: + item_id (str): The ID of the item to delete. + + Returns: + CurationItemDeleteSchema: The deletion response. + """ + response: CurationItemDeleteSchema = await self.api_call.delete( + "/".join([self._items_path, item_id]), + entity_type=CurationItemDeleteSchema, + ) + return response diff --git a/src/typesense/async_/curation_sets.py b/src/typesense/async_/curation_sets.py new file mode 100644 index 0000000..b64b8e4 --- /dev/null +++ b/src/typesense/async_/curation_sets.py @@ -0,0 +1,91 @@ +""" +This module provides async functionality for managing curation sets in Typesense. + +It contains the AsyncCurationSets class, which allows for retrieving and +accessing individual curation sets asynchronously. + +Classes: + AsyncCurationSets: Manages curation sets in the Typesense API (async). + +Dependencies: + - typesense.async_api_call: Provides the AsyncApiCall class for making async API requests. + - typesense.async_curation_set: Provides the AsyncCurationSet class for individual curation set operations. + - typesense.types.curation_set: Provides CurationSetsListResponseSchema type. + +Note: This module uses conditional imports to support both Python 3.11+ and earlier versions. +""" + +import sys + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + +from .api_call import AsyncApiCall +from .curation_set import AsyncCurationSet +from typesense.types.curation_set import CurationSetsListResponseSchema + + +class AsyncCurationSets: + """ + Manages curation sets in the Typesense API (async). + + This class provides async methods to retrieve and access individual curation sets. 
+ + Attributes: + resource_path (str): The API endpoint path for curation sets operations. + api_call (AsyncApiCall): The AsyncApiCall instance for making async API requests. + """ + + resource_path: typing.Final[str] = "/curation_sets" + + def __init__(self, api_call: AsyncApiCall) -> None: + """ + Initialize the AsyncCurationSets instance. + + Args: + api_call (AsyncApiCall): The AsyncApiCall instance for making async API requests. + """ + self.api_call = api_call + + async def retrieve(self) -> CurationSetsListResponseSchema: + """ + Retrieve all curation sets. + + Returns: + CurationSetsListResponseSchema: The list of all curation sets. + + Example: + >>> curation_sets = AsyncCurationSets(async_api_call) + >>> all_sets = await curation_sets.retrieve() + >>> for set in all_sets: + ... print(set["name"]) + """ + response: CurationSetsListResponseSchema = await self.api_call.get( + AsyncCurationSets.resource_path, + as_json=True, + entity_type=CurationSetsListResponseSchema, + ) + return response + + def __getitem__(self, curation_set_name: str) -> AsyncCurationSet: + """ + Get or create an AsyncCurationSet instance for a given curation set name. + + This method allows accessing curation sets using dictionary-like syntax. + If the AsyncCurationSet instance doesn't exist, it creates a new one. + + Args: + curation_set_name (str): The name of the curation set. + + Returns: + AsyncCurationSet: The AsyncCurationSet instance for the specified name. + + Example: + >>> curation_sets = AsyncCurationSets(async_api_call) + >>> products_set = curation_sets["products"] + """ + from .curation_set import AsyncCurationSet as PerSet + + return PerSet(self.api_call, curation_set_name) diff --git a/src/typesense/async_/debug.py b/src/typesense/async_/debug.py new file mode 100644 index 0000000..ecf49cc --- /dev/null +++ b/src/typesense/async_/debug.py @@ -0,0 +1,71 @@ +""" +This module provides async functionality for accessing debug information in Typesense. 
+ +It contains the AsyncDebug class, which allows for retrieving debug information +asynchronously. + +Classes: + AsyncDebug: Manages async operations for accessing debug information in the Typesense API. + +Dependencies: + - typesense.async_api_call: Provides the AsyncApiCall class for making async API requests. + - typesense.types.debug: Provides DebugResponseSchema type. + +Note: This module uses conditional imports to support both Python 3.11+ and earlier versions. +""" + +import sys + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + +from .api_call import AsyncApiCall +from typesense.types.debug import DebugResponseSchema + + +class AsyncDebug: + """ + Manages async operations for accessing debug information in the Typesense API. + + This class provides async methods to retrieve debug information from the Typesense server, + which can be useful for system diagnostics and troubleshooting. + + Attributes: + resource_path (str): The API resource path for debug operations. + api_call (AsyncApiCall): The AsyncApiCall instance for making async API requests. + """ + + resource_path: typing.Final[str] = "/debug" + + def __init__(self, api_call: AsyncApiCall) -> None: + """ + Initialize the AsyncDebug instance. + + Args: + api_call (AsyncApiCall): The AsyncApiCall instance for making async API requests. + """ + self.api_call = api_call + + async def retrieve(self) -> DebugResponseSchema: + """ + Retrieve debug information from the Typesense server. + + This method sends an async GET request to the debug endpoint and returns + the server's debug information. + + Returns: + DebugResponseSchema: A schema containing the debug information. 
"""
Async management of a single document inside a Typesense collection.

Defines AsyncDocument, which wraps the per-document endpoints for fetching,
patching, and removing one document identified by its ID.

Classes:
    AsyncDocument: Async operations on a single document in the Typesense API.

Dependencies:
    - typesense.async_api_call: Provides the AsyncApiCall class for making async API requests.
    - typesense.types.document: Provides the document schema types.

Note: This module uses conditional imports to support both Python 3.11+ and earlier versions.
"""

import sys

from .api_call import AsyncApiCall
from typesense.types.document import (
    DeleteSingleDocumentParameters,
    DirtyValuesParameters,
    DocumentSchema,
    RetrieveParameters,
)

if sys.version_info >= (3, 11):
    import typing
else:
    import typing_extensions as typing

TDoc = typing.TypeVar("TDoc", bound=DocumentSchema)


class AsyncDocument(typing.Generic[TDoc]):
    """
    Async operations on a single document in the Typesense API.

    Wraps the retrieve, update, and delete endpoints for one document.

    Attributes:
        api_call (AsyncApiCall): The AsyncApiCall instance for making async API requests.
        collection_name (str): The name of the collection the document belongs to.
        document_id (str): The ID of the document.
    """

    def __init__(
        self,
        api_call: AsyncApiCall,
        collection_name: str,
        document_id: str,
    ) -> None:
        """
        Initialize the AsyncDocument instance.

        Args:
            api_call (AsyncApiCall): The AsyncApiCall instance for making async API requests.
            collection_name (str): The name of the collection.
            document_id (str): The ID of the document.
        """
        self.api_call = api_call
        self.collection_name = collection_name
        self.document_id = document_id

    async def retrieve(
        self,
        retrieve_parameters: typing.Union[RetrieveParameters, None] = None,
    ) -> TDoc:
        """
        Fetch this document from the server.

        Args:
            retrieve_parameters (Union[RetrieveParameters, None], optional):
                Optional query parameters for the retrieval.

        Returns:
            TDoc: The retrieved document.
        """
        fetched = await self.api_call.get(
            endpoint=self._endpoint_path,
            entity_type=typing.Dict[str, str],
            as_json=True,
            params=retrieve_parameters,
        )
        return typing.cast(TDoc, fetched)

    async def update(
        self,
        document: TDoc,
        dirty_values_parameters: typing.Union[DirtyValuesParameters, None] = None,
    ) -> TDoc:
        """
        Apply a partial update to this document.

        Args:
            document (TDoc): The updated document data.
            dirty_values_parameters (Union[DirtyValuesParameters, None], optional):
                Parameters controlling how dirty values are handled.

        Returns:
            TDoc: The updated document as returned by the server.
        """
        patched = await self.api_call.patch(
            self._endpoint_path,
            body=document,
            params=dirty_values_parameters,
            entity_type=typing.Dict[str, str],
        )
        return typing.cast(TDoc, patched)

    async def delete(
        self,
        delete_parameters: typing.Union[DeleteSingleDocumentParameters, None] = None,
    ) -> TDoc:
        """
        Remove this document from the collection.

        Args:
            delete_parameters (Union[DeleteSingleDocumentParameters, None], optional):
                Optional parameters for the deletion.

        Returns:
            TDoc: The deleted document as returned by the server.
        """
        removed = await self.api_call.delete(
            self._endpoint_path,
            entity_type=typing.Dict[str, str],
            params=delete_parameters,
        )
        return typing.cast(TDoc, removed)

    @property
    def _endpoint_path(self) -> str:
        """
        Build the endpoint path for this document.

        Returns:
            str: A path of the form ``/collections/<name>/documents/<id>``.
        """
        # Imported lazily to avoid a circular import with the collection modules.
        from .collections import AsyncCollections
        from .documents import AsyncDocuments

        segments = (
            AsyncCollections.resource_path,
            self.collection_name,
            AsyncDocuments.resource_path,
            self.document_id,
        )
        return "/".join(segments)
import json
import sys

from .api_call import AsyncApiCall
from .document import AsyncDocument
from typesense.exceptions import TypesenseClientError
from typesense.logger import logger
from typesense.preprocess import stringify_search_params
from typesense.types.document import (
    DeleteQueryParameters,
    DeleteResponse,
    DirtyValuesParameters,
    DocumentExportParameters,
    DocumentImportParameters,
    DocumentImportParametersReturnDoc,
    DocumentImportParametersReturnDocAndId,
    DocumentImportParametersReturnId,
    DocumentSchema,
    DocumentWriteParameters,
    ImportResponse,
    ImportResponseFail,
    ImportResponseSuccess,
    ImportResponseWithDoc,
    ImportResponseWithDocAndId,
    ImportResponseWithId,
    SearchParameters,
    SearchResponse,
    UpdateByFilterParameters,
    UpdateByFilterResponse,
)

# mypy: disable-error-code="misc"

if sys.version_info >= (3, 11):
    import typing
else:
    import typing_extensions as typing

TDoc = typing.TypeVar("TDoc", bound=DocumentSchema)

_ImportParameters = typing.Union[
    DocumentImportParameters,
    None,
]


class AsyncDocuments(typing.Generic[TDoc]):
    """
    Manages async operations on documents in the Typesense API.

    This class provides async methods to interact with documents, including
    creating, updating, importing, exporting, searching, and deleting them.

    Attributes:
        resource_path (str): The API resource path for document operations.
        api_call (AsyncApiCall): The AsyncApiCall instance for making async API requests.
        collection_name (str): The name of the collection.
        documents (Dict[str, AsyncDocument[TDoc]]): A dictionary of AsyncDocument instances.
    """

    resource_path: typing.Final[str] = "documents"

    def __init__(self, api_call: AsyncApiCall, collection_name: str) -> None:
        """
        Initialize the AsyncDocuments instance.

        Args:
            api_call (AsyncApiCall): The AsyncApiCall instance for making async API requests.
            collection_name (str): The name of the collection.
        """
        self.api_call = api_call
        self.collection_name = collection_name
        self.documents: typing.Dict[str, AsyncDocument[TDoc]] = {}

    def __getitem__(self, document_id: str) -> AsyncDocument[TDoc]:
        """
        Get or create an AsyncDocument instance for a given document ID.

        Args:
            document_id (str): The ID of the document.

        Returns:
            AsyncDocument[TDoc]: The AsyncDocument instance for the specified document ID.
        """
        if document_id not in self.documents:
            self.documents[document_id] = AsyncDocument(
                self.api_call,
                self.collection_name,
                document_id,
            )

        return self.documents[document_id]

    async def create(
        self,
        document: TDoc,
        dirty_values_parameters: typing.Union[DirtyValuesParameters, None] = None,
    ) -> TDoc:
        """
        Create a new document in the collection.

        Args:
            document (TDoc): The document to create.
            dirty_values_parameters (Union[DirtyValuesParameters, None], optional):
                Parameters for handling dirty values.

        Returns:
            TDoc: The created document.
        """
        dirty_values_parameters = dirty_values_parameters or {}
        dirty_values_parameters["action"] = "create"
        response = await self.api_call.post(
            self._endpoint_path(),
            body=document,
            params=dirty_values_parameters,
            as_json=True,
            entity_type=typing.Dict[str, str],
        )
        return typing.cast(TDoc, response)

    async def create_many(
        self,
        documents: typing.List[TDoc],
        dirty_values_parameters: typing.Union[DirtyValuesParameters, None] = None,
    ) -> typing.List[typing.Union[ImportResponseSuccess, ImportResponseFail[TDoc]]]:
        """
        Create multiple documents in the collection.

        Args:
            documents (List[TDoc]): The list of documents to create.
            dirty_values_parameters (Union[DirtyValuesParameters, None], optional):
                Parameters for handling dirty values.

        Returns:
            List[Union[ImportResponseSuccess, ImportResponseFail[TDoc]]]:
                The list of import responses.
        """
        # Logger.warn is deprecated since Python 3.3; use warning() like
        # import_jsonl below does.
        logger.warning("`create_many` is deprecated: please use `import_`.")
        return await self.import_(documents, dirty_values_parameters)

    async def upsert(
        self,
        document: TDoc,
        dirty_values_parameters: typing.Union[DirtyValuesParameters, None] = None,
    ) -> TDoc:
        """
        Create or update a document in the collection.

        Args:
            document (TDoc): The document to upsert.
            dirty_values_parameters (Union[DirtyValuesParameters, None], optional):
                Parameters for handling dirty values.

        Returns:
            TDoc: The upserted document.
        """
        dirty_values_parameters = dirty_values_parameters or {}
        dirty_values_parameters["action"] = "upsert"
        response = await self.api_call.post(
            self._endpoint_path(),
            body=document,
            params=dirty_values_parameters,
            as_json=True,
            entity_type=typing.Dict[str, str],
        )
        return typing.cast(TDoc, response)

    async def update(
        self,
        document: TDoc,
        dirty_values_parameters: typing.Union[UpdateByFilterParameters, None] = None,
    ) -> UpdateByFilterResponse:
        """
        Update a document in the collection.

        Args:
            document (TDoc): The document to update.
            dirty_values_parameters (Union[UpdateByFilterParameters, None], optional):
                Parameters for handling dirty values and filtering.

        Returns:
            UpdateByFilterResponse: The response containing information about the update.
        """
        dirty_values_parameters = dirty_values_parameters or {}
        dirty_values_parameters["action"] = "update"
        response: UpdateByFilterResponse = await self.api_call.patch(
            self._endpoint_path(),
            body=document,
            params=dirty_values_parameters,
            entity_type=UpdateByFilterResponse,
        )
        return response

    async def import_jsonl(self, documents_jsonl: str) -> str:
        """
        Import documents from a JSONL string.

        Args:
            documents_jsonl (str): The JSONL string containing documents to import.

        Returns:
            str: The import response as a string.
        """
        logger.warning("`import_jsonl` is deprecated: please use `import_`.")
        return await self.import_(documents_jsonl)

    @typing.overload
    async def import_(
        self,
        documents: typing.List[TDoc],
        import_parameters: DocumentImportParametersReturnDocAndId,
        batch_size: typing.Union[int, None] = None,
    ) -> typing.List[
        typing.Union[ImportResponseWithDocAndId[TDoc], ImportResponseFail[TDoc]]
    ]: ...

    @typing.overload
    async def import_(
        self,
        documents: typing.List[TDoc],
        import_parameters: DocumentImportParametersReturnId,
        batch_size: typing.Union[int, None] = None,
    ) -> typing.List[typing.Union[ImportResponseWithId, ImportResponseFail[TDoc]]]: ...

    @typing.overload
    async def import_(
        self,
        documents: typing.List[TDoc],
        import_parameters: typing.Union[DocumentWriteParameters, None] = None,
        batch_size: typing.Union[int, None] = None,
    ) -> typing.List[typing.Union[ImportResponseSuccess, ImportResponseFail[TDoc]]]: ...

    @typing.overload
    async def import_(
        self,
        documents: typing.List[TDoc],
        import_parameters: DocumentImportParametersReturnDoc,
        batch_size: typing.Union[int, None] = None,
    ) -> typing.List[
        typing.Union[ImportResponseWithDoc[TDoc], ImportResponseFail[TDoc]]
    ]: ...

    @typing.overload
    async def import_(
        self,
        documents: typing.List[TDoc],
        import_parameters: _ImportParameters,
        batch_size: typing.Union[int, None] = None,
    ) -> typing.List[ImportResponse[TDoc]]: ...

    @typing.overload
    async def import_(
        self,
        documents: typing.Union[bytes, str],
        import_parameters: _ImportParameters = None,
        batch_size: typing.Union[int, None] = None,
    ) -> str: ...

    async def import_(
        self,
        documents: typing.Union[bytes, str, typing.List[TDoc]],
        import_parameters: _ImportParameters = None,
        batch_size: typing.Union[int, None] = None,
    ) -> typing.Union[ImportResponse[TDoc], str]:
        """
        Import documents into the collection.

        This method supports various input types and import parameters.
        It can handle both individual documents and batches of documents.

        Args:
            documents: The documents to import.
            import_parameters: Parameters for the import operation.
            batch_size: The size of each batch for batch imports.

        Returns:
            The import response, which can be a list of responses or a string.

        Raises:
            TypesenseClientError: If an empty list of documents is provided.
        """
        if isinstance(documents, (str, bytes)):
            return await self._import_raw(documents, import_parameters)

        if batch_size:
            return await self._batch_import(documents, import_parameters, batch_size)

        return await self._bulk_import(documents, import_parameters)

    async def export(
        self,
        export_parameters: typing.Union[DocumentExportParameters, None] = None,
    ) -> str:
        """
        Export documents from the collection.

        Args:
            export_parameters (Union[DocumentExportParameters, None], optional):
                Parameters for the export operation.

        Returns:
            str: The exported documents as a string.
        """
        api_response: str = await self.api_call.get(
            self._endpoint_path("export"),
            params=export_parameters,
            as_json=False,
            entity_type=str,
        )
        return api_response

    async def search(self, search_parameters: SearchParameters) -> SearchResponse[TDoc]:
        """
        Search for documents in the collection.

        Args:
            search_parameters (SearchParameters): The search parameters.

        Returns:
            SearchResponse[TDoc]: The search response containing matching documents.
        """
        stringified_search_params = stringify_search_params(search_parameters)
        response: SearchResponse[TDoc] = await self.api_call.get(
            self._endpoint_path("search"),
            params=stringified_search_params,
            entity_type=SearchResponse,
            as_json=True,
        )
        return response

    async def delete(
        self,
        delete_parameters: typing.Union[DeleteQueryParameters, None] = None,
    ) -> DeleteResponse:
        """
        Delete documents from the collection based on given parameters.

        Args:
            delete_parameters (Union[DeleteQueryParameters, None], optional):
                Parameters for deletion.

        Returns:
            DeleteResponse: The response containing information about the deletion.
        """
        response: DeleteResponse = await self.api_call.delete(
            self._endpoint_path(),
            params=delete_parameters,
            entity_type=DeleteResponse,
        )
        return response

    def _endpoint_path(self, action: typing.Union[str, None] = None) -> str:
        """
        Construct the API endpoint path for document operations.

        Args:
            action (Union[str, None], optional): The action to perform. Defaults to None.

        Returns:
            str: The constructed endpoint path.
        """
        from .collections import AsyncCollections

        action = action or ""
        return "/".join(
            [
                AsyncCollections.resource_path,
                self.collection_name,
                self.resource_path,
                action,
            ],
        )

    async def _import_raw(
        self,
        documents: typing.Union[bytes, str],
        import_parameters: _ImportParameters,
    ) -> str:
        """Import raw document data."""
        response: str = await self.api_call.post(
            self._endpoint_path("import"),
            body=documents,
            params=import_parameters,
            as_json=False,
            entity_type=str,
        )

        return response

    async def _batch_import(
        self,
        documents: typing.List[TDoc],
        import_parameters: _ImportParameters,
        batch_size: int,
    ) -> ImportResponse[TDoc]:
        """Import documents in batches."""
        response_objs: ImportResponse[TDoc] = []
        for batch_index in range(0, len(documents), batch_size):
            batch = documents[batch_index : batch_index + batch_size]
            api_response = await self._bulk_import(batch, import_parameters)
            response_objs.extend(api_response)
        return response_objs

    async def _bulk_import(
        self,
        documents: typing.List[TDoc],
        import_parameters: _ImportParameters,
    ) -> ImportResponse[TDoc]:
        """Import a list of documents in bulk."""
        document_strs = [json.dumps(doc) for doc in documents]
        if not document_strs:
            raise TypesenseClientError("Cannot import an empty list of documents.")

        docs_import = "\n".join(document_strs)
        res = await self.api_call.post(
            self._endpoint_path("import"),
            body=docs_import,
            params=import_parameters,
            entity_type=str,
            as_json=False,
        )
        return self._parse_import_response(res)

    def _parse_import_response(self, response: str) -> ImportResponse[TDoc]:
        """Parse the import response string into a list of response objects."""
        # Annotated as ImportResponse[TDoc] for consistency with _batch_import
        # (the original unparameterized List[ImportResponse] did not match the
        # declared return type).
        response_objs: ImportResponse[TDoc] = []
        for res_obj_str in response.split("\n"):
            try:
                res_obj_json = json.loads(res_obj_str)
            except json.JSONDecodeError as decode_error:
                raise TypesenseClientError(
                    f"Invalid response - {res_obj_str}",
                ) from decode_error
            response_objs.append(res_obj_json)
        return response_objs
"""
This module provides async functionality for managing API keys in Typesense.

It contains the AsyncKeys class, which allows for creating, retrieving, and
generating scoped search keys asynchronously.

Classes:
    AsyncKeys: Manages API keys in the Typesense API (async).

Dependencies:
    - typesense.async_api_call: Provides the AsyncApiCall class for making async API requests.
    - typesense.async_key: Provides the AsyncKey class for individual API key operations.
    - typesense.types.document: Provides GenerateScopedSearchKeyParams type.
    - typesense.types.key: Provides various API key schema types.

Note: This module uses conditional imports to support both Python 3.11+ and earlier versions.
"""

import base64
import hashlib
import hmac
import json
import sys

from .api_call import AsyncApiCall
from .key import AsyncKey
from typesense.types.document import GenerateScopedSearchKeyParams
from typesense.types.key import (
    ApiKeyCreateResponseSchema,
    ApiKeyCreateSchema,
    ApiKeyRetrieveSchema,
    ApiKeySchema,
)

if sys.version_info >= (3, 11):
    import typing
else:
    import typing_extensions as typing


class AsyncKeys:
    """
    Manages API keys in the Typesense API (async).

    This class provides async methods to create, retrieve, and generate scoped search keys.

    Attributes:
        resource_path (str): The API endpoint path for key operations.
        api_call (AsyncApiCall): The AsyncApiCall instance for making async API requests.
        keys (Dict[int, AsyncKey]): A dictionary of AsyncKey instances, keyed by key ID.
    """

    resource_path: typing.Final[str] = "/keys"

    def __init__(self, api_call: AsyncApiCall) -> None:
        """
        Initialize the AsyncKeys instance.

        Args:
            api_call (AsyncApiCall): The AsyncApiCall instance for making async API requests.
        """
        self.api_call = api_call
        self.keys: typing.Dict[int, AsyncKey] = {}

    def __getitem__(self, key_id: int) -> AsyncKey:
        """
        Get or create an AsyncKey instance for a given key ID.

        This method allows accessing API keys using dictionary-like syntax.
        If the AsyncKey instance doesn't exist, it creates a new one.

        Args:
            key_id (int): The ID of the API key.

        Returns:
            AsyncKey: The AsyncKey instance for the specified key ID.

        Example:
            >>> keys = AsyncKeys(async_api_call)
            >>> key = keys[1]
        """
        if not self.keys.get(key_id):
            self.keys[key_id] = AsyncKey(self.api_call, key_id)
        return self.keys[key_id]

    async def create(self, schema: ApiKeyCreateSchema) -> ApiKeyCreateResponseSchema:
        """
        Create a new API key.

        Args:
            schema (ApiKeyCreateSchema): The schema for creating the API key.

        Returns:
            ApiKeyCreateResponseSchema: The created API key, including the
                full key value (only returned at creation time).

        Example:
            >>> keys = AsyncKeys(async_api_call)
            >>> key = await keys.create(
            ...     {
            ...         "actions": ["documents:search"],
            ...         "collections": ["companies"],
            ...         "description": "Search-only key",
            ...     }
            ... )
        """
        # The local annotation matches the declared return type
        # (the original annotated it as ApiKeySchema, contradicting the
        # signature's ApiKeyCreateResponseSchema).
        response: ApiKeyCreateResponseSchema = await self.api_call.post(
            AsyncKeys.resource_path,
            as_json=True,
            body=schema,
            entity_type=ApiKeySchema,
        )
        return response

    def generate_scoped_search_key(
        self,
        search_key: str,
        key_parameters: GenerateScopedSearchKeyParams,
    ) -> bytes:
        """
        Generate a scoped search key.

        Note: This is a synchronous method as it performs local computation
        and does not make any API calls. Only a key generated with the
        `documents:search` action will be accepted by the server.

        Args:
            search_key (str): The search key to use as a base.
            key_parameters (GenerateScopedSearchKeyParams): Parameters for the scoped key.

        Returns:
            bytes: The generated scoped search key.

        Example:
            >>> keys = AsyncKeys(async_api_call)
            >>> scoped_key = keys.generate_scoped_search_key(
            ...     "KmacipDKNqAM3YiigXfw5pZvNOrPQUba",
            ...     {"q": "search query", "collection": "companies"},
            ... )
        """
        params_str = json.dumps(key_parameters)
        # HMAC-SHA256 of the embedded params, signed with the parent key.
        digest = base64.b64encode(
            hmac.new(
                search_key.encode("utf-8"),
                params_str.encode("utf-8"),
                digestmod=hashlib.sha256,
            ).digest(),
        )
        # The scoped key embeds the first 4 chars of the parent key so the
        # server can identify which key signed it.
        key_prefix = search_key[:4]
        raw_scoped_key = f"{digest.decode('utf-8')}{key_prefix}{params_str}"
        return base64.b64encode(raw_scoped_key.encode("utf-8"))

    async def retrieve(self) -> ApiKeyRetrieveSchema:
        """
        Retrieve all API keys.

        Returns:
            ApiKeyRetrieveSchema: The schema containing all API keys.

        Example:
            >>> keys = AsyncKeys(async_api_call)
            >>> all_keys = await keys.retrieve()
            >>> for key in all_keys["keys"]:
            ...     print(key["id"])
        """
        response: ApiKeyRetrieveSchema = await self.api_call.get(
            AsyncKeys.resource_path,
            entity_type=ApiKeyRetrieveSchema,
            as_json=True,
        )
        return response
import sys

if sys.version_info >= (3, 11):
    import typing
else:
    import typing_extensions as typing

from .api_call import AsyncApiCall
from typesense.types.metrics import MetricsResponse


class AsyncMetrics:
    """
    Manages async metrics retrieval from the Typesense API.

    This class provides async methods to retrieve system and Typesense metrics
    such as CPU, memory, disk, and network usage.

    Attributes:
        resource_path (str): The base path for metrics endpoint.
        api_call (AsyncApiCall): The AsyncApiCall instance for making async API requests.
    """

    resource_path: typing.Final[str] = "/metrics.json"

    def __init__(self, api_call: AsyncApiCall) -> None:
        """
        Initialize the AsyncMetrics instance.

        Args:
            api_call (AsyncApiCall): The AsyncApiCall instance for making async API requests.
        """
        # `-> None` added for consistency with the sibling async resource
        # classes (AsyncDebug, AsyncKeys, AsyncMultiSearch, ...), which all
        # annotate their initializers.
        self.api_call = api_call

    async def retrieve(self) -> MetricsResponse:
        """
        Retrieve metrics from the Typesense API.

        Returns:
            MetricsResponse: A dictionary containing system and Typesense metrics.

        Example:
            >>> metrics = AsyncMetrics(async_api_call)
            >>> response = await metrics.retrieve()
            >>> print(response["system_cpu_active_percentage"])
        """
        response: MetricsResponse = await self.api_call.get(
            AsyncMetrics.resource_path,
            as_json=True,
            entity_type=MetricsResponse,
        )
        return response
+ api_call (AsyncApiCall): The AsyncApiCall instance for making async API requests. + """ + + resource_path: typing.Final[str] = "/multi_search" + + def __init__(self, api_call: AsyncApiCall) -> None: + """ + Initialize the AsyncMultiSearch instance. + + Args: + api_call (AsyncApiCall): The AsyncApiCall instance for making async API requests. + """ + self.api_call = api_call + + async def perform( + self, + search_queries: MultiSearchRequestSchema, + common_params: typing.Union[MultiSearchCommonParameters, None] = None, + ) -> MultiSearchResponse: + """ + Perform a multi-search operation. + + This method allows executing multiple search queries in a single API call. + It processes the search parameters, sends the request to the Typesense API, + and returns the multi-search response. + + Args: + search_queries (MultiSearchRequestSchema): + A dictionary containing the list of search queries to perform. + The dictionary should have a 'searches' key with a list of search + parameter dictionaries. + common_params (Union[MultiSearchCommonParameters, None], optional): + Common parameters to apply to all search queries. Defaults to None. + + Returns: + MultiSearchResponse: + The response from the multi-search operation, containing + the results of all search queries. + + Example: + >>> multi_search = AsyncMultiSearch(async_api_call) + >>> response = await multi_search.perform( + ... { + ... "searches": [ + ... { + ... "q": "com", + ... "query_by": "company_name", + ... "collection": "companies", + ... }, + ... ], + ... } + ... 
) + """ + stringified_search_params = [ + stringify_search_params(search_params) + for search_params in search_queries.get("searches") + ] + search_body = { + "searches": stringified_search_params, + "union": search_queries.get("union", False), + } + response: MultiSearchResponse = await self.api_call.post( + AsyncMultiSearch.resource_path, + body=search_body, + params=common_params, + as_json=True, + entity_type=MultiSearchResponse, + ) + return response diff --git a/src/typesense/async_/nl_search_model.py b/src/typesense/async_/nl_search_model.py new file mode 100644 index 0000000..c7983e1 --- /dev/null +++ b/src/typesense/async_/nl_search_model.py @@ -0,0 +1,102 @@ +""" +This module provides async functionality for managing individual NL search models in Typesense. + +It contains the AsyncNLSearchModel class, which allows for retrieving, updating, and deleting +NL search models asynchronously. + +Classes: + AsyncNLSearchModel: Manages async operations on a single NL search model in the Typesense API. + +Dependencies: + - typesense.async_api_call: Provides the AsyncApiCall class for making async API requests. + - typesense.types.nl_search_model: Provides NLSearchModelDeleteSchema, NLSearchModelSchema, and NLSearchModelUpdateSchema types. + +Note: This module uses conditional imports to support both Python 3.11+ and earlier versions. +""" + +from .api_call import AsyncApiCall +from typesense.types.nl_search_model import ( + NLSearchModelDeleteSchema, + NLSearchModelSchema, + NLSearchModelUpdateSchema, +) + + +class AsyncNLSearchModel: + """ + Manages async operations on a single NL search model in the Typesense API. + + This class provides async methods to retrieve, update, and delete an NL search model. + + Attributes: + model_id (str): The ID of the NL search model. + api_call (AsyncApiCall): The AsyncApiCall instance for making async API requests. 
+ """ + + def __init__(self, api_call: AsyncApiCall, model_id: str) -> None: + """ + Initialize the AsyncNLSearchModel instance. + + Args: + api_call (AsyncApiCall): The AsyncApiCall instance for making async API requests. + model_id (str): The ID of the NL search model. + """ + self.model_id = model_id + self.api_call = api_call + + async def retrieve(self) -> NLSearchModelSchema: + """ + Retrieve this specific NL search model. + + Returns: + NLSearchModelSchema: The schema containing the NL search model details. + """ + response: NLSearchModelSchema = await self.api_call.get( + self._endpoint_path, + as_json=True, + entity_type=NLSearchModelSchema, + ) + return response + + async def update(self, model: NLSearchModelUpdateSchema) -> NLSearchModelSchema: + """ + Update this specific NL search model. + + Args: + model (NLSearchModelUpdateSchema): + The schema containing the updated model details. + + Returns: + NLSearchModelSchema: The schema containing the updated NL search model. + """ + response: NLSearchModelSchema = await self.api_call.put( + self._endpoint_path, + body=model, + entity_type=NLSearchModelSchema, + ) + return response + + async def delete(self) -> NLSearchModelDeleteSchema: + """ + Delete this specific NL search model. + + Returns: + NLSearchModelDeleteSchema: The schema containing the deletion response. + """ + response: NLSearchModelDeleteSchema = await self.api_call.delete( + self._endpoint_path, + entity_type=NLSearchModelDeleteSchema, + ) + return response + + @property + def _endpoint_path(self) -> str: + """ + Construct the API endpoint path for this specific NL search model. + + Returns: + str: The constructed endpoint path. 
+ """ + from .nl_search_models import AsyncNLSearchModels + + return "/".join([AsyncNLSearchModels.resource_path, self.model_id]) diff --git a/src/typesense/async_/nl_search_models.py b/src/typesense/async_/nl_search_models.py new file mode 100644 index 0000000..ea6b674 --- /dev/null +++ b/src/typesense/async_/nl_search_models.py @@ -0,0 +1,130 @@ +""" +This module provides async functionality for managing NL search models in Typesense. + +It contains the AsyncNLSearchModels class, which allows for creating, retrieving, and +accessing individual NL search models asynchronously. + +Classes: + AsyncNLSearchModels: Manages NL search models in the Typesense API (async). + +Dependencies: + - typesense.async_api_call: Provides the AsyncApiCall class for making async API requests. + - typesense.async_nl_search_model: Provides the AsyncNLSearchModel class for individual NL search model operations. + - typesense.types.nl_search_model: Provides NLSearchModelCreateSchema, NLSearchModelSchema, and NLSearchModelsRetrieveSchema types. + +Note: This module uses conditional imports to support both Python 3.11+ and earlier versions. +""" + +import sys + +from .api_call import AsyncApiCall +from .nl_search_model import AsyncNLSearchModel +from typesense.types.nl_search_model import ( + NLSearchModelCreateSchema, + NLSearchModelSchema, + NLSearchModelsRetrieveSchema, +) + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + + +class AsyncNLSearchModels: + """ + Manages NL search models in the Typesense API (async). + + This class provides async methods to create, retrieve, and access individual NL search models. + + Attributes: + resource_path (str): The API endpoint path for NL search models operations. + api_call (AsyncApiCall): The AsyncApiCall instance for making async API requests. + nl_search_models (Dict[str, AsyncNLSearchModel]): + A dictionary of AsyncNLSearchModel instances, keyed by model ID. 
+ """ + + resource_path: typing.Final[str] = "/nl_search_models" + + def __init__(self, api_call: AsyncApiCall) -> None: + """ + Initialize the AsyncNLSearchModels instance. + + Args: + api_call (AsyncApiCall): The AsyncApiCall instance for making async API requests. + """ + self.api_call = api_call + self.nl_search_models: typing.Dict[str, AsyncNLSearchModel] = {} + + def __getitem__(self, model_id: str) -> AsyncNLSearchModel: + """ + Get or create an AsyncNLSearchModel instance for a given model ID. + + This method allows accessing NL search models using dictionary-like syntax. + If the AsyncNLSearchModel instance doesn't exist, it creates a new one. + + Args: + model_id (str): The ID of the NL search model. + + Returns: + AsyncNLSearchModel: The AsyncNLSearchModel instance for the specified model ID. + + Example: + >>> nl_search_models = AsyncNLSearchModels(async_api_call) + >>> model = nl_search_models["model_id"] + """ + if model_id not in self.nl_search_models: + self.nl_search_models[model_id] = AsyncNLSearchModel( + self.api_call, + model_id, + ) + return self.nl_search_models[model_id] + + async def create(self, model: NLSearchModelCreateSchema) -> NLSearchModelSchema: + """ + Create a new NL search model. + + Args: + model (NLSearchModelCreateSchema): + The schema for creating the NL search model. + + Returns: + NLSearchModelSchema: The created NL search model. + + Example: + >>> nl_search_models = AsyncNLSearchModels(async_api_call) + >>> model = await nl_search_models.create( + ... { + ... "api_key": "key", + ... "model_name": "openai/gpt-3.5-turbo", + ... "system_prompt": "System prompt", + ... } + ... ) + """ + response: NLSearchModelSchema = await self.api_call.post( + endpoint=AsyncNLSearchModels.resource_path, + entity_type=NLSearchModelSchema, + as_json=True, + body=model, + ) + return response + + async def retrieve(self) -> NLSearchModelsRetrieveSchema: + """ + Retrieve all NL search models. 
+ + Returns: + NLSearchModelsRetrieveSchema: A list of all NL search models. + + Example: + >>> nl_search_models = AsyncNLSearchModels(async_api_call) + >>> all_models = await nl_search_models.retrieve() + >>> for model in all_models: + ... print(model["id"]) + """ + response: NLSearchModelsRetrieveSchema = await self.api_call.get( + endpoint=AsyncNLSearchModels.resource_path, + entity_type=NLSearchModelsRetrieveSchema, + as_json=True, + ) + return response diff --git a/src/typesense/async_/operations.py b/src/typesense/async_/operations.py new file mode 100644 index 0000000..ca61a1f --- /dev/null +++ b/src/typesense/async_/operations.py @@ -0,0 +1,279 @@ +""" +This module provides async functionality for performing various operations in the Typesense API. + +It contains the AsyncOperations class, which handles different API operations such as +health checks, snapshots, and configuration changes asynchronously. + +Classes: + AsyncOperations: Manages various async operations in the Typesense API. + +Dependencies: + - typesense.types.operations: + Provides type definitions for operation responses and parameters. + - typesense.async_api_call: Provides the AsyncApiCall class for making async API requests. + +Note: This module uses conditional imports to support both Python 3.11+ and earlier versions. +""" + +import sys + +from .api_call import AsyncApiCall +from typesense.types.operations import ( + HealthCheckResponse, + LogSlowRequestsTimeParams, + OperationResponse, + SchemaChangesResponse, + SnapshotParameters, +) + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + + +class AsyncOperations: + """ + Manages various async operations in the Typesense API. + + This class provides async methods to perform different operations such as + health checks, snapshots, and configuration changes. + + Attributes: + resource_path (str): The base path for operations endpoints. + health_path (str): The path for the health check endpoint. 
class AsyncOperations:
    """
    Perform server-level operations in the Typesense API (async).

    Provides health checks, snapshots, schema-change inspection, and
    configuration changes.

    Attributes:
        resource_path (str): Base path for operations endpoints.
        health_path (str): Path for the health check endpoint.
        config_path (str): Path for the configuration endpoint.
        schema_changes (str): Path for the schema changes endpoint.
        api_call (AsyncApiCall): Client used to issue async API requests.
    """

    resource_path: typing.Final[str] = "/operations"
    health_path: typing.Final[str] = "/health"
    config_path: typing.Final[str] = "/config"
    schema_changes: typing.Final[str] = "/schema_changes"

    def __init__(self, api_call: AsyncApiCall):
        """Store the API client used for all operation requests."""
        self.api_call = api_call

    @typing.overload
    async def perform(
        self,
        operation_name: typing.Literal["schema_changes"],
        query_params: None = None,
    ) -> typing.List[SchemaChangesResponse]:
        """Perform a schema_changes operation (returns a list of changes)."""

    @typing.overload
    async def perform(
        self,
        operation_name: typing.Literal["vote"],
        query_params: None = None,
    ) -> OperationResponse:
        """Perform a vote operation."""

    @typing.overload
    async def perform(
        self,
        operation_name: typing.Literal["db/compact"],
        query_params: None = None,
    ) -> OperationResponse:
        """Perform a database compaction operation."""

    @typing.overload
    async def perform(
        self,
        operation_name: typing.Literal["cache/clear"],
        query_params: None = None,
    ) -> OperationResponse:
        """Perform a cache clear operation."""

    @typing.overload
    async def perform(
        self,
        operation_name: str,
        query_params: typing.Union[typing.Dict[str, str], None] = None,
    ) -> OperationResponse:
        """Perform a generic operation with optional query parameters."""

    @typing.overload
    async def perform(
        self,
        operation_name: typing.Literal["snapshot"],
        query_params: SnapshotParameters,
    ) -> OperationResponse:
        """Perform a snapshot operation."""

    async def perform(
        self,
        operation_name: typing.Union[
            typing.Literal[
                "snapshot",
                "vote",
                "db/compact",
                "cache/clear",
                "schema_changes",
            ],
            str,
        ],
        query_params: typing.Union[
            SnapshotParameters,
            typing.Dict[str, str],
            None,
        ] = None,
    ) -> typing.Union[OperationResponse, typing.List[SchemaChangesResponse]]:
        """
        Perform an operation on the Typesense API.

        Implementation behind all the overloads above.

        Args:
            operation_name: One of "snapshot", "vote", "db/compact",
                "cache/clear", "schema_changes", or any future operation name.
            query_params (Union[SnapshotParameters, Dict[str, str], None], optional):
                Query parameters for the operation.

        Returns:
            Union[OperationResponse, List[SchemaChangesResponse]]:
                The response from the performed operation ("schema_changes"
                returns a list; everything else returns a single response).

        Example:
            >>> operations = AsyncOperations(async_api_call)
            >>> response = await operations.perform("vote")
        """
        # Return annotation widened to include List[SchemaChangesResponse] so it
        # agrees with the "schema_changes" overload and the documented contract.
        response: typing.Union[
            OperationResponse,
            typing.List[SchemaChangesResponse],
        ] = await self.api_call.post(
            self._endpoint_path(operation_name),
            params=query_params,
            as_json=True,
            entity_type=OperationResponse,
        )
        return response

    async def is_healthy(self) -> bool:
        """
        Check whether the Typesense server reports healthy.

        Returns:
            bool: True if the /health endpoint returned {"ok": true}.

        Example:
            >>> healthy = await operations.is_healthy()
        """
        call_resp: HealthCheckResponse = await self.api_call.get(
            AsyncOperations.health_path,
            as_json=True,
            entity_type=HealthCheckResponse,
        )
        # Guard against non-dict payloads (e.g. an error body) rather than
        # assuming the health endpoint always returns JSON.
        if isinstance(call_resp, dict):
            is_ok: bool = call_resp.get("ok", False)
        else:
            is_ok = False
        return is_ok

    async def toggle_slow_request_log(
        self,
        log_slow_requests_time_params: LogSlowRequestsTimeParams,
    ) -> typing.Dict[str, typing.Union[str, bool]]:
        """
        Configure slow-request logging.

        Args:
            log_slow_requests_time_params (LogSlowRequestsTimeParams):
                Parameters for configuring slow request logging.

        Returns:
            Dict[str, Union[str, bool]]: The configuration-change response.

        Example:
            >>> await operations.toggle_slow_request_log(
            ...     {"log_slow_requests_time_ms": 100}
            ... )
        """
        # The config endpoint expects dash-separated keys while the Python
        # schema uses snake_case; translate before sending.
        data_dashed = {
            key.replace("_", "-"): param_value
            for key, param_value in log_slow_requests_time_params.items()
        }
        response: typing.Dict[str, typing.Union[str, bool]] = await self.api_call.post(
            AsyncOperations.config_path,
            as_json=True,
            entity_type=typing.Dict[str, typing.Union[str, bool]],
            body=data_dashed,
        )
        return response

    @staticmethod
    def _endpoint_path(operation_name: str) -> str:
        """
        Generate the endpoint path for a given operation.

        Args:
            operation_name (str): The name of the operation.

        Returns:
            str: The full endpoint path for the operation.
        """
        return "/".join([AsyncOperations.resource_path, operation_name])
+""" + +from .api_call import AsyncApiCall +from typesense.logger import warn_deprecation +from typesense.types.override import OverrideDeleteSchema, OverrideSchema + + +class AsyncOverride: + """ + Class for managing individual overrides in a Typesense collection (async). + + This class provides methods to interact with a specific override, + including retrieving and deleting it. + + Attributes: + api_call (AsyncApiCall): The API call object for making requests. + collection_name (str): The name of the collection. + override_id (str): The ID of the override. + """ + + def __init__( + self, + api_call: AsyncApiCall, + collection_name: str, + override_id: str, + ) -> None: + """ + Initialize the AsyncOverride object. + + Args: + api_call (AsyncApiCall): The API call object for making requests. + collection_name (str): The name of the collection. + override_id (str): The ID of the override. + """ + self.api_call = api_call + self.collection_name = collection_name + self.override_id = override_id + + async def retrieve(self) -> OverrideSchema: + """ + Retrieve this specific override. + + Returns: + OverrideSchema: The schema containing the override details. + """ + response: OverrideSchema = await self.api_call.get( + self._endpoint_path, + entity_type=OverrideSchema, + as_json=True, + ) + return response + + async def delete(self) -> OverrideDeleteSchema: + """ + Delete this specific override. + + Returns: + OverrideDeleteSchema: The schema containing the deletion response. + """ + response: OverrideDeleteSchema = await self.api_call.delete( + self._endpoint_path, + entity_type=OverrideDeleteSchema, + ) + return response + + @property + @warn_deprecation( # type: ignore[untyped-decorator] + "The override API (collections/{collection}/overrides/{override_id}) is deprecated is removed on v30+. 
" + "Use curation sets (curation_sets) instead.", + flag_name="overrides_deprecation", + ) + def _endpoint_path(self) -> str: + """ + Construct the API endpoint path for this specific override. + + Returns: + str: The constructed endpoint path. + """ + from .collections import AsyncCollections + from .overrides import AsyncOverrides + + return "/".join( + [ + AsyncCollections.resource_path, + self.collection_name, + AsyncOverrides.resource_path, + self.override_id, + ], + ) diff --git a/src/typesense/async_/overrides.py b/src/typesense/async_/overrides.py new file mode 100644 index 0000000..b8e725b --- /dev/null +++ b/src/typesense/async_/overrides.py @@ -0,0 +1,157 @@ +""" +This module provides async functionality for managing overrides in Typesense. + +Classes: + - AsyncOverrides: Handles async operations related to overrides within a collection. + +Methods: + - __init__: Initializes the AsyncOverrides object. + - __getitem__: Retrieves or creates an AsyncOverride object for a given override_id. + - _endpoint_path: Constructs the API endpoint path for override operations. + - upsert: Creates or updates an override. + - retrieve: Retrieves all overrides for the collection. + +Attributes: + - RESOURCE_PATH: The API resource path for overrides. + +The AsyncOverrides class interacts with the Typesense API to manage override operations +within a specific collection. It provides methods to create, update, and retrieve +overrides, as well as access individual AsyncOverride objects. + +For more information regarding Overrides, refer to the Curation [documentation] +(https://typesense.org/docs/27.0/api/curation.html#curation). + +This module uses type hinting and is compatible with Python 3.11+ as well as earlier +versions through the use of the typing_extensions library. 
+""" + +import sys + +from typing_extensions import deprecated + +from .api_call import AsyncApiCall +from .override import AsyncOverride +from typesense.logger import warn_deprecation +from typesense.types.override import ( + OverrideCreateSchema, + OverrideRetrieveSchema, + OverrideSchema, +) + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + + +@deprecated("AsyncOverrides is deprecated on v30+. Use client.curation_sets instead.") +class AsyncOverrides: + """ + Class for managing overrides in a Typesense collection (async). + + This class provides methods to interact with overrides, including + retrieving, creating, and updating them. + + Attributes: + RESOURCE_PATH (str): The API resource path for overrides. + api_call (AsyncApiCall): The API call object for making requests. + collection_name (str): The name of the collection. + overrides (Dict[str, AsyncOverride]): A dictionary of AsyncOverride objects. + """ + + resource_path: typing.Final[str] = "overrides" + + def __init__( + self, + api_call: AsyncApiCall, + collection_name: str, + ) -> None: + """ + Initialize the AsyncOverrides object. + + Args: + api_call (AsyncApiCall): The API call object for making requests. + collection_name (str): The name of the collection. + """ + self.api_call = api_call + self.collection_name = collection_name + self.overrides: typing.Dict[str, AsyncOverride] = {} + + def __getitem__(self, override_id: str) -> AsyncOverride: + """ + Get or create an AsyncOverride object for a given override_id. + + Args: + override_id (str): The ID of the override. + + Returns: + AsyncOverride: The AsyncOverride object for the given ID. 
+ """ + if not self.overrides.get(override_id): + self.overrides[override_id] = AsyncOverride( + self.api_call, + self.collection_name, + override_id, + ) + return self.overrides[override_id] + + async def upsert( + self, override_id: str, schema: OverrideCreateSchema + ) -> OverrideSchema: + """ + Create or update an override. + + Args: + id (str): The ID of the override. + schema (OverrideCreateSchema): The schema for creating or updating the override. + + Returns: + OverrideSchema: The created or updated override. + """ + response: OverrideSchema = await self.api_call.put( + endpoint=self._endpoint_path(override_id), + entity_type=OverrideSchema, + body=schema, + ) + return response + + async def retrieve(self) -> OverrideRetrieveSchema: + """ + Retrieve all overrides for the collection. + + Returns: + OverrideRetrieveSchema: The schema containing all overrides. + """ + response: OverrideRetrieveSchema = await self.api_call.get( + self._endpoint_path(), + entity_type=OverrideRetrieveSchema, + as_json=True, + ) + return response + + @warn_deprecation( # type: ignore[untyped-decorator] + "AsyncOverrides is deprecated on v30+. Use client.curation_sets instead.", + flag_name="overrides_deprecation", + ) + def _endpoint_path(self, override_id: typing.Union[str, None] = None) -> str: + """ + Construct the API endpoint path for override operations. + + Args: + override_id (Union[str, None], optional): The ID of the override. Defaults to None. + + Returns: + str: The constructed endpoint path. 
+ """ + from .collections import AsyncCollections + + override_id = override_id or "" + + return "/".join( + [ + AsyncCollections.resource_path, + self.collection_name, + AsyncOverrides.resource_path, + override_id, + ], + ) diff --git a/src/typesense/async_/stemming.py b/src/typesense/async_/stemming.py new file mode 100644 index 0000000..3cacda3 --- /dev/null +++ b/src/typesense/async_/stemming.py @@ -0,0 +1,50 @@ +""" +Module for managing stemming dictionaries in Typesense (async). + +This module provides a class for managing stemming dictionaries in Typesense, +including creating, updating, and retrieving them asynchronously. + +Classes: + - AsyncStemming: Handles async operations related to stemming dictionaries. + +Attributes: + - AsyncStemmingDictionaries: The AsyncStemmingDictionaries object for managing stemming dictionaries. + +Methods: + - __init__: Initializes the AsyncStemming object. + +The AsyncStemming class interacts with the Typesense API to manage stemming dictionary operations. +It provides access to the AsyncStemmingDictionaries object for managing stemming dictionaries. + +For more information on stemming dictionaries, refer to the Stemming +[documentation](https://typesense.org/docs/28.0/api/stemming.html) + +This module uses type hinting and is compatible with Python 3.11+ as well as earlier +versions through the use of the typing_extensions library. +""" + +from .api_call import AsyncApiCall +from .stemming_dictionaries import AsyncStemmingDictionaries + + +class AsyncStemming(object): + """ + Class for managing stemming dictionaries in Typesense (async). + + This class provides methods to interact with stemming dictionaries, including + creating, updating, and retrieving them. + + Attributes: + dictionaries (AsyncStemmingDictionaries): The AsyncStemmingDictionaries object for managing + stemming dictionaries. + """ + + def __init__(self, api_call: AsyncApiCall): + """ + Initialize the AsyncStemming object. 
+ + Args: + api_call (AsyncApiCall): The API call object for making requests. + """ + self.api_call = api_call + self.dictionaries = AsyncStemmingDictionaries(api_call) diff --git a/src/typesense/async_/stemming_dictionaries.py b/src/typesense/async_/stemming_dictionaries.py new file mode 100644 index 0000000..6c1e2dd --- /dev/null +++ b/src/typesense/async_/stemming_dictionaries.py @@ -0,0 +1,185 @@ +""" +Module for interacting with the stemming dictionaries endpoint of the Typesense API (async). + +This module provides a class for managing stemming dictionaries in Typesense, including creating +and updating them asynchronously. + +Classes: + - AsyncStemmingDictionaries: Handles async operations related to stemming dictionaries. + +Methods: + - __init__: Initializes the AsyncStemmingDictionaries object. + - __getitem__: Retrieves or creates an AsyncStemmingDictionary object for a given dictionary_id. + - upsert: Creates or updates a stemming dictionary. + - _upsert_list: Creates or updates a list of stemming dictionaries. + - _dump_to_jsonl: Dumps a list of StemmingDictionaryCreateSchema objects to a JSONL string. + - _parse_response: Parses the response from the upsert operation. + - _upsert_raw: Performs the raw upsert operation. + - _endpoint_path: Constructs the API endpoint path for this specific stemming dictionary. + +The AsyncStemmingDictionaries class interacts with the Typesense API to manage stemming dictionary +operations. It provides methods to create, update, and retrieve stemming dictionaries, as well as +access individual AsyncStemmingDictionary objects. 
class AsyncStemmingDictionaries:
    """
    Create, import, and list stemming dictionaries (async).

    Attributes:
        resource_path (str): The API resource path for stemming dictionaries.
        api_call (AsyncApiCall): Client used to issue async API requests.
        stemming_dictionaries (Dict[str, AsyncStemmingDictionary]):
            Cache of per-dictionary handles.
    """

    resource_path: typing.Final[str] = "/stemming/dictionaries"

    def __init__(self, api_call: AsyncApiCall):
        """Store the API client and start with an empty handle cache."""
        self.api_call = api_call
        self.stemming_dictionaries: typing.Dict[str, AsyncStemmingDictionary] = {}

    def __getitem__(self, dictionary_id: str) -> AsyncStemmingDictionary:
        """
        Return the handle for *dictionary_id*, creating it on first access.

        Args:
            dictionary_id (str): The ID of the stemming dictionary.

        Returns:
            AsyncStemmingDictionary: The handle for the given ID.
        """
        if not self.stemming_dictionaries.get(dictionary_id):
            self.stemming_dictionaries[dictionary_id] = AsyncStemmingDictionary(
                self.api_call,
                dictionary_id,
            )
        return self.stemming_dictionaries[dictionary_id]

    async def retrieve(self) -> StemmingDictionariesRetrieveSchema:
        """
        List all stemming dictionaries.

        Returns:
            StemmingDictionariesRetrieveSchema: The list of stemming dictionaries.
        """
        # as_json=True added for consistency with every other schema-returning
        # retrieve in this package: the reply is decoded into the schema dict.
        response: StemmingDictionariesRetrieveSchema = await self.api_call.get(
            self._endpoint_path(),
            entity_type=StemmingDictionariesRetrieveSchema,
            as_json=True,
        )
        return response

    @typing.overload
    async def upsert(
        self,
        dictionary_id: str,
        word_root_combinations: typing.Union[str, bytes],
    ) -> str: ...

    @typing.overload
    async def upsert(
        self,
        dictionary_id: str,
        word_root_combinations: typing.List[StemmingDictionaryCreateSchema],
    ) -> typing.List[StemmingDictionaryCreateSchema]: ...

    async def upsert(
        self,
        dictionary_id: str,
        word_root_combinations: typing.Union[
            typing.List[StemmingDictionaryCreateSchema],
            str,
            bytes,
        ],
    ) -> typing.Union[str, typing.List[StemmingDictionaryCreateSchema]]:
        """
        Create or update a stemming dictionary.

        Args:
            dictionary_id (str): The ID of the stemming dictionary.
            word_root_combinations: Either a pre-serialized JSONL string/bytes,
                or a list of word/root dictionaries to serialize.

        Returns:
            Union[str, List[StemmingDictionaryCreateSchema]]:
                The raw JSONL response (for raw input) or the parsed list of
                word/root dictionaries (for list input).
        """
        if isinstance(word_root_combinations, (str, bytes)):
            return await self._upsert_raw(dictionary_id, word_root_combinations)

        return await self._upsert_list(dictionary_id, word_root_combinations)

    async def _upsert_list(
        self,
        dictionary_id: str,
        word_root_combinations: typing.List[StemmingDictionaryCreateSchema],
    ) -> typing.List[StemmingDictionaryCreateSchema]:
        """Serialize the combinations to JSONL, upsert, and parse the reply."""
        word_combos_in_jsonl = self._dump_to_jsonl(word_root_combinations)
        response = await self._upsert_raw(dictionary_id, word_combos_in_jsonl)
        return self._parse_response(response)

    def _dump_to_jsonl(
        self,
        word_root_combinations: typing.List[StemmingDictionaryCreateSchema],
    ) -> str:
        """Serialize a list of word/root dicts to a JSONL string."""
        word_root_strs = [json.dumps(combo) for combo in word_root_combinations]

        return "\n".join(word_root_strs)

    def _parse_response(
        self,
        response: str,
    ) -> typing.List[StemmingDictionaryCreateSchema]:
        """
        Parse a JSONL response body into a list of word/root dicts.

        Raises:
            ValueError: If a non-blank line is not valid JSON.
        """
        object_list: typing.List[StemmingDictionaryCreateSchema] = []

        for line in response.split("\n"):
            # Skip blank lines so a trailing newline in the JSONL reply does
            # not raise a spurious parse error.
            if not line.strip():
                continue
            try:
                decoded = json.loads(line)
            except json.JSONDecodeError as err:
                raise ValueError(f"Failed to parse JSON from response: {line}") from err

            object_list.append(decoded)
        return object_list

    async def _upsert_raw(
        self,
        dictionary_id: str,
        word_root_combinations: typing.Union[bytes, str],
    ) -> str:
        """POST the raw JSONL payload to the import endpoint."""
        response: str = await self.api_call.post(
            self._endpoint_path("import"),
            body=word_root_combinations,
            as_json=False,
            entity_type=str,
            params={"id": dictionary_id},
        )
        return response

    def _endpoint_path(self, action: typing.Union[str, None] = None) -> str:
        """
        Construct the endpoint path, optionally with an action suffix.

        Args:
            action (str, optional): Action segment (e.g. "import"). Defaults to None.

        Returns:
            str: The constructed endpoint path.
        """
        if action:
            return f"{AsyncStemmingDictionaries.resource_path}/{action}"
        return AsyncStemmingDictionaries.resource_path
+""" + +from .api_call import AsyncApiCall +from typesense.types.stemming import StemmingDictionarySchema + + +class AsyncStemmingDictionary: + """ + Class for managing individual stemming dictionaries in Typesense (async). + + This class provides methods to interact with a specific stemming dictionary, + including retrieving it. + + Attributes: + api_call (AsyncApiCall): The API call object for making requests. + dict_id (str): The ID of the stemming dictionary. + """ + + def __init__(self, api_call: AsyncApiCall, dict_id: str): + """ + Initialize the AsyncStemmingDictionary object. + + Args: + api_call (AsyncApiCall): The API call object for making requests. + dict_id (str): The ID of the stemming dictionary. + """ + self.api_call = api_call + self.dict_id = dict_id + + async def retrieve(self) -> StemmingDictionarySchema: + """ + Retrieve this specific stemming dictionary. + + Returns: + StemmingDictionarySchema: The schema containing the stemming dictionary details. + """ + response: StemmingDictionarySchema = await self.api_call.get( + self._endpoint_path, + entity_type=StemmingDictionarySchema, + as_json=True, + ) + return response + + @property + def _endpoint_path(self) -> str: + """ + Construct the API endpoint path for this specific stemming dictionary. + + Returns: + str: The constructed endpoint path. + """ + from .stemming_dictionaries import AsyncStemmingDictionaries + + return "/".join([AsyncStemmingDictionaries.resource_path, self.dict_id]) diff --git a/src/typesense/async_/stopwords.py b/src/typesense/async_/stopwords.py new file mode 100644 index 0000000..326ddb8 --- /dev/null +++ b/src/typesense/async_/stopwords.py @@ -0,0 +1,117 @@ +""" +This module provides async functionality for managing stopwords in Typesense. + +Classes: + - AsyncStopwords: Handles async operations related to stopwords and stopword sets. + +Methods: + - __init__: Initializes the AsyncStopwords object. 
+ - __getitem__: Retrieves or creates an AsyncStopwordsSet object for a given stopwords_set_id. + - upsert: Creates or updates a stopwords set. + - retrieve: Retrieves all stopwords sets. + +Attributes: + - RESOURCE_PATH: The API resource path for stopwords operations. + +The AsyncStopwords class interacts with the Typesense API to manage stopwords operations. +It provides methods to create, update, and retrieve stopwords sets, as well as access +individual AsyncStopwordsSet objects. + +This module uses type hinting and is compatible with Python 3.11+ as well as earlier +versions through the use of the typing_extensions library. +""" + +import sys + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + +from .api_call import AsyncApiCall +from .stopwords_set import AsyncStopwordsSet +from typesense.types.stopword import ( + StopwordCreateSchema, + StopwordSchema, + StopwordsRetrieveSchema, +) + + +class AsyncStopwords: + """ + Class for managing stopwords in Typesense (async). + + This class provides methods to interact with stopwords and stopwords sets, including + creating, updating, retrieving, and accessing individual stopwords sets. + + Attributes: + RESOURCE_PATH (str): The API resource path for stopwords operations. + api_call (AsyncApiCall): The API call object for making requests. + stopwords_sets (Dict[str, AsyncStopwordsSet]): A dictionary of AsyncStopwordsSet objects. + """ + + resource_path: typing.Final[str] = "/stopwords" + + def __init__(self, api_call: AsyncApiCall): + """ + Initialize the AsyncStopwords object. + + Args: + api_call (AsyncApiCall): The API call object for making requests. + """ + self.api_call = api_call + self.stopwords_sets: typing.Dict[str, AsyncStopwordsSet] = {} + + def __getitem__(self, stopwords_set_id: str) -> AsyncStopwordsSet: + """ + Get or create an AsyncStopwordsSet object for a given stopwords_set_id. + + Args: + stopwords_set_id (str): The ID of the stopwords set. 
+ + Returns: + AsyncStopwordsSet: The AsyncStopwordsSet object for the given ID. + """ + if not self.stopwords_sets.get(stopwords_set_id): + self.stopwords_sets[stopwords_set_id] = AsyncStopwordsSet( + self.api_call, + stopwords_set_id, + ) + return self.stopwords_sets[stopwords_set_id] + + async def upsert( + self, + stopwords_set_id: str, + stopwords_set: StopwordCreateSchema, + ) -> StopwordSchema: + """ + Create or update a stopwords set. + + Args: + stopwords_set_id (str): The ID of the stopwords set to upsert. + stopwords_set (StopwordCreateSchema): + The schema for creating or updating the stopwords set. + + Returns: + StopwordSchema: The created or updated stopwords set. + """ + response: StopwordSchema = await self.api_call.put( + "/".join([AsyncStopwords.resource_path, stopwords_set_id]), + body=stopwords_set, + entity_type=StopwordSchema, + ) + return response + + async def retrieve(self) -> StopwordsRetrieveSchema: + """ + Retrieve all stopwords sets. + + Returns: + StopwordsRetrieveSchema: The schema containing all stopwords sets. + """ + response: StopwordsRetrieveSchema = await self.api_call.get( + AsyncStopwords.resource_path, + as_json=True, + entity_type=StopwordsRetrieveSchema, + ) + return response diff --git a/src/typesense/async_/stopwords_set.py b/src/typesense/async_/stopwords_set.py new file mode 100644 index 0000000..8514bb9 --- /dev/null +++ b/src/typesense/async_/stopwords_set.py @@ -0,0 +1,87 @@ +""" +This module provides async functionality for managing individual stopwords sets in Typesense. + +Classes: + - AsyncStopwordsSet: Handles async operations related to a specific stopwords set. + +Methods: + - __init__: Initializes the AsyncStopwordsSet object. + - retrieve: Retrieves the details of this specific stopwords set. + - delete: Deletes this specific stopwords set. + - _endpoint_path: Constructs the API endpoint path for this specific stopwords set. 
+ +The AsyncStopwordsSet class interacts with the Typesense API to manage operations on a +specific stopwords set. It provides methods to retrieve and delete individual stopwords sets. + +For more information regarding Stopwords, refer to the Stopwords [documentation] +(https://typesense.org/docs/27.0/api/stopwords.html). + +This module uses type hinting and is compatible with Python 3.11+ as well as earlier +versions through the use of the typing_extensions library. +""" + +from .api_call import AsyncApiCall +from typesense.types.stopword import StopwordDeleteSchema, StopwordsSingleRetrieveSchema + + +class AsyncStopwordsSet: + """ + Class for managing individual stopwords sets in Typesense (async). + + This class provides methods to interact with a specific stopwords set, + including retrieving and deleting it. + + Attributes: + stopwords_set_id (str): The ID of the stopwords set. + api_call (AsyncApiCall): The API call object for making requests. + """ + + def __init__(self, api_call: AsyncApiCall, stopwords_set_id: str) -> None: + """ + Initialize the AsyncStopwordsSet object. + + Args: + api_call (AsyncApiCall): The API call object for making requests. + stopwords_set_id (str): The ID of the stopwords set. + """ + self.stopwords_set_id = stopwords_set_id + self.api_call = api_call + + async def retrieve(self) -> StopwordsSingleRetrieveSchema: + """ + Retrieve this specific stopwords set. + + Returns: + StopwordsSingleRetrieveSchema: The schema containing the stopwords set details. + """ + response: StopwordsSingleRetrieveSchema = await self.api_call.get( + self._endpoint_path, + entity_type=StopwordsSingleRetrieveSchema, + as_json=True, + ) + return response + + async def delete(self) -> StopwordDeleteSchema: + """ + Delete this specific stopwords set. + + Returns: + StopwordDeleteSchema: The schema containing the deletion response. 
+ """ + response: StopwordDeleteSchema = await self.api_call.delete( + self._endpoint_path, + entity_type=StopwordDeleteSchema, + ) + return response + + @property + def _endpoint_path(self) -> str: + """ + Construct the API endpoint path for this specific stopwords set. + + Returns: + str: The constructed endpoint path. + """ + from .stopwords import AsyncStopwords + + return "/".join([AsyncStopwords.resource_path, self.stopwords_set_id]) diff --git a/src/typesense/async_/synonym.py b/src/typesense/async_/synonym.py new file mode 100644 index 0000000..3ad6bc2 --- /dev/null +++ b/src/typesense/async_/synonym.py @@ -0,0 +1,104 @@ +""" +This module provides async functionality for managing individual synonyms in Typesense. + +Classes: + - AsyncSynonym: Handles async operations related to a specific synonym within a collection. + +Methods: + - __init__: Initializes the AsyncSynonym object. + - _endpoint_path: Constructs the API endpoint path for this specific synonym. + - retrieve: Retrieves the details of this specific synonym. + - delete: Deletes this specific synonym. + +The AsyncSynonym class interacts with the Typesense API to manage operations on a +specific synonym within a collection. It provides methods to retrieve and delete +individual synonyms. + +For more information regarding Synonyms, refer to the Synonyms [documentation] +(https://typesense.org/docs/27.0/api/synonyms.html#synonyms). + +This module uses type hinting and is compatible with Python 3.11+ as well as earlier +versions through the use of the typing_extensions library. +""" + +from .api_call import AsyncApiCall +from typesense.logger import warn_deprecation +from typesense.types.synonym import SynonymDeleteSchema, SynonymSchema + + +class AsyncSynonym: + """ + Class for managing individual synonyms in a Typesense collection (async). + + This class provides methods to interact with a specific synonym, + including retrieving and deleting it. 
+ + Attributes: + api_call (AsyncApiCall): The API call object for making requests. + collection_name (str): The name of the collection. + synonym_id (str): The ID of the synonym. + """ + + def __init__( + self, + api_call: AsyncApiCall, + collection_name: str, + synonym_id: str, + ) -> None: + """ + Initialize the AsyncSynonym object. + + Args: + api_call (AsyncApiCall): The API call object for making requests. + collection_name (str): The name of the collection. + synonym_id (str): The ID of the synonym. + """ + self.api_call = api_call + self.collection_name = collection_name + self.synonym_id = synonym_id + + async def retrieve(self) -> SynonymSchema: + """ + Retrieve this specific synonym. + + Returns: + SynonymSchema: The schema containing the synonym details. + """ + return await self.api_call.get(self._endpoint_path, entity_type=SynonymSchema) + + async def delete(self) -> SynonymDeleteSchema: + """ + Delete this specific synonym. + + Returns: + SynonymDeleteSchema: The schema containing the deletion response. + """ + return await self.api_call.delete( + self._endpoint_path, + entity_type=SynonymDeleteSchema, + ) + + @property + @warn_deprecation( # type: ignore[untyped-decorator] + "The synonym API (collections/{collection}/synonyms/{synonym_id}) is deprecated is removed on v30+. " + "Use synonym sets (synonym_sets) instead.", + flag_name="synonyms_deprecation", + ) + def _endpoint_path(self) -> str: + """ + Construct the API endpoint path for this specific synonym. + + Returns: + str: The constructed endpoint path. 
+ """ + from .collections import AsyncCollections + from .synonyms import AsyncSynonyms + + return "/".join( + [ + AsyncCollections.resource_path, + self.collection_name, + AsyncSynonyms.resource_path, + self.synonym_id, + ], + ) diff --git a/src/typesense/async_/synonym_set.py b/src/typesense/async_/synonym_set.py new file mode 100644 index 0000000..43dde26 --- /dev/null +++ b/src/typesense/async_/synonym_set.py @@ -0,0 +1,102 @@ +"""Client for single Synonym Set operations (async).""" + +import sys + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + +from .api_call import AsyncApiCall +from typesense.types.synonym_set import ( + SynonymItemDeleteSchema, + SynonymItemSchema, + SynonymSetCreateSchema, + SynonymSetDeleteSchema, + SynonymSetRetrieveSchema, +) + + +class AsyncSynonymSet: + def __init__(self, api_call: AsyncApiCall, name: str) -> None: + self.api_call = api_call + self.name = name + + @property + def _endpoint_path(self) -> str: + from .synonym_sets import AsyncSynonymSets + + return "/".join([AsyncSynonymSets.resource_path, self.name]) + + async def retrieve(self) -> SynonymSetRetrieveSchema: + response: SynonymSetRetrieveSchema = await self.api_call.get( + self._endpoint_path, + as_json=True, + entity_type=SynonymSetRetrieveSchema, + ) + return response + + async def upsert(self, set: SynonymSetCreateSchema) -> SynonymSetCreateSchema: + response: SynonymSetCreateSchema = await self.api_call.put( + self._endpoint_path, + entity_type=SynonymSetCreateSchema, + body=set, + ) + return response + + async def delete(self) -> SynonymSetDeleteSchema: + response: SynonymSetDeleteSchema = await self.api_call.delete( + self._endpoint_path, + entity_type=SynonymSetDeleteSchema, + ) + return response + + @property + def _items_path(self) -> str: + return "/".join([self._endpoint_path, "items"]) # /synonym_sets/{name}/items + + async def list_items( + self, + *, + limit: typing.Union[int, None] = None, + offset: 
typing.Union[int, None] = None, + ) -> typing.List[SynonymItemSchema]: + params: typing.Dict[str, typing.Union[int, None]] = { + "limit": limit, + "offset": offset, + } + clean_params: typing.Dict[str, int] = { + k: v for k, v in params.items() if v is not None + } + response: typing.List[SynonymItemSchema] = await self.api_call.get( + self._items_path, + as_json=True, + entity_type=typing.List[SynonymItemSchema], + params=clean_params or None, + ) + return response + + async def get_item(self, item_id: str) -> SynonymItemSchema: + response: SynonymItemSchema = await self.api_call.get( + "/".join([self._items_path, item_id]), + as_json=True, + entity_type=SynonymItemSchema, + ) + return response + + async def upsert_item( + self, item_id: str, item: SynonymItemSchema + ) -> SynonymItemSchema: + response: SynonymItemSchema = await self.api_call.put( + "/".join([self._items_path, item_id]), + body=item, + entity_type=SynonymItemSchema, + ) + return response + + async def delete_item(self, item_id: str) -> SynonymItemDeleteSchema: + # API returns {"id": "..."} for delete; openapi defines SynonymItemDeleteResponse with name but for items it's id + response: SynonymItemDeleteSchema = await self.api_call.delete( + "/".join([self._items_path, item_id]), entity_type=SynonymItemDeleteSchema + ) + return response diff --git a/src/typesense/async_/synonym_sets.py b/src/typesense/async_/synonym_sets.py new file mode 100644 index 0000000..11f8586 --- /dev/null +++ b/src/typesense/async_/synonym_sets.py @@ -0,0 +1,34 @@ +"""Client for Synonym Sets collection operations (async).""" + +import sys + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + +from .api_call import AsyncApiCall +from .synonym_set import AsyncSynonymSet +from typesense.types.synonym_set import ( + SynonymSetSchema, +) + + +class AsyncSynonymSets: + resource_path: typing.Final[str] = "/synonym_sets" + + def __init__(self, api_call: AsyncApiCall) -> None: + 
self.api_call = api_call + + async def retrieve(self) -> typing.List[SynonymSetSchema]: + response: typing.List[SynonymSetSchema] = await self.api_call.get( + AsyncSynonymSets.resource_path, + as_json=True, + entity_type=typing.List[SynonymSetSchema], + ) + return response + + def __getitem__(self, synonym_set_name: str) -> AsyncSynonymSet: + from .synonym_set import AsyncSynonymSet as PerSet + + return PerSet(self.api_call, synonym_set_name) diff --git a/src/typesense/async_/synonyms.py b/src/typesense/async_/synonyms.py new file mode 100644 index 0000000..027172e --- /dev/null +++ b/src/typesense/async_/synonyms.py @@ -0,0 +1,152 @@ +""" +This module provides async functionality for managing synonyms in Typesense. + +Classes: + - AsyncSynonyms: Handles async operations related to synonyms within a collection. + +Methods: + - __init__: Initializes the AsyncSynonyms object. + - __getitem__: Retrieves or creates an AsyncSynonym object for a given synonym_id. + - _endpoint_path: Constructs the API endpoint path for synonym operations. + - upsert: Creates or updates a synonym. + - retrieve: Retrieves all synonyms for the collection. + +Attributes: + - RESOURCE_PATH: The API resource path for synonyms. + +The AsyncSynonyms class interacts with the Typesense API to manage synonym operations +within a specific collection. It provides methods to create, update, and retrieve +synonyms, as well as access individual AsyncSynonym objects. + +For more information regarding Synonyms, refer to the Synonyms [documentation] +(https://typesense.org/docs/27.0/api/synonyms.html#synonyms). + +This module uses type hinting and is compatible with Python 3.11+ as well as earlier +versions through the use of the typing_extensions library. 
+""" + +import sys + +from typing_extensions import deprecated + +from .api_call import AsyncApiCall +from .synonym import AsyncSynonym +from typesense.logger import warn_deprecation +from typesense.types.synonym import ( + SynonymCreateSchema, + SynonymSchema, + SynonymsRetrieveSchema, +) + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + + +@deprecated("AsyncSynonyms is deprecated on v30+. Use client.synonym_sets instead.") +class AsyncSynonyms: + """ + Class for managing synonyms in a Typesense collection (async). + + This class provides methods to interact with synonyms, including + retrieving, creating, and updating them. + + Attributes: + RESOURCE_PATH (str): The API resource path for synonyms. + api_call (AsyncApiCall): The API call object for making requests. + collection_name (str): The name of the collection. + synonyms (Dict[str, AsyncSynonym]): A dictionary of AsyncSynonym objects. + """ + + resource_path: typing.Final[str] = "synonyms" + + def __init__(self, api_call: AsyncApiCall, collection_name: str) -> None: + """ + Initialize the AsyncSynonyms object. + + Args: + api_call (AsyncApiCall): The API call object for making requests. + collection_name (str): The name of the collection. + """ + self.api_call = api_call + self.collection_name = collection_name + self.synonyms: typing.Dict[str, AsyncSynonym] = {} + + def __getitem__(self, synonym_id: str) -> AsyncSynonym: + """ + Get or create an AsyncSynonym object for a given synonym_id. + + Args: + synonym_id (str): The ID of the synonym. + + Returns: + AsyncSynonym: The AsyncSynonym object for the given ID. + """ + if not self.synonyms.get(synonym_id): + self.synonyms[synonym_id] = AsyncSynonym( + self.api_call, + self.collection_name, + synonym_id, + ) + return self.synonyms[synonym_id] + + async def upsert( + self, synonym_id: str, schema: SynonymCreateSchema + ) -> SynonymSchema: + """ + Create or update a synonym. 
+ + Args: + id (str): The ID of the synonym. + schema (SynonymCreateSchema): The schema for creating or updating the synonym. + + Returns: + SynonymSchema: The created or updated synonym. + """ + response = await self.api_call.put( + self._endpoint_path(synonym_id), + body=schema, + entity_type=SynonymSchema, + ) + return response + + async def retrieve(self) -> SynonymsRetrieveSchema: + """ + Retrieve all synonyms for the collection. + + Returns: + SynonymsRetrieveSchema: The schema containing all synonyms. + """ + response = await self.api_call.get( + self._endpoint_path(), + entity_type=SynonymsRetrieveSchema, + ) + return response + + @warn_deprecation( # type: ignore[untyped-decorator] + "The synonyms API (collections/{collection}/synonyms) is deprecated is removed on v30+. " + "Use synonym sets (synonym_sets) instead.", + flag_name="synonyms_deprecation", + ) + def _endpoint_path(self, synonym_id: typing.Union[str, None] = None) -> str: + """ + Construct the API endpoint path for synonym operations. + + Args: + synonym_id (Union[str, None], optional): The ID of the synonym. Defaults to None. + + Returns: + str: The constructed endpoint path. + """ + from .collections import AsyncCollections + + synonym_id = synonym_id or "" + return "/".join( + [ + AsyncCollections.resource_path, + self.collection_name, + AsyncSynonyms.resource_path, + synonym_id, + ], + ) diff --git a/src/typesense/configuration.py b/src/typesense/configuration.py new file mode 100644 index 0000000..85159cd --- /dev/null +++ b/src/typesense/configuration.py @@ -0,0 +1,405 @@ +""" +This module provides configuration management for the Typesense Instance. + +Classes: + - Config: Handles loading and accessing configuration settings. + - Node: Represents a node in the Typesense cluster. + +Functions: + - load_config: Loads configuration from a file. + - get_setting: Retrieves a specific setting from the configuration. + - set_setting: Updates a specific setting in the configuration. 
+ +Exceptions: + - ConfigError: Custom exception for configuration-related errors. +""" + +import sys +import time + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + +from urllib.parse import urlparse + +from typesense.exceptions import ConfigError +from typesense.logger import logger + + +class NodeConfigDict(typing.TypedDict): + """ + A dictionary that represents the configuration for a node in the Typesense cluster. + + Attributes: + host (str): The host name of the node. + port (int): The port number of the node. + path (str, optional): The path of the node. + protocol (typing.Literal['http', 'https'] | str): The protocol of the node. + """ + + host: str + port: int + path: typing.NotRequired[str] + protocol: typing.Union[typing.Literal["http", "https"], str] + + +class ConfigDict(typing.TypedDict): + """ + A dictionary that represents the configuration for the Typesense client. + + Attributes: + nodes (list[typing.Union[str, NodeConfigDict]]): A list of dictionaries or URLs that + represent the nodes in the cluster. + + nearest_node (typing.Union[str, NodeConfigDict]): A dictionary or URL + that represents the nearest node to the client. + + api_key (str): The API key to use for authentication. + + num_retries (int): The number of retries to attempt before failing. + + interval_seconds (int): The interval in seconds between retries. + + healthcheck_interval_seconds (int): The interval in seconds between + health checks. + + verify (bool): Whether to verify the SSL certificate. + + timeout_seconds (int, deprecated): The connection timeout in seconds. + + master_node (typing.Union[str, NodeConfigDict], deprecated): A dictionary or + URL that represents the master node. + + additional_headers (dict): Additional headers to include in the request. + + read_replica_nodes (list[typing.Union[str, NodeConfigDict]], deprecated): A list of + dictionaries or URLs that represent the read replica nodes. 
+ + connection_timeout_seconds (float): The connection timeout in seconds. + + suppress_deprecation_warnings (bool): Whether to suppress deprecation warnings. + """ + + nodes: typing.List[typing.Union[str, NodeConfigDict]] + nearest_node: typing.NotRequired[typing.Union[str, NodeConfigDict]] + api_key: str + num_retries: typing.NotRequired[int] + interval_seconds: typing.NotRequired[int] + healthcheck_interval_seconds: typing.NotRequired[int] + verify: typing.NotRequired[bool] + timeout_seconds: typing.NotRequired[int] # deprecated + master_node: typing.NotRequired[typing.Union[str, NodeConfigDict]] # deprecated + additional_headers: typing.NotRequired[typing.Dict[str, str]] + read_replica_nodes: typing.NotRequired[ + typing.List[typing.Union[str, NodeConfigDict]] + ] # deprecated + connection_timeout_seconds: typing.NotRequired[float] + suppress_deprecation_warnings: typing.NotRequired[bool] + + +class Node: + """ + Class for representing a node in the Typesense cluster. + + Attributes: + host (str): The host name of the node. + port (str | int): The port number of the node. + path (str): The path of the node. + protocol (typing.Literal['http', 'https'] | str): The protocol of the node. + healthy (bool): Whether the node is healthy or not. + """ + + def __init__( + self, + host: str, + port: typing.Union[str, int], + path: str, + protocol: typing.Union[typing.Literal["http", "https"], str], + ) -> None: + """ + Initialize a Node object with the specified host, port, path, and protocol. + + Args: + host (str): The host name of the node. + port (str | int): The port number of the node. + path (str): The path of the node. + protocol (typing.Literal['http', 'https'] | str): The protocol of the node. 
+ """ + self.host = host + self.port = port + self.path = path + self.protocol = protocol + + # Used to skip bad hosts + self.healthy = True + + # Used to track the last time this node was accessed + self.last_access_ts: int = int(time.time()) + + @classmethod + def from_url(cls, url: str) -> "Node": + """ + Initialize a Node object from a URL string. + + Args: + url (str): The URL string to parse. + + Returns: + Node: The Node object created from the URL string. + + Raises: + ConfigError: If the URL does not contain the host name, port number, or protocol. + """ + parsed = urlparse(url) + if not parsed.hostname: + raise ConfigError("Node URL does not contain the host name.") + if not parsed.port: + raise ConfigError("Node URL does not contain the port.") + if not parsed.scheme: + raise ConfigError("Node URL does not contain the protocol.") + + return cls(parsed.hostname, parsed.port, parsed.path, parsed.scheme) + + def url(self) -> str: + """ + Generate the URL of the node. + + Returns: + str: The URL of the node + """ + return f"{self.protocol}://{self.host}:{self.port}{self.path}" + + +class Configuration: + """ + Class for managing the configuration settings for the Typesense client. + + Attributes: + nodes (list[Node]): A list of Node objects representing the nodes in the cluster. + nearest_node (Node | None): The nearest node to the client. + api_key (str): The API key to use for authentication. + connection_timeout_seconds (float): The connection timeout in seconds. + num_retries (int): The number of retries to attempt before failing. + retry_interval_seconds (float): The interval in seconds between retries. + healthcheck_interval_seconds (int): The interval in seconds between health checks. + verify (bool): Whether to verify the SSL certificate. + """ + + def __init__( + self, + config_dict: ConfigDict, + ) -> None: + """ + Initialize a Configuration object with the specified configuration settings. 
+ + Args: + config_dict (ConfigDict): A dictionary containing the configuration settings. + """ + self.validations = ConfigurationValidations + self.validations.show_deprecation_warnings(config_dict) + self.validations.validate_config_dict(config_dict) + + self.nodes: typing.List[Node] = [ + self._initialize_nodes(node) for node in config_dict["nodes"] + ] + + nearest_node = config_dict.get("nearest_node", None) + + self.nearest_node = self._handle_nearest_node(nearest_node) + self.api_key = config_dict.get("api_key", " ") + self.connection_timeout_seconds = config_dict.get( + "connection_timeout_seconds", + 3.0, + ) + self.num_retries = config_dict.get("num_retries", 3) + self.retry_interval_seconds = config_dict.get("retry_interval_seconds", 1.0) + self.healthcheck_interval_seconds = config_dict.get( + "healthcheck_interval_seconds", + 60, + ) + self.verify = config_dict.get("verify", True) + self.additional_headers = config_dict.get("additional_headers", {}) + self.suppress_deprecation_warnings = config_dict.get( + "suppress_deprecation_warnings", False + ) + + def _handle_nearest_node( + self, + nearest_node: typing.Union[str, NodeConfigDict, None], + ) -> typing.Union[Node, None]: + """ + Handle the nearest node configuration. + + Args: + nearest_node (str | NodeConfigDict): The nearest node configuration. + + Returns: + Node | None: The nearest node object if it exists, None otherwise. + """ + if nearest_node is None: + return None + return self._initialize_nodes(nearest_node) + + def _initialize_nodes( + self, + node: typing.Union[str, NodeConfigDict], + ) -> Node: + """ + Handle the initialization of a node. + + Args: + node (Node): The node to initialize. + + Returns: + Node: The initialized node. 
+ """ + if isinstance(node, str): + return Node.from_url(node) + + return Node( + node["host"], + node["port"], + node.get("path", ""), + node["protocol"], + ) + + +class ConfigurationValidations: + """Class for validating the configuration dictionary.""" + + @staticmethod + def validate_config_dict(config_dict: ConfigDict) -> None: + """ + Validate the configuration dictionary to ensure it contains the required fields. + + Args: + config_dict (ConfigDict): The configuration dictionary to validate. + + Raises: + ConfigError: If the configuration dictionary is missing required fields. + """ + ConfigurationValidations.validate_required_config_fields(config_dict) + ConfigurationValidations.validate_nodes(config_dict["nodes"]) + + nearest_node = config_dict.get("nearest_node", None) + if nearest_node: + ConfigurationValidations.validate_nearest_node(nearest_node) + + @staticmethod + def validate_required_config_fields(config_dict: ConfigDict) -> None: + """ + Validate the presence of required fields in the configuration dictionary. + + Args: + config_dict (ConfigDict): The configuration dictionary to validate. + + Raises: + ConfigError: If the configuration dictionary is missing required fields. + """ + if not config_dict.get("nodes"): + raise ConfigError("`nodes` is not defined.") + + if not config_dict.get("api_key"): + raise ConfigError("`api_key` is not defined.") + + @staticmethod + def validate_nodes(nodes: typing.List[typing.Union[str, NodeConfigDict]]) -> None: + """ + Validate the nodes in the configuration dictionary. + + Args: + nodes (list): The list of nodes to validate. + + Raises: + ConfigError: If any node is invalid. 
+ """ + for node in nodes: + if not ConfigurationValidations.validate_node_fields(node): + raise ConfigError( + " ".join( + [ + "`node` entry must be a URL string or a", + "dictionary with the following required keys:", + "host, port, protocol", + ], + ), + ) + + @staticmethod + def validate_nearest_node(nearest_node: typing.Union[str, NodeConfigDict]) -> None: + """ + Validate the nearest node in the configuration dictionary. + + Args: + nearest_node (dict): The nearest node to validate. + + Raises: + ConfigError: If the nearest node is invalid. + """ + if not ConfigurationValidations.validate_node_fields(nearest_node): + raise ConfigError( + " ".join( + [ + "`nearest_node` entry must be a URL string or a dictionary", + "with the following required keys:", + "host, port, protocol", + ], + ), + ) + + @staticmethod + def validate_node_fields(node: typing.Union[str, NodeConfigDict]) -> bool: + """ + Validate the fields of a node in the configuration dictionary. + + Args: + node (str | NodeConfigDict): The node to validate. + + Returns: + bool: True if the node is valid, False otherwise. + """ + if isinstance(node, str): + return True + expected_fields = {"host", "port", "protocol"} + return expected_fields.issubset(node) + + @staticmethod + def show_deprecation_warnings(config_dict: ConfigDict) -> None: + """ + Show deprecation warnings for deprecated configuration fields. + + Args: + config_dict (ConfigDict): The configuration dictionary + to check for deprecated fields. 
+ """ + if config_dict.get("timeout_seconds"): + logger.warning( + " ".join( + [ + "Deprecation warning: timeout_seconds is now renamed", + "to connection_timeout_seconds", + ], + ), + ) + + if config_dict.get("master_node"): + logger.warning( + " ".join( + [ + "Deprecation warning: master_node is now consolidated", + "to nodes,starting with Typesense Server v0.12", + ], + ), + ) + + if config_dict.get("read_replica_nodes"): + logger.warning( + " ".join( + [ + "Deprecation warning: read_replica_nodes is now", + "consolidated to nodes, starting with Typesense Server v0.12", + ], + ), + ) diff --git a/src/typesense/exceptions.py b/src/typesense/exceptions.py new file mode 100644 index 0000000..f1af518 --- /dev/null +++ b/src/typesense/exceptions.py @@ -0,0 +1,81 @@ +""" +This module defines custom exception classes for the Typesense client. + +Classes: + - TypesenseClientError: Base exception class for Typesense client errors. + - ConfigError: Raised when there is an error in the client configuration. + - Timeout: Raised when a request times out. + - RequestMalformed: Raised when a request's parameters are malformed. + - RequestUnauthorized: Raised when a request is unauthorized. + - RequestForbidden: Raised when a request is forbidden. + - ObjectNotFound: Raised when a resource is not found. + - ObjectAlreadyExists: Raised when a resource already exists. + - ObjectUnprocessable: Raised when a resource is unprocessable. + - ServerError: Raised when the server encounters an error. + - ServiceUnavailable: Raised when the service is unavailable. + - HTTPStatus0Error: Raised when the HTTP status code is 0. + - InvalidParameter: Raised when a parameter is invalid. + +These exception classes provide specific error types for various scenarios +that may occur when interacting with the Typesense API. + +This module uses type hinting and is compatible with Python 3.11+ as well as earlier +versions through the use of the typing_extensions library. 
+""" + + +class TypesenseClientError(IOError): + """ + Base exception class for Typesense client errors. + + This class extends IOError and serves as the parent class for all + custom Typesense client exceptions. + """ + + +class ConfigError(TypesenseClientError): + """Raised when there is an error in the client configuration.""" + + +class Timeout(TypesenseClientError): + """Raised when a request times out.""" + + +class RequestMalformed(TypesenseClientError): + """Raised when a request's parameters are malformed.""" + + +class RequestUnauthorized(TypesenseClientError): + """Raised when a request is unauthorized.""" + + +class RequestForbidden(TypesenseClientError): + """Raised when a request is forbidden.""" + + +class ObjectNotFound(TypesenseClientError): + """Raised when a resource is not found.""" + + +class ObjectAlreadyExists(TypesenseClientError): + """Raised when a resource already exists.""" + + +class ObjectUnprocessable(TypesenseClientError): + """Raised when a resource is unprocessable.""" + + +class ServerError(TypesenseClientError): + """Raised when the server encounters an error.""" + + +class ServiceUnavailable(TypesenseClientError): + """Raised when the service is unavailable.""" + + +class HTTPStatus0Error(TypesenseClientError): + """Raised when the HTTP status code is 0.""" + + +class InvalidParameter(TypesenseClientError): + """Raised when a parameter is invalid.""" diff --git a/src/typesense/logger.py b/src/typesense/logger.py new file mode 100644 index 0000000..2834e28 --- /dev/null +++ b/src/typesense/logger.py @@ -0,0 +1,78 @@ +"""Logging configuration for the Typesense Python client.""" + +import functools +import logging +import sys + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + +logger = logging.getLogger("typesense") +logger.setLevel(logging.WARN) + +_deprecation_warnings: typing.Dict[str, bool] = {} + +if sys.version_info >= (3, 11): + from typing import ParamSpec, TypeVar +else: + from 
typing_extensions import ParamSpec, TypeVar + +P = ParamSpec("P") +R = TypeVar("R") + + +def warn_deprecation( + message: str, + *, + flag_name: typing.Union[str, None] = None, +) -> typing.Callable[[typing.Callable[P, R]], typing.Callable[P, R]]: + """ + Decorator to warn about deprecation when a method is called. + + This decorator will log a deprecation warning once per flag_name when the + decorated method is called. The warning is only shown once to avoid spam. + + Args: + message: The deprecation warning message to display. + flag_name: Optional name for the warning flag. If not provided, a default + name will be generated based on the function's module and name. + + Returns: + A decorator function that wraps the target method. + + Example: + >>> @warn_deprecation("This method is deprecated", flag_name="my_method") + ... def my_method(self): + ... return "result" + """ + + def decorator(func: typing.Callable[P, R]) -> typing.Callable[P, R]: + if flag_name is None: + flag = f"{func.__module__}.{func.__qualname__}" + else: + flag = flag_name + + @functools.wraps(func) + def wrapper(*args: P.args, **kwargs: P.kwargs) -> R: + suppress_warnings = False + if ( + args + and len(args) > 1 + and args[1] + and args[1].__class__.__name__ == "ApiCall" + and hasattr(args[1], "config") + ): + suppress_warnings = getattr( + args[1].config, "suppress_deprecation_warnings", False + ) + + if not suppress_warnings and not _deprecation_warnings.get(flag, False): + logger.warning(f"Deprecation warning: {message}") + _deprecation_warnings[flag] = True + return func(*args, **kwargs) + + return typing.cast(typing.Callable[P, R], wrapper) + + return decorator diff --git a/src/typesense/node_manager.py b/src/typesense/node_manager.py new file mode 100644 index 0000000..e671c8d --- /dev/null +++ b/src/typesense/node_manager.py @@ -0,0 +1,128 @@ +""" +This module provides functionality for managing nodes in a Typesense cluster configuration. 
+ +It contains the NodeManager class, which is responsible for node selection, health checks, +and rotation strategies for load balancing and fault tolerance in a Typesense cluster. + +Key features: +- Round-robin node selection +- Nearest node prioritization (if configured) +- Node health tracking and updates +- Periodic health checks based on a configurable interval + +Classes: + NodeManager: Manages the nodes in a Typesense cluster configuration. + +Dependencies: + - typesense.configuration: Provides Configuration and Node classes + - typesense.logger: Provides logging functionality + +Usage: + from typesense.configuration import Configuration + from node_manager import NodeManager + + config = Configuration(...) + node_manager = NodeManager(config) + node = node_manager.get_node() + +Note: This module is part of the Typesense Python client library and is +used internally by other components of the library. +""" + +import copy +import time + +from typesense.configuration import Configuration, Node +from typesense.logger import logger + + +class NodeManager: + """ + Manages the nodes in a Typesense cluster configuration. + + This class handles node selection, health checks, and rotation for load balancing + and fault tolerance in a Typesense cluster. + + Attributes: + config (Configuration): The configuration object for the Typesense client. + nodes (List[Node]): A copy of the nodes from the configuration. + node_index (int): The index of the current node in the rotation. + """ + + def __init__(self, config: Configuration): + """ + Initialize the NodeManager with a given configuration. + + Args: + config (Configuration): The configuration object for the Typesense client. + """ + self.config = config + self.nodes = copy.deepcopy(config.nodes) + self.node_index = 0 + self._initialize_nodes() + + def get_node(self) -> Node: + """ + Get the next available healthy node. 
+ + This method implements a round-robin selection strategy, prioritizing the nearest node + if configured, and considering the health status of each node. + + Returns: + Node: The selected node for the next operation. + """ + if self.config.nearest_node: + if self.config.nearest_node.healthy or self._is_due_for_health_check( + self.config.nearest_node, + ): + return self.config.nearest_node + + node_index = 0 + while node_index < len(self.nodes): + node_index += 1 + node = self.nodes[self.node_index] + self.node_index = (self.node_index + 1) % len(self.nodes) + if node.healthy or self._is_due_for_health_check(node): + return node + + logger.debug("No healthy nodes were found. Returning the next node.") + return self.nodes[self.node_index] + + def set_node_health(self, node: Node, is_healthy: bool) -> None: + """ + Set the health status of a node and update its last access timestamp. + + Args: + node (Node): The node to update. + is_healthy (bool): The health status to set for the node. + """ + node.healthy = is_healthy + node.last_access_ts = int(time.time()) + + def _is_due_for_health_check(self, node: Node) -> bool: + """ + Check if a node is due for a health check based on the configured interval. + + Args: + node (Node): The node to check. + + Returns: + bool: True if the node is due for a health check, False otherwise. + """ + current_epoch_ts = int(time.time()) + return bool( + (current_epoch_ts - node.last_access_ts) + > self.config.healthcheck_interval_seconds, + ) + + def _initialize_nodes(self) -> None: + """ + Initialize all nodes as healthy. + + This method sets the initial health status of all nodes, including the nearest node + if configured, to healthy. 
+ """ + if self.config.nearest_node: + self.set_node_health(self.config.nearest_node, is_healthy=True) + for node in self.nodes: + self.set_node_health(node, is_healthy=True) diff --git a/src/typesense/preprocess.py b/src/typesense/preprocess.py new file mode 100644 index 0000000..b45db0c --- /dev/null +++ b/src/typesense/preprocess.py @@ -0,0 +1,147 @@ +""" +Functionality for preprocessing parameters in the Typesense Python client library. + +This module contains utility functions for converting various data types to strings and +processing parameter lists and dictionaries. These functions are used to prepare +data for API requests to Typesense. + +Key features: +- Convert individual values (int, str, bool) to strings +- Process lists of parameters into comma-separated strings +- Stringify search parameter dictionaries + +Functions: + stringify: Convert a single value to a string. + process_param_list: Convert a list of parameters to a comma-separated string. + stringify_search_params: Convert a dictionary of search parameters to strings. + +Types: + _ListTypes: Type alias for a list of strings, integers, or booleans. + _Types: Type alias for a single string, integer, or boolean. + ParamSchema: Type alias for a dictionary of search parameters. + StringifiedParamSchema: Type alias for a dictionary of stringified search parameters. + +Dependencies: + - typesense.exceptions: Provides InvalidParameter exception + - typing or typing_extensions: For type hinting + +Note: This module uses conditional imports to support both Python 3.11+ and earlier versions. 
+""" + +import sys + +from typesense.exceptions import InvalidParameter + +if sys.version_info > (3, 11): + import typing +else: + import typing_extensions as typing + +_ListTypes = typing.List[typing.Union[str, int, bool]] +_Types = typing.Union[int, str, bool] +ParamSchema: typing.TypeAlias = typing.Dict[ + str, + typing.Union[ + _Types, + _ListTypes, + ], +] +StringifiedParamSchema: typing.TypeAlias = typing.Dict[str, str] + + +def stringify(argument: _Types) -> str: + """ + Convert a single value to a string. + + Args: + argument (_Types): The value to be converted to a string. + + Returns: + str: The stringified version of the input. + + Raises: + InvalidParameter: If the input is not a string, integer, or boolean. + + Examples: + >>> stringify(True) + 'true' + >>> stringify(42) + '42' + >>> stringify("Hello") + 'Hello' + """ + if not isinstance(argument, (str, int, bool)): + raise InvalidParameter( + f"Value {argument} is not a string, integer, or boolean.", + ) + if isinstance(argument, (bool, int)): + return str(argument).lower() + return argument + + +def process_param_list( + parammeter_list: typing.List[typing.Union[str, bool, int]], +) -> str: + """ + Concatenate a list of parameters into a string. + + Args: + parammeter_list (typing.List[str | int | bool]): The list of parameters. + + Returns: + str: The concatenated parameters + + Raises: + InvalidParameter: If the value is not a string, integer, or boolean. + + Examples: + >>> process_param_list(["a", "b", "c"]) + "a,b,c" + >>> process_param_list([1, 2, 3]) + "1,2,3" + >>> process_param_list([True, False, True]) + "true,false,true" + >>> process_param_list([True, 1, "c"]) + "true,1,c" + """ + stringified_list = [ + stringify(parameter_element) for parameter_element in parammeter_list + ] + return ",".join(stringified_list) + + +def stringify_search_params(parameter_dict: ParamSchema) -> StringifiedParamSchema: + """ + Convert the search parameters to strings. 
+ + This function takes a dictionary of search parameters and converts all values + to their string representations. List values are converted to comma-separated strings. + + Args: + parameter_dict (ParamSchema): The search parameters. + + Returns: + StringifiedParamSchema: The search parameters as strings. + + Raises: + InvalidParameter: If a value is not a string, integer, or boolean. + + Examples: + >>> stringify_search_params({"a": 1, "b": "c", "d": True}) + {"a": "1", "b": "c", "d": "true"} + >>> stringify_search_params({"a": [1, 2, 3], "b": ["c", "d", "e"]}) + {"a": "1,2,3", "b": "c,d,e"} + >>> stringify_search_params({"a": [True, False, True], "b": [1, 2, 3]}) + {"a": "true,false,true", "b": "1,2,3"} + """ + stringified_params: StringifiedParamSchema = {} + for key, param_value in parameter_dict.items(): + if isinstance(param_value, list): + stringified_params[key] = process_param_list(param_value) + elif isinstance(param_value, (bool, int, str)): + stringified_params[key] = stringify(param_value) + else: + raise InvalidParameter( + f"Value {param_value} is not a string, integer, or boolean", + ) + return stringified_params diff --git a/src/typesense/py.typed b/src/typesense/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/src/typesense/request_handler.py b/src/typesense/request_handler.py new file mode 100644 index 0000000..38e6c24 --- /dev/null +++ b/src/typesense/request_handler.py @@ -0,0 +1,318 @@ +""" +This module provides functionality for handling HTTP requests in the Typesense client library. + +Classes: + - RequestHandler: Manages HTTP requests to the Typesense API (supports both sync and async). + - SessionFunctionKwargs: Type for keyword arguments in session functions. + +The RequestHandler class interacts with the Typesense API to manage HTTP requests, +handle authentication, and process responses. It provides methods to send requests, +normalize parameters, and handle errors. 
It supports both sync (httpx.Client) and async (httpx.AsyncClient) clients. + +This module uses type hinting and is compatible with Python 3.11+ as well as earlier +versions through the use of the typing_extensions library. + +Key Features: +- Handles authentication via API key +- Supports JSON and non-JSON responses +- Provides custom error handling for various HTTP status codes +- Normalizes boolean parameters for API requests +- Supports both sync (httpx.Client) and async (httpx.AsyncClient) HTTP clients + +Note: This module relies on the 'httpx' library for both sync and async operations. +""" + +import json +import sys +from types import MappingProxyType + +import httpx + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + +from typesense.configuration import Configuration +from typesense.exceptions import ( + HTTPStatus0Error, + ObjectAlreadyExists, + ObjectNotFound, + ObjectUnprocessable, + RequestForbidden, + RequestMalformed, + RequestUnauthorized, + ServerError, + ServiceUnavailable, + TypesenseClientError, +) + +TEntityDict = typing.TypeVar("TEntityDict") +TParams = typing.TypeVar("TParams", bound=typing.Dict[str, typing.Any]) +TBody = typing.TypeVar("TBody", bound=typing.Union[str, bytes]) + +_ERROR_CODE_MAP: typing.Mapping[str, typing.Type[TypesenseClientError]] = ( + MappingProxyType( + { + "0": HTTPStatus0Error, + "400": RequestMalformed, + "401": RequestUnauthorized, + "403": RequestForbidden, + "404": ObjectNotFound, + "409": ObjectAlreadyExists, + "422": ObjectUnprocessable, + "500": ServerError, + "503": ServiceUnavailable, + }, + ) +) + + +class SessionFunctionKwargs(typing.Generic[TParams, TBody], typing.TypedDict): + """ + Type definition for keyword arguments used in request functions. + + This is an internal abstraction that gets converted to httpx's request parameters. + The `data` field is converted to `content` when passed to httpx. 
+ + Note: `verify` and `timeout` are set on the httpx client, not in request kwargs. + However, we include them here for compatibility with the existing API. + + Attributes: + params (Optional[Union[TParams, None]]): Query parameters for the request. + Passed as `params` to httpx. + + data (Optional[Union[TBody, str, None]]): Body of the request. + Converted to `content` (JSON string) when passed to httpx. + + headers (Optional[Dict[str, str]]): Headers for the request. + Passed as `headers` to httpx. + + timeout (float): Timeout for the request in seconds. + Set on the httpx client, not in request kwargs. + + verify (bool): Whether to verify SSL certificates. + Set on the httpx client, not in request kwargs. + """ + + params: typing.NotRequired[typing.Union[TParams, None]] + data: typing.NotRequired[ + typing.Union[TBody, str, typing.Dict[str, typing.Any], None] + ] + content: typing.NotRequired[typing.Union[TBody, str, None]] + headers: typing.NotRequired[typing.Dict[str, str]] + timeout: typing.NotRequired[float] + + +class RequestHandler: + """ + Handles HTTP requests to the Typesense API (supports both sync and async using httpx). + + This class manages authentication, request sending, and response processing + for interactions with the Typesense API. It can work with both sync (httpx.Client) + and async (httpx.AsyncClient) HTTP clients. + + Attributes: + api_key_header_name (str): The header name for the API key. + config (Configuration): The configuration object for the Typesense client. + """ + + api_key_header_name: typing.Final[str] = "X-TYPESENSE-API-KEY" + + def __init__(self, config: Configuration): + """ + Initialize the RequestHandler with a configuration. + + Args: + config (Configuration): The configuration object for the Typesense client. 
+ """ + self.config = config + + def make_request( + self, + *, + method: str, + url: str, + entity_type: typing.Type[TEntityDict], + as_json: typing.Union[typing.Literal[True], typing.Literal[False]] = True, + client: typing.Union[httpx.Client, httpx.AsyncClient], + **kwargs: typing.Unpack[SessionFunctionKwargs[TParams, TBody]], + ) -> typing.Union[ + TEntityDict, + str, + typing.Coroutine[typing.Any, typing.Any, typing.Union[TEntityDict, str]], + ]: + """ + Make an HTTP request to the Typesense API (supports both sync and async using httpx). + + Args: + method (str): The HTTP method (e.g., "GET", "POST", "PUT", "PATCH", "DELETE"). + + url (str): The URL to send the request to. + + entity_type (Type[TEntityDict]): The expected type of the response entity. + + as_json (bool): Whether to return the response as JSON. Defaults to True. + + client: The httpx client to use (httpx.Client for sync, httpx.AsyncClient for async). + + kwargs: Additional keyword arguments for the request. + + Returns: + Union[TEntityDict, str]: The response, either as a JSON object or a string. + If using AsyncClient, returns a coroutine. + + Raises: + TypesenseClientError: If the API returns an error response. 
+ """ + headers = { + self.api_key_header_name: self.config.api_key, + } + headers.update(self.config.additional_headers) + + request_kwargs: SessionFunctionKwargs[TParams, TBody] = typing.cast( + SessionFunctionKwargs[TParams, TBody], + { + "headers": headers, + "timeout": self.config.connection_timeout_seconds, + }, + ) + + if params := kwargs.get("params"): + self.normalize_params(params) + request_kwargs["params"] = params + + if body := kwargs.get("data"): + if not isinstance(body, (str, bytes)): + body = json.dumps(body) + request_kwargs["content"] = typing.cast(TBody, body) + + if isinstance(client, httpx.AsyncClient): + return self._make_async_request( + method, url, entity_type, as_json, client, **request_kwargs + ) + else: + return self._make_sync_request( + method, url, entity_type, as_json, client, **request_kwargs + ) + + def _make_sync_request( + self, + method: str, + url: str, + entity_type: typing.Type[TEntityDict], + as_json: bool, + client: httpx.Client, + **request_kwargs: typing.Unpack[SessionFunctionKwargs[TParams, TBody]], + ) -> typing.Union[TEntityDict, str]: + """Make a synchronous HTTP request using httpx.Client.""" + params: typing.Union[TParams, None] = request_kwargs.get("params") + content: typing.Union[TBody, str, None] = request_kwargs.get("content") + headers: typing.Dict[str, str] = request_kwargs.get("headers", {}) + + response = client.request( + method, + url, + params=params, + content=content, + headers=headers, + ) + + if response.status_code < 200 or response.status_code >= 300: + error_message = self._get_error_message(response) + raise self._get_exception(response.status_code)( + response.status_code, + error_message, + ) + + if as_json: + res: TEntityDict = typing.cast(TEntityDict, response.json()) + return res + + return response.text + + async def _make_async_request( + self, + method: str, + url: str, + entity_type: typing.Type[TEntityDict], + as_json: bool, + client: httpx.AsyncClient, + **request_kwargs: 
typing.Unpack[SessionFunctionKwargs[TParams, TBody]], + ) -> typing.Union[TEntityDict, str]: + """Make an asynchronous HTTP request using httpx.AsyncClient.""" + params: typing.Union[TParams, None] = request_kwargs.get("params") + content: typing.Union[TBody, str, None] = request_kwargs.get("content") + headers: typing.Dict[str, str] = request_kwargs.get("headers", {}) + + response = await client.request( + method, + url, + params=params, + content=content, + headers=headers, + ) + + if response.status_code < 200 or response.status_code >= 300: + error_message = self._get_error_message(response) + raise self._get_exception(response.status_code)( + response.status_code, + error_message, + ) + + if as_json: + res: TEntityDict = typing.cast(TEntityDict, response.json()) + return res + + return response.text + + @staticmethod + def normalize_params(params: typing.Dict[str, typing.Any]) -> None: + """ + Normalize boolean parameters in the request. + + Args: + params (Dict[str, Any]): The parameters to normalize. + + Raises: + ValueError: If params is not a dictionary. + """ + if not isinstance(params, typing.Dict): + raise ValueError("Params must be a dictionary.") + for key, parameter_value in params.items(): + if isinstance(parameter_value, bool): + params[key] = str(parameter_value).lower() + + @staticmethod + def _get_error_message(response: httpx.Response) -> str: + """ + Extract the error message from an API response. + + Args: + response (httpx.Response): The API response. + + Returns: + str: The extracted error message or a default message. + """ + content_type = response.headers.get("Content-Type", "") + if content_type.startswith("application/json"): + try: + return typing.cast(str, response.json().get("message", "API error.")) + except (json.JSONDecodeError, httpx.DecodingError): + return f"API error: Invalid JSON response: {response.text}" + if response.text: + return f"API error. {response.text}" + return f"Unknown API error. 
Full Response: {response}" + + @staticmethod + def _get_exception(http_code: int) -> typing.Type[TypesenseClientError]: + """ + Map an HTTP status code to the appropriate exception type. + + Args: + http_code (int): The HTTP status code. + + Returns: + Type[TypesenseClientError]: The exception type corresponding to the status code. + """ + return _ERROR_CODE_MAP.get(str(http_code), TypesenseClientError) diff --git a/src/typesense/sync/__init__.py b/src/typesense/sync/__init__.py new file mode 100644 index 0000000..4d3db4b --- /dev/null +++ b/src/typesense/sync/__init__.py @@ -0,0 +1,3 @@ +from .client import Client # NOQA + +__all__ = ["Client"] diff --git a/src/typesense/sync/alias.py b/src/typesense/sync/alias.py new file mode 100644 index 0000000..def715e --- /dev/null +++ b/src/typesense/sync/alias.py @@ -0,0 +1,80 @@ +""" +This module provides async functionality for managing individual aliases in Typesense. + +It contains the Alias class, which allows for retrieving and deleting +aliases asynchronously. + +Classes: + Alias: Manages async operations on a single alias in the Typesense API. + +Dependencies: + - typesense.async_api_call: Provides the ApiCall class for making async API requests. + - typesense.types.alias: Provides AliasSchema type. + +Note: This module uses conditional imports to support both Python 3.11+ and earlier versions. +""" + +from .api_call import ApiCall +from typesense.types.alias import AliasSchema + + +class Alias: + """ + Manages async operations on a single alias in the Typesense API. + + This class provides async methods to retrieve and delete an alias. + + Attributes: + api_call (ApiCall): The ApiCall instance for making async API requests. + name (str): The name of the alias. + """ + + def __init__(self, api_call: ApiCall, name: str): + """ + Initialize the Alias instance. + + Args: + api_call (ApiCall): The ApiCall instance for making async API requests. + name (str): The name of the alias. 
+ """ + self.api_call = api_call + self.name = name + + def retrieve(self) -> AliasSchema: + """ + Retrieve this specific alias. + + Returns: + AliasSchema: The schema containing the alias details. + """ + response: AliasSchema = self.api_call.get( + self._endpoint_path, + entity_type=AliasSchema, + as_json=True, + ) + return response + + def delete(self) -> AliasSchema: + """ + Delete this specific alias. + + Returns: + AliasSchema: The schema containing the deletion response. + """ + response: AliasSchema = self.api_call.delete( + self._endpoint_path, + entity_type=AliasSchema, + ) + return response + + @property + def _endpoint_path(self) -> str: + """ + Construct the API endpoint path for this specific alias. + + Returns: + str: The constructed endpoint path. + """ + from .aliases import Aliases + + return "/".join([Aliases.resource_path, self.name]) diff --git a/src/typesense/sync/aliases.py b/src/typesense/sync/aliases.py new file mode 100644 index 0000000..ad7ba49 --- /dev/null +++ b/src/typesense/sync/aliases.py @@ -0,0 +1,129 @@ +""" +This module provides async functionality for managing aliases in Typesense. + +It contains the Aliases class, which allows for creating, updating, retrieving, and +accessing individual aliases asynchronously. + +Classes: + Aliases: Manages aliases in the Typesense API (async). + +Dependencies: + - typesense.async_api_call: Provides the ApiCall class for making async API requests. + - typesense.async_alias: Provides the Alias class for individual alias operations. + - typesense.types.alias: Provides AliasCreateSchema, AliasSchema, and AliasesResponseSchema types. + +Note: This module uses conditional imports to support both Python 3.11+ and earlier versions. 
+""" + +import sys + +from .api_call import ApiCall +from .alias import Alias +from typesense.types.alias import AliasCreateSchema, AliasSchema, AliasesResponseSchema + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + + +class Aliases: + """ + Manages aliases in the Typesense API (async). + + This class provides async methods to create, update, retrieve, and access individual aliases. + + Attributes: + resource_path (str): The API endpoint path for alias operations. + api_call (ApiCall): The ApiCall instance for making async API requests. + aliases (Dict[str, Alias]): A dictionary of Alias instances, keyed by alias name. + """ + + resource_path: typing.Final[str] = "/aliases" + + def __init__(self, api_call: ApiCall): + """ + Initialize the Aliases instance. + + Args: + api_call (ApiCall): The ApiCall instance for making async API requests. + """ + self.api_call = api_call + self.aliases: typing.Dict[str, Alias] = {} + + def __getitem__(self, name: str) -> Alias: + """ + Get or create an Alias instance for a given alias name. + + This method allows accessing aliases using dictionary-like syntax. + If the Alias instance doesn't exist, it creates a new one. + + Args: + name (str): The name of the alias. + + Returns: + Alias: The Alias instance for the specified alias name. + + Example: + >>> aliases = Aliases(async_api_call) + >>> company_alias = aliases["company_alias"] + """ + if not self.aliases.get(name): + self.aliases[name] = Alias(self.api_call, name) + return self.aliases[name] + + def upsert(self, name: str, mapping: AliasCreateSchema) -> AliasSchema: + """ + Create or update an alias. + + Args: + name (str): The name of the alias. + mapping (AliasCreateSchema): The schema for creating or updating the alias. + + Returns: + AliasSchema: The created or updated alias. + + Example: + >>> aliases = Aliases(async_api_call) + >>> alias = await aliases.upsert( + ... "company_alias", {"collection_name": "companies"} + ... 
) + """ + response: AliasSchema = self.api_call.put( + self._endpoint_path(name), + body=mapping, + entity_type=AliasSchema, + ) + return response + + def retrieve(self) -> AliasesResponseSchema: + """ + Retrieve all aliases. + + Returns: + AliasesResponseSchema: The schema containing all aliases. + + Example: + >>> aliases = Aliases(async_api_call) + >>> all_aliases = await aliases.retrieve() + >>> for alias in all_aliases["aliases"]: + ... print(alias["name"]) + """ + response: AliasesResponseSchema = self.api_call.get( + Aliases.resource_path, + as_json=True, + entity_type=AliasesResponseSchema, + ) + return response + + def _endpoint_path(self, alias_name: str) -> str: + """ + Construct the API endpoint path for alias operations. + + Args: + alias_name (str): The name of the alias. + + Returns: + str: The constructed endpoint path. + """ + return "/".join([Aliases.resource_path, alias_name]) diff --git a/src/typesense/sync/analytics.py b/src/typesense/sync/analytics.py new file mode 100644 index 0000000..4050ed3 --- /dev/null +++ b/src/typesense/sync/analytics.py @@ -0,0 +1,14 @@ +"""Client for Typesense Analytics module (async).""" + +from .analytics_events import AnalyticsEvents +from .analytics_rules import AnalyticsRules +from .api_call import ApiCall + + +class Analytics: + """Client for v30 Analytics endpoints (async).""" + + def __init__(self, api_call: ApiCall) -> None: + self.api_call = api_call + self.rules = AnalyticsRules(api_call) + self.events = AnalyticsEvents(api_call) diff --git a/src/typesense/sync/analytics_events.py b/src/typesense/sync/analytics_events.py new file mode 100644 index 0000000..ebdd485 --- /dev/null +++ b/src/typesense/sync/analytics_events.py @@ -0,0 +1,71 @@ +"""Client for Analytics events and status operations (async).""" + +import sys + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + +from .api_call import ApiCall +from typesense.types.analytics import ( + AnalyticsEvent as 
AnalyticsEventSchema, + AnalyticsEventCreateResponse, + AnalyticsEventsResponse, + AnalyticsStatus, +) + + +class AnalyticsEvents: + events_path: typing.Final[str] = "/analytics/events" + flush_path: typing.Final[str] = "/analytics/flush" + status_path: typing.Final[str] = "/analytics/status" + + def __init__(self, api_call: ApiCall) -> None: + self.api_call = api_call + + def create(self, event: AnalyticsEventSchema) -> AnalyticsEventCreateResponse: + response: AnalyticsEventCreateResponse = self.api_call.post( + AnalyticsEvents.events_path, + body=event, + as_json=True, + entity_type=AnalyticsEventCreateResponse, + ) + return response + + def retrieve( + self, + *, + user_id: str, + name: str, + n: int, + ) -> AnalyticsEventsResponse: + params: typing.Dict[str, typing.Union[str, int]] = { + "user_id": user_id, + "name": name, + "n": n, + } + response: AnalyticsEventsResponse = self.api_call.get( + AnalyticsEvents.events_path, + params=params, + as_json=True, + entity_type=AnalyticsEventsResponse, + ) + return response + + def flush(self) -> AnalyticsEventCreateResponse: + response: AnalyticsEventCreateResponse = self.api_call.post( + AnalyticsEvents.flush_path, + body={}, + as_json=True, + entity_type=AnalyticsEventCreateResponse, + ) + return response + + def status(self) -> AnalyticsStatus: + response: AnalyticsStatus = self.api_call.get( + AnalyticsEvents.status_path, + as_json=True, + entity_type=AnalyticsStatus, + ) + return response diff --git a/src/typesense/sync/analytics_rule.py b/src/typesense/sync/analytics_rule.py new file mode 100644 index 0000000..2844004 --- /dev/null +++ b/src/typesense/sync/analytics_rule.py @@ -0,0 +1,31 @@ +"""Per-rule client for Analytics rules operations (async).""" + +from .api_call import ApiCall +from typesense.types.analytics import AnalyticsRuleSchema + + +class AnalyticsRule: + def __init__(self, api_call: ApiCall, rule_name: str) -> None: + self.api_call = api_call + self.rule_name = rule_name + + @property + def 
_endpoint_path(self) -> str: + from .analytics_rules import AnalyticsRules + + return "/".join([AnalyticsRules.resource_path, self.rule_name]) + + def retrieve(self) -> AnalyticsRuleSchema: + response: AnalyticsRuleSchema = self.api_call.get( + self._endpoint_path, + as_json=True, + entity_type=AnalyticsRuleSchema, + ) + return response + + def delete(self) -> AnalyticsRuleSchema: + response: AnalyticsRuleSchema = self.api_call.delete( + self._endpoint_path, + entity_type=AnalyticsRuleSchema, + ) + return response diff --git a/src/typesense/sync/analytics_rule_v1.py b/src/typesense/sync/analytics_rule_v1.py new file mode 100644 index 0000000..38e8f41 --- /dev/null +++ b/src/typesense/sync/analytics_rule_v1.py @@ -0,0 +1,117 @@ +""" +This module provides async functionality for managing individual analytics rules in Typesense (V1). + +Classes: + - AnalyticsRuleV1: Handles async operations related to a specific analytics rule. + +Methods: + - __init__: Initializes the AnalyticsRuleV1 object. + - _endpoint_path: Constructs the API endpoint path for this specific analytics rule. + - retrieve: Retrieves the details of this specific analytics rule. + - delete: Deletes this specific analytics rule. + +The AnalyticsRuleV1 class interacts with the Typesense API to manage operations on a +specific analytics rule. It provides methods to retrieve and delete individual rules. + +For more information on analytics, refer to the Analytics & Query Suggestion +[documentation](https://typesense.org/docs/27.0/api/analytics-query-suggestions.html) + +This module uses type hinting and is compatible with Python 3.11+ as well as earlier +versions through the use of the typing_extensions library. 
+""" + +import sys + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + +from typing_extensions import deprecated + +from .api_call import ApiCall +from typesense.logger import warn_deprecation +from typesense.types.analytics_rule_v1 import ( + RuleDeleteSchema, + RuleSchemaForCounters, + RuleSchemaForQueries, +) + + +@deprecated( + "SyncAnalyticsRuleV1 is deprecated on v30+. Use client.analytics.rules[rule_id] instead." +) +class AnalyticsRuleV1: + """ + Class for managing individual analytics rules in Typesense (V1) (async). + + This class provides methods to interact with a specific analytics rule, + including retrieving and deleting it. + + Attributes: + api_call (ApiCall): The API call object for making requests. + rule_id (str): The ID of the analytics rule. + """ + + def __init__(self, api_call: ApiCall, rule_id: str): + """ + Initialize the AnalyticsRuleV1 object. + + Args: + api_call (ApiCall): The API call object for making requests. + rule_id (str): The ID of the analytics rule. + """ + self.api_call = api_call + self.rule_id = rule_id + + def retrieve( + self, + ) -> typing.Union[RuleSchemaForQueries, RuleSchemaForCounters]: + """ + Retrieve this specific analytics rule. + + Returns: + Union[RuleSchemaForQueries, RuleSchemaForCounters]: + The schema containing the rule details. + """ + response: typing.Union[ + RuleSchemaForQueries, RuleSchemaForCounters + ] = self.api_call.get( + self._endpoint_path, + entity_type=dict, + as_json=True, + ) + return typing.cast( + typing.Union[RuleSchemaForQueries, RuleSchemaForCounters], + response, + ) + + def delete(self) -> RuleDeleteSchema: + """ + Delete this specific analytics rule. + + Returns: + RuleDeleteSchema: The schema containing the deletion response. 
+ """ + response: RuleDeleteSchema = self.api_call.delete( + self._endpoint_path, + entity_type=RuleDeleteSchema, + ) + + return response + + @property + @warn_deprecation( # type: ignore[untyped-decorator] + "SyncAnalyticsRuleV1 is deprecated on v30+. Use client.analytics.rules[rule_id] instead.", + flag_name="analytics_rules_v1_deprecation", + ) + def _endpoint_path(self) -> str: + """ + Construct the API endpoint path for this specific analytics rule. + + Returns: + str: The constructed endpoint path. + """ + from .analytics_rules_v1 import AnalyticsRulesV1 + + return "/".join([AnalyticsRulesV1.resource_path, self.rule_id]) diff --git a/src/typesense/sync/analytics_rules.py b/src/typesense/sync/analytics_rules.py new file mode 100644 index 0000000..26c11d5 --- /dev/null +++ b/src/typesense/sync/analytics_rules.py @@ -0,0 +1,62 @@ +"""Client for Analytics rules collection operations (async).""" + +import sys + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + +from .analytics_rule import AnalyticsRule +from .api_call import ApiCall +from typesense.types.analytics import ( + AnalyticsRuleCreate, + AnalyticsRuleSchema, + AnalyticsRuleUpdate, +) + + +class AnalyticsRules(object): + resource_path: typing.Final[str] = "/analytics/rules" + + def __init__(self, api_call: ApiCall) -> None: + self.api_call = api_call + self.rules: typing.Dict[str, AnalyticsRule] = {} + + def __getitem__(self, rule_name: str) -> AnalyticsRule: + if rule_name not in self.rules: + self.rules[rule_name] = AnalyticsRule(self.api_call, rule_name) + return self.rules[rule_name] + + def create(self, rule: AnalyticsRuleCreate) -> AnalyticsRuleSchema: + response: AnalyticsRuleSchema = self.api_call.post( + AnalyticsRules.resource_path, + body=rule, + as_json=True, + entity_type=AnalyticsRuleSchema, + ) + return response + + def retrieve( + self, *, rule_tag: typing.Union[str, None] = None + ) -> typing.List[AnalyticsRuleSchema]: + params: 
typing.Dict[str, str] = {} + if rule_tag: + params["rule_tag"] = rule_tag + response: typing.List[AnalyticsRuleSchema] = self.api_call.get( + AnalyticsRules.resource_path, + params=params if params else None, + as_json=True, + entity_type=typing.List[AnalyticsRuleSchema], + ) + return response + + def upsert( + self, rule_name: str, update: AnalyticsRuleUpdate + ) -> AnalyticsRuleSchema: + response: AnalyticsRuleSchema = self.api_call.put( + "/".join([AnalyticsRules.resource_path, rule_name]), + body=update, + entity_type=AnalyticsRuleSchema, + ) + return response diff --git a/src/typesense/sync/analytics_rules_v1.py b/src/typesense/sync/analytics_rules_v1.py new file mode 100644 index 0000000..e63f802 --- /dev/null +++ b/src/typesense/sync/analytics_rules_v1.py @@ -0,0 +1,179 @@ +""" +This module provides async functionality for managing analytics rules in Typesense (V1). + +Classes: + - AnalyticsRulesV1: Handles async operations related to analytics rules. + +Methods: + - __init__: Initializes the AnalyticsRulesV1 object. + - __getitem__: Retrieves or creates an AnalyticsRuleV1 object for a given rule_id. + - create: Creates a new analytics rule. + - upsert: Creates or updates an analytics rule. + - retrieve: Retrieves all analytics rules. + +Attributes: + - resource_path: The API resource path for analytics rules. + +The AnalyticsRulesV1 class interacts with the Typesense API to manage analytics rule operations. +It provides methods to create, update, and retrieve analytics rules, as well as access +individual AnalyticsRuleV1 objects. + +For more information on analytics, refer to the Analytics & Query Suggestion +[documentation](https://typesense.org/docs/27.0/api/analytics-query-suggestions.html) + +This module uses type hinting and is compatible with Python 3.11+ as well as earlier +versions through the use of the typing_extensions library. 
+""" + +import sys + +from typesense.logger import warn_deprecation + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + +from .analytics_rule_v1 import AnalyticsRuleV1 +from .api_call import ApiCall +from typesense.types.analytics_rule_v1 import ( + RuleCreateSchemaForCounters, + RuleCreateSchemaForQueries, + RuleSchemaForCounters, + RuleSchemaForQueries, + RulesRetrieveSchema, +) + +_RuleParams = typing.Union[ + typing.Dict[str, typing.Union[str, int, bool]], + None, +] + + +class AnalyticsRulesV1(object): + """ + Class for managing analytics rules in Typesense (V1) (async). + + This class provides methods to interact with analytics rules, including + creating, updating, and retrieving them. + + Attributes: + resource_path (str): The API resource path for analytics rules. + api_call (ApiCall): The API call object for making requests. + rules (Dict[str, AnalyticsRuleV1]): A dictionary of AnalyticsRuleV1 objects. + """ + + resource_path: typing.Final[str] = "/analytics/rules" + + def __init__(self, api_call: ApiCall): + """ + Initialize the AnalyticsRulesV1 object. + + Args: + api_call (ApiCall): The API call object for making requests. + """ + self.api_call = api_call + self.rules: typing.Dict[str, AnalyticsRuleV1] = {} + + def __getitem__(self, rule_id: str) -> AnalyticsRuleV1: + """ + Get or create an AnalyticsRuleV1 object for a given rule_id. + + Args: + rule_id (str): The ID of the analytics rule. + + Returns: + AnalyticsRuleV1: The AnalyticsRuleV1 object for the given ID. + """ + if not self.rules.get(rule_id): + self.rules[rule_id] = AnalyticsRuleV1(self.api_call, rule_id) + return self.rules[rule_id] + + @warn_deprecation( # type: ignore[untyped-decorator] + "SyncAnalyticsRulesV1 is deprecated on v30+. 
Use client.analytics instead.", + flag_name="analytics_rules_v1_deprecation", + ) + def create( + self, + rule: typing.Union[RuleCreateSchemaForCounters, RuleCreateSchemaForQueries], + rule_parameters: _RuleParams = None, + ) -> typing.Union[RuleSchemaForCounters, RuleSchemaForQueries]: + """ + Create a new analytics rule. + + This method can create both counter rules and query rules. + + Args: + rule (Union[RuleCreateSchemaForCounters, RuleCreateSchemaForQueries]): + The rule schema. Use RuleCreateSchemaForCounters for counter rules + and RuleCreateSchemaForQueries for query rules. + + rule_parameters (_RuleParams, optional): Additional rule parameters. + + Returns: + Union[RuleSchemaForCounters, RuleSchemaForQueries]: + The created rule. Returns RuleSchemaForCounters for counter rules + and RuleSchemaForQueries for query rules. + """ + response: typing.Union[ + RuleSchemaForCounters, RuleSchemaForQueries + ] = self.api_call.post( + AnalyticsRulesV1.resource_path, + body=rule, + params=rule_parameters, + as_json=True, + entity_type=dict, + ) + return typing.cast( + typing.Union[RuleSchemaForCounters, RuleSchemaForQueries], + response, + ) + + @warn_deprecation( # type: ignore[untyped-decorator] + "SyncAnalyticsRulesV1 is deprecated on v30+. Use client.analytics instead.", + flag_name="analytics_rules_v1_deprecation", + ) + def upsert( + self, + rule_id: str, + rule: typing.Union[RuleCreateSchemaForQueries, RuleSchemaForCounters], + ) -> typing.Union[RuleSchemaForCounters, RuleCreateSchemaForQueries]: + """ + Create or update an analytics rule. + + Args: + rule_id (str): The ID of the rule to upsert. + rule (Union[RuleCreateSchemaForQueries, RuleSchemaForCounters]): The rule schema. + + Returns: + Union[RuleSchemaForCounters, RuleCreateSchemaForQueries]: The upserted rule. 
+ """ + response: typing.Union[ + RuleSchemaForCounters, RuleCreateSchemaForQueries + ] = self.api_call.put( + "/".join([self.resource_path, rule_id]), + body=rule, + entity_type=dict, + ) + return typing.cast( + typing.Union[RuleSchemaForCounters, RuleCreateSchemaForQueries], + response, + ) + + @warn_deprecation( # type: ignore[untyped-decorator] + "SyncAnalyticsRulesV1 is deprecated on v30+. Use client.analytics instead.", + flag_name="analytics_rules_v1_deprecation", + ) + def retrieve(self) -> RulesRetrieveSchema: + """ + Retrieve all analytics rules. + + Returns: + RulesRetrieveSchema: The schema containing all analytics rules. + """ + response: RulesRetrieveSchema = self.api_call.get( + AnalyticsRulesV1.resource_path, + as_json=True, + entity_type=RulesRetrieveSchema, + ) + return response diff --git a/src/typesense/sync/analytics_v1.py b/src/typesense/sync/analytics_v1.py new file mode 100644 index 0000000..f47f25e --- /dev/null +++ b/src/typesense/sync/analytics_v1.py @@ -0,0 +1,49 @@ +""" +This module provides async functionality for managing analytics (V1) in Typesense. + +Classes: + - AnalyticsV1: Handles async operations related to analytics, including access to analytics rules. + +Methods: + - __init__: Initializes the AnalyticsV1 object. + +The AnalyticsV1 class serves as an entry point for analytics-related operations in Typesense, +currently providing access to AnalyticsRulesV1. + +For more information on analytics, refer to the Analytics & Query Suggestion +[documentation](https://typesense.org/docs/27.0/api/analytics-query-suggestions.html) + +This module uses type hinting and is compatible with Python 3.11+ as well as earlier +versions through the use of the typing_extensions library. +""" + +from typing_extensions import deprecated + +from .analytics_rules_v1 import AnalyticsRulesV1 +from .api_call import ApiCall + + +@deprecated("SyncAnalyticsV1 is deprecated on v30+. 
Use client.analytics instead.")
+class AnalyticsV1(object):
+    """
+    Class for managing analytics in Typesense (V1) (sync).
+
+    This class provides access to analytics-related functionalities,
+    currently including operations on analytics rules.
+
+    Attributes:
+        rules (AnalyticsRulesV1): An instance of AnalyticsRulesV1 for managing analytics rules.
+    """
+
+    def __init__(self, api_call: ApiCall) -> None:
+        """
+        Initialize the AnalyticsV1 object.
+
+        Args:
+            api_call (ApiCall): The API call object for making requests.
+        """
+        self._rules = AnalyticsRulesV1(api_call)
+
+    @property
+    def rules(self) -> AnalyticsRulesV1:
+        return self._rules
diff --git a/src/typesense/sync/api_call.py b/src/typesense/sync/api_call.py
new file mode 100644
index 0000000..a24ce6a
--- /dev/null
+++ b/src/typesense/sync/api_call.py
@@ -0,0 +1,541 @@
+"""
+This module provides synchronous functionality for making API calls to a Typesense server.
+
+It contains the ApiCall class, which is responsible for executing synchronous HTTP requests
+to the Typesense API, handling retries, and managing node health.
+
+Key features:
+- Support for GET, POST, PUT, PATCH, and DELETE HTTP methods (sync)
+- Automatic retries on server errors
+- Node health management
+- Type-safe request execution with overloaded methods
+
+Classes:
+    ApiCall: Manages synchronous API calls to the Typesense server.
+
+Dependencies:
+    - httpx: For making HTTP requests
+    - typesense.configuration: Provides Configuration and Node classes
+    - typesense.exceptions: Custom exception classes
+    - typesense.node_manager: Provides NodeManager class
+
+Usage:
+    from typesense.configuration import Configuration
+    from .api_call import ApiCall
+
+    config = Configuration(...)
+    api_call = ApiCall(config)
+    response = api_call.get("/collections", SomeEntityType)
+
+Note: This module is part of the Typesense Python client library and is used internally
+by other components of the library.
+""" + +import sys +from types import MappingProxyType, TracebackType + +import httpx + +from typesense.configuration import Configuration, Node +from typesense.exceptions import ( + HTTPStatus0Error, + ObjectAlreadyExists, + ObjectNotFound, + ObjectUnprocessable, + RequestForbidden, + RequestMalformed, + RequestUnauthorized, + ServerError, + ServiceUnavailable, + TypesenseClientError, +) +from typesense.node_manager import NodeManager +from typesense.request_handler import RequestHandler + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + +TEntityDict = typing.TypeVar("TEntityDict") +TParams = typing.TypeVar("TParams", bound=typing.Dict[str, typing.Any]) +TBody = typing.TypeVar( + "TBody", bound=typing.Union[str, bytes, typing.Mapping[str, typing.Any]] +) + + +class SessionFunctionKwargs(typing.Generic[TParams, TBody], typing.TypedDict): + """ + Type definition for keyword arguments used in request functions. + + This is an internal abstraction that gets converted to httpx's request parameters. + The `data` field is converted to `content` when passed to httpx. + + Note: `verify` and `timeout` are set on the httpx client, not in request kwargs. + However, we include them here for compatibility with the existing API. + + Attributes: + params (Optional[Union[TParams, None]]): Query parameters for the request. + Passed as `params` to httpx. + + data (Optional[Union[TBody, str, None]]): Body of the request. + Converted to `content` (JSON string) when passed to httpx. + + headers (Optional[Dict[str, str]]): Headers for the request. + Passed as `headers` to httpx. + + timeout (float): Timeout for the request in seconds. + Set on the httpx client, not in request kwargs. + + verify (bool): Whether to verify SSL certificates. + Set on the httpx client, not in request kwargs. 
+ """ + + params: typing.NotRequired[typing.Union[TParams, None]] + data: typing.NotRequired[typing.Union[TBody, None]] + content: typing.NotRequired[typing.Union[TBody, str, None]] + headers: typing.NotRequired[typing.Dict[str, str]] + timeout: typing.NotRequired[float] + + +_ERROR_CODE_MAP: typing.Final[ + typing.Mapping[str, typing.Type[TypesenseClientError]] +] = MappingProxyType( + { + "0": HTTPStatus0Error, + "400": RequestMalformed, + "401": RequestUnauthorized, + "403": RequestForbidden, + "404": ObjectNotFound, + "409": ObjectAlreadyExists, + "422": ObjectUnprocessable, + "500": ServerError, + "503": ServiceUnavailable, + }, +) + +_SERVER_ERRORS: typing.Final[ + typing.Tuple[ + typing.Type[httpx.TimeoutException], + typing.Type[httpx.ConnectError], + typing.Type[httpx.HTTPError], + typing.Type[httpx.RequestError], + typing.Type[HTTPStatus0Error], + typing.Type[ServerError], + typing.Type[ServiceUnavailable], + ] +] = ( + httpx.TimeoutException, + httpx.ConnectError, + httpx.HTTPError, + httpx.RequestError, + HTTPStatus0Error, + ServerError, + ServiceUnavailable, +) + + +class ApiCall: + """ + Manages async API calls to the Typesense server. + + This class handles the execution of async HTTP requests to the Typesense API, + including retries, node health management, and error handling. + + Attributes: + config (Configuration): The configuration object for the Typesense client. + node_manager (NodeManager): Manages the nodes in the Typesense cluster. + _client (httpx.Client): The httpx async client for making requests. + """ + + def __init__(self, config: Configuration): + """ + Initialize the ApiCall instance. + + Args: + config (Configuration): The configuration object for the Typesense client. 
+ """ + self.config = config + self.node_manager = NodeManager(config) + self.request_handler = RequestHandler(config) + self._client = httpx.Client( + timeout=config.connection_timeout_seconds, + ) + + def __enter__(self) -> "ApiCall": + """Async context manager entry.""" + return self + + def __exit__( + self, + exc_type: typing.Optional[typing.Type[BaseException]], + exc_val: typing.Optional[BaseException], + exc_tb: typing.Optional[TracebackType], + ) -> None: + """Async context manager exit.""" + self._client.close() + + def close(self) -> None: + """Close the httpx client.""" + self._client.close() + + @typing.overload + def get( + self, + endpoint: str, + entity_type: typing.Type[TEntityDict], + as_json: typing.Literal[False], + params: typing.Union[TParams, None] = None, + ) -> str: + """ + Execute an async GET request to the Typesense API. + + Args: + endpoint (str): The API endpoint to call. + entity_type (Type[TEntityDict]): The expected type of the response entity. + as_json (False): Whether to return the response as JSON. Defaults to True. + params (Union[TParams, None], optional): Query parameters for the request. + + Returns: + str: The response, as a string. + """ + + @typing.overload + def get( + self, + endpoint: str, + entity_type: typing.Type[TEntityDict], + as_json: typing.Literal[True] = True, + params: typing.Union[TParams, None] = None, + ) -> TEntityDict: + """ + Execute an async GET request to the Typesense API. + + Args: + endpoint (str): The API endpoint to call. + entity_type (Type[TEntityDict]): The expected type of the response entity. + as_json (True): Whether to return the response as JSON. Defaults to True. + params (Union[TParams, None], optional): Query parameters for the request. + + Returns: + EntityDict: The response, as a JSON object. 
+ """ + + def get( + self, + endpoint: str, + entity_type: typing.Type[TEntityDict], + as_json: typing.Union[typing.Literal[True], typing.Literal[False]] = True, + params: typing.Union[TParams, None] = None, + ) -> typing.Union[TEntityDict, str]: + """ + Execute an async GET request to the Typesense API. + + Args: + endpoint (str): The API endpoint to call. + entity_type (Type[TEntityDict]): The expected type of the response entity. + as_json (bool): Whether to return the response as JSON. Defaults to True. + params (Union[TParams, None], optional): Query parameters for the request. + + Returns: + Union[TEntityDict, str]: The response, either as a JSON object or a string. + """ + return self._execute_request( + "GET", + endpoint, + entity_type, + as_json, + params=params, + ) + + @typing.overload + def post( + self, + endpoint: str, + entity_type: typing.Type[TEntityDict], + as_json: typing.Literal[False], + params: typing.Union[TParams, None] = None, + body: typing.Union[TBody, None] = None, + ) -> str: + """ + Execute an async POST request to the Typesense API. + + Args: + endpoint (str): The API endpoint to call. + entity_type (Type[TEntityDict]): The expected type of the response entity. + as_json (False): Whether to return the response as JSON. Defaults to True. + params (Union[TParams, None], optional): Query parameters for the request. + body (Union[TBody, None], optional): Request body. + + Returns: + str: The response, as a string. + """ + + @typing.overload + def post( + self, + endpoint: str, + entity_type: typing.Type[TEntityDict], + as_json: typing.Literal[True] = True, + params: typing.Union[TParams, None] = None, + body: typing.Union[TBody, None] = None, + ) -> TEntityDict: + """ + Execute an async POST request to the Typesense API. + + Args: + endpoint (str): The API endpoint to call. + entity_type (Type[TEntityDict]): The expected type of the response entity. + as_json (True): Whether to return the response as JSON. Defaults to True. 
+ params (Union[TParams, None], optional): Query parameters for the request. + body (Union[TBody, None], optional): Request body. + + Returns: + EntityDict: The response, as a JSON object. + """ + + def post( + self, + endpoint: str, + entity_type: typing.Type[TEntityDict], + as_json: typing.Union[typing.Literal[True], typing.Literal[False]] = True, + params: typing.Union[TParams, None] = None, + body: typing.Union[TBody, None] = None, + ) -> typing.Union[str, TEntityDict]: + """ + Execute an async POST request to the Typesense API. + + Args: + endpoint (str): The API endpoint to call. + entity_type (Type[TEntityDict]): The expected type of the response entity. + as_json (bool): Whether to return the response as JSON. Defaults to True. + params (Union[TParams, None], optional): Query parameters for the request. + body (Union[TBody, None], optional): Request body. + + Returns: + Union[TEntityDict, str]: The response, either as a JSON object or a string. + """ + return self._execute_request( + "POST", + endpoint, + entity_type, + as_json, + params=params, + data=body, + ) + + def put( + self, + endpoint: str, + entity_type: typing.Type[TEntityDict], + body: TBody, + params: typing.Union[TParams, None] = None, + ) -> TEntityDict: + """ + Execute an async PUT request to the Typesense API. + + Args: + endpoint (str): The API endpoint to call. + entity_type (Type[TEntityDict]): The expected type of the response entity. + params (Union[TParams, None], optional): Query parameters for the request. + body (TBody): Request body. + + Returns: + EntityDict: The response, as a JSON object. + """ + return self._execute_request( + "PUT", + endpoint, + entity_type, + as_json=True, + params=params, + data=body, + ) + + def patch( + self, + endpoint: str, + entity_type: typing.Type[TEntityDict], + body: TBody, + params: typing.Union[TParams, None] = None, + ) -> TEntityDict: + """ + Execute an async PATCH request to the Typesense API. 
+ + Args: + endpoint (str): The API endpoint to call. + entity_type (Type[TEntityDict]): The expected type of the response entity. + params (Union[TParams, None], optional): Query parameters for the request. + body (TBody): Request body. + + Returns: + EntityDict: The response, as a JSON object. + """ + return self._execute_request( + "PATCH", + endpoint, + entity_type, + as_json=True, + params=params, + data=body, + ) + + def delete( + self, + endpoint: str, + entity_type: typing.Type[TEntityDict], + params: typing.Union[TParams, None] = None, + ) -> TEntityDict: + """ + Execute an async DELETE request to the Typesense API. + + Args: + endpoint (str): The API endpoint to call. + entity_type (Type[TEntityDict]): The expected type of the response entity. + params (Union[TParams, None], optional): Query parameters for the request. + + Returns: + EntityDict: The response, as a JSON object. + """ + return self._execute_request( + "DELETE", + endpoint, + entity_type, + as_json=True, + params=params, + ) + + @typing.overload + def _execute_request( + self, + method: str, + endpoint: str, + entity_type: typing.Type[TEntityDict], + as_json: typing.Literal[True], + last_exception: typing.Union[None, Exception] = None, + num_retries: int = 0, + **kwargs: typing.Unpack[SessionFunctionKwargs[TParams, TBody]], + ) -> TEntityDict: + """Execute an async request with retry logic.""" + + @typing.overload + def _execute_request( + self, + method: str, + endpoint: str, + entity_type: typing.Type[TEntityDict], + as_json: typing.Literal[False], + last_exception: typing.Union[None, Exception] = None, + num_retries: int = 0, + **kwargs: typing.Unpack[SessionFunctionKwargs[TParams, TBody]], + ) -> str: + """Execute an async request with retry logic.""" + + def _execute_request( + self, + method: str, + endpoint: str, + entity_type: typing.Type[TEntityDict], + as_json: typing.Union[typing.Literal[True], typing.Literal[False]] = True, + last_exception: typing.Union[None, Exception] = None, 
+ num_retries: int = 0, + **kwargs: typing.Unpack[SessionFunctionKwargs[TParams, TBody]], + ) -> typing.Union[TEntityDict, str]: + """ + Execute an async request to the Typesense API with retry logic. + + This method handles the actual execution of the request, including + node selection, error handling, and retries. + + Args: + method (str): The HTTP method to use (GET, POST, PUT, PATCH, DELETE). + endpoint (str): The API endpoint to call. + entity_type (Type[TEntityDict]): The expected type of the response entity. + as_json (bool): Whether to return the response as JSON. Defaults to True. + last_exception (Union[None, Exception], optional): The last exception encountered. + num_retries (int): The current number of retries attempted. + kwargs: Additional keyword arguments for the request. + + Returns: + Union[TEntityDict, str]: The response, either as a JSON object or a string. + + Raises: + TypesenseClientError: If all nodes are unhealthy or max retries are exceeded. + """ + if num_retries > self.config.num_retries: + if last_exception: + raise last_exception + raise TypesenseClientError("All nodes are unhealthy") + + node, url, request_kwargs = self._prepare_request_params(endpoint, **kwargs) + + try: + return self._make_request_and_process_response( + method, + url, + entity_type, + as_json, + **request_kwargs, + ) + except _SERVER_ERRORS as server_error: + self.node_manager.set_node_health(node, is_healthy=False) + return self._execute_request( + method, + endpoint, + entity_type, + as_json, + last_exception=server_error, + num_retries=num_retries + 1, + **kwargs, + ) + + def _make_request_and_process_response( + self, + method: str, + url: str, + entity_type: typing.Type[TEntityDict], + as_json: bool, + **kwargs: typing.Unpack[SessionFunctionKwargs[TParams, TBody]], + ) -> typing.Union[TEntityDict, str]: + """Make the async API request and process the response.""" + request_response = self.request_handler.make_request( + method=method, + url=url, + 
as_json=as_json, + entity_type=entity_type, + client=self._client, + **kwargs, + ) + self.node_manager.set_node_health( + self.node_manager.get_node(), + is_healthy=True, + ) + return ( + typing.cast(TEntityDict, request_response) + if as_json + else typing.cast(str, request_response) + ) + + def _prepare_request_params( + self, + endpoint: str, + **kwargs: typing.Unpack[SessionFunctionKwargs[TParams, TBody]], + ) -> typing.Tuple[Node, str, SessionFunctionKwargs[TParams, TBody]]: + """ + Prepare request parameters including node selection and URL construction. + + Args: + endpoint: The API endpoint path. + **kwargs: Request parameters following SessionFunctionKwargs structure. + + Returns: + Tuple of (node, full_url, kwargs_dict) where kwargs_dict contains + the request parameters as a regular dict for further processing. + """ + node = self.node_manager.get_node() + url = node.url() + endpoint + + if params := kwargs.get("params"): + self.request_handler.normalize_params(params) + + return node, url, kwargs diff --git a/src/typesense/sync/client.py b/src/typesense/sync/client.py new file mode 100644 index 0000000..ef1afb0 --- /dev/null +++ b/src/typesense/sync/client.py @@ -0,0 +1,168 @@ +""" +This module provides the main async client interface for interacting with the Typesense API. + +It contains the Client class, which serves as the entry point for all Typesense operations, +integrating various components like collections, multi-search, keys, aliases, analytics, etc. + +Classes: + Client: The main client class for interacting with Typesense. + +Dependencies: + - typesense.aliases: Provides the Aliases class. + - typesense.analytics: Provides the Analytics class. + - typesense.api_call: Provides the ApiCall class for making API requests. + - typesense.collection: Provides the Collection class. + - typesense.collections: Provides the Collections class. + - typesense.configuration: Provides AsyncConfiguration and ConfigDict types. 
+ - typesense.conversations_models: Provides the ConversationsModels class. + - typesense.debug: Provides the Debug class. + - typesense.keys: Provides the Keys class. + - typesense.metrics: Provides the Metrics class. + - typesense.multi_search: Provides the MultiSearch class. + - typesense.operations: Provides the Operations class. + - typesense.stopwords: Provides the Stopwords class. + - typesense.types.document: Provides the DocumentSchema type. + +Note: This module uses conditional imports to support both Python 3.11+ and earlier versions. +""" + +import sys + +from typing_extensions import deprecated + +from typesense.types.document import DocumentSchema + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + +from .aliases import Aliases +from .analytics import Analytics +from .analytics_v1 import AnalyticsV1 +from .api_call import ApiCall +from .collection import Collection +from .collections import Collections +from .conversations_models import ConversationsModels +from .curation_sets import CurationSets +from .debug import Debug +from .keys import Keys +from .metrics import Metrics +from .multi_search import MultiSearch +from .nl_search_models import NLSearchModels +from .operations import Operations +from .stemming import Stemming +from .stopwords import Stopwords +from .synonym_sets import SynonymSets +from typesense.configuration import ConfigDict, Configuration + +TDoc = typing.TypeVar("TDoc", bound=DocumentSchema) + + +class Client: + """ + The main client class for interacting with Typesense. + + This class serves as the entry point for all Typesense operations. It initializes + and provides access to various components of the Typesense SDK, such as collections, + multi-search, keys, aliases, analytics, stemming, operations, debug, stopwords, + and conversation models. + + Attributes: + config (Configuration): The configuration object for the Typesense client. 
+ api_call (ApiCall): The ApiCall instance for making API requests. + collections (Collections[DocumentSchema]): Instance for managing collections. + multi_search (MultiSearch): Instance for performing multi-search operations. + keys (Keys): Instance for managing API keys. + aliases (Aliases): Instance for managing collection aliases. + analyticsV1 (AnalyticsV1): Instance for analytics operations (V1). + analytics (Analytics): Instance for analytics operations (v30). + curation_sets (CurationSets): Instance for Curation Sets (v30+) + stemming (Stemming): Instance for stemming dictionary operations. + operations (Operations): Instance for various Typesense operations. + debug (Debug): Instance for debug operations. + stopwords (Stopwords): Instance for managing stopwords. + metrics (Metrics): Instance for retrieving system and Typesense metrics. + conversations_models (ConversationsModels): Instance for managing conversation models. + """ + + def __init__(self, config_dict: ConfigDict) -> None: + """ + Initialize the Client instance. + + Args: + config_dict (ConfigDict): + A dictionary containing the configuration for the Typesense client. + + Example: + >>> config = { + ... "api_key": "your_api_key", + ... "nodes": [ + ... {"host": "localhost", "port": "8108", "protocol": "http"} + ... ], + ... "connection_timeout_seconds": 2, + ... 
} + >>> client = Client(config) + """ + self.config = Configuration(config_dict) + self.api_call = ApiCall(self.config) + self.collections: Collections[DocumentSchema] = Collections( + self.api_call + ) + self.multi_search = MultiSearch(self.api_call) + self.keys = Keys(self.api_call) + self.aliases = Aliases(self.api_call) + self._analyticsV1 = AnalyticsV1(self.api_call) + self.analytics = Analytics(self.api_call) + self.stemming = Stemming(self.api_call) + self.curation_sets = CurationSets(self.api_call) + self.operations = Operations(self.api_call) + self.debug = Debug(self.api_call) + self.stopwords = Stopwords(self.api_call) + self.synonym_sets = SynonymSets(self.api_call) + self.metrics = Metrics(self.api_call) + self.conversations_models = ConversationsModels(self.api_call) + self.nl_search_models = NLSearchModels(self.api_call) + + @property + @deprecated( + "AnalyticsV1 is deprecated on v30+. Use client.analytics instead.", + category=None, + ) + def analyticsV1(self) -> AnalyticsV1: + return self._analyticsV1 + + def typed_collection( + self, + *, + model: typing.Type[TDoc], + name: typing.Union[str, None] = None, + ) -> Collection[TDoc]: + """ + Get a Collection instance for a specific document model. + + This method allows retrieving a Collection instance typed to a specific document model. + If no name is provided, it uses the lowercase name of the model class as + the collection name. + + Args: + model (Type[TDoc]): The document model class. + name (Union[str, None], optional): + The name of the collection. If None, uses the lowercase model class name. + + Returns: + Collection[TDoc]: An Collection instance typed to the specified document model. + + Example: + >>> class Company(DocumentSchema): + ... name: str + ... 
num_employees: int + >>> client = Client(config) + >>> companies_collection = client.typed_collection(model=Company) + # This is equivalent to: + # companies_collection = client.typed_collection(model=Company, name="company") + """ + if name is None: + name = model.__name__.lower() + collection: Collection[TDoc] = self.collections[name] + return collection diff --git a/src/typesense/sync/collection.py b/src/typesense/sync/collection.py new file mode 100644 index 0000000..90821ce --- /dev/null +++ b/src/typesense/sync/collection.py @@ -0,0 +1,160 @@ +""" +This module provides async functionality for managing individual collections in the Typesense API. + +It contains the Collection class, which allows for retrieving, updating, and deleting +collections asynchronously. + +Classes: + Collection: Manages async operations on a single collection in the Typesense API. + +Dependencies: + - typesense.async_api_call: Provides the ApiCall class for making async API requests. + - typesense.types.collection: Provides CollectionSchema and CollectionUpdateSchema types. + - typesense.types.document: Provides DocumentSchema type. + +Note: This module uses conditional imports to support both Python 3.11+ and earlier versions. +""" + +import sys + +from typing_extensions import deprecated + +from typesense.types.collection import CollectionSchema, CollectionUpdateSchema + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + +from .api_call import ApiCall +from .documents import Documents +from .overrides import Overrides +from .synonyms import Synonyms +from typesense.types.document import DocumentSchema + +TDoc = typing.TypeVar("TDoc", bound=DocumentSchema, covariant=True) + + +class Collection(typing.Generic[TDoc]): + """ + Manages async operations on a single collection in the Typesense API. + + This class provides async methods to retrieve, update, and delete a collection. 
+ It is generic over the document type TDoc, which should be a subtype of DocumentSchema. + + Attributes: + name (str): The name of the collection. + api_call (ApiCall): The ApiCall instance for making async API requests. + """ + + def __init__(self, api_call: ApiCall, name: str): + """ + Initialize the Collection instance. + + Args: + api_call (ApiCall): The ApiCall instance for making async API requests. + name (str): The name of the collection. + """ + self.name = name + self.api_call = api_call + + self.documents: Documents[TDoc] = Documents(api_call, name) + self._overrides = Overrides(api_call, name) + self._synonyms = Synonyms(api_call, name) + + def retrieve(self) -> CollectionSchema: + """ + Retrieve the schema of this collection from Typesense. + + Returns: + CollectionSchema: The schema of the collection. + """ + response: CollectionSchema = self.api_call.get( + endpoint=self._endpoint_path, + entity_type=CollectionSchema, + as_json=True, + ) + return response + + def update( + self, schema_change: CollectionUpdateSchema + ) -> CollectionUpdateSchema: + """ + Update the schema of this collection in Typesense. + + Args: + schema_change (CollectionUpdateSchema): + The changes to apply to the collection schema. + + Returns: + CollectionUpdateSchema: The updated schema of the collection. + """ + response: CollectionUpdateSchema = self.api_call.patch( + endpoint=self._endpoint_path, + body=schema_change, + entity_type=CollectionUpdateSchema, + ) + return response + + def delete( + self, + delete_parameters: typing.Union[ + typing.Dict[str, typing.Union[str, bool]], + None, + ] = None, + ) -> CollectionSchema: + """ + Delete this collection from Typesense. + + Args: + delete_parameters (Union[Dict[str, Union[str, bool]], None], optional): + Additional parameters for the delete operation. Defaults to None. + + Returns: + CollectionSchema: The schema of the deleted collection. 
+ """ + response: CollectionSchema = self.api_call.delete( + self._endpoint_path, + entity_type=CollectionSchema, + params=delete_parameters, + ) + return response + + @property + @deprecated( + "Overrides is deprecated on v30+. Use client.curation_sets instead.", + category=None, + ) + def overrides(self) -> Overrides: + """Return the Overrides instance for this collection. + + Returns: + Overrides: The Overrides instance for this collection. + """ + return self._overrides + + @property + @deprecated( + "Synonyms is deprecated on v30+. Use client.synonym_sets instead.", + category=None, + ) + def synonyms(self) -> Synonyms: + """Return the Synonyms instance for this collection. + + Returns: + Synonyms: The Synonyms instance for this collection. + """ + """Return the Synonyms instance for this collection.""" + return self._synonyms + + @property + def _endpoint_path(self) -> str: + """ + Get the API endpoint path for this collection. + + Returns: + str: The full endpoint path for the collection. + """ + from .collections import Collections + + return "/".join([Collections.resource_path, self.name]) diff --git a/src/typesense/sync/collections.py b/src/typesense/sync/collections.py new file mode 100644 index 0000000..a0dd1f6 --- /dev/null +++ b/src/typesense/sync/collections.py @@ -0,0 +1,163 @@ +""" +This module provides async functionality for managing collections in the Typesense API. + +It contains the Collections class, which allows for creating, retrieving, and +accessing individual collections asynchronously. + +Classes: + Collections: Manages collections in the Typesense API (async). + +Dependencies: + - typesense.async_api_call: Provides the ApiCall class for making async API requests. + - typesense.async_collection: Provides the Collection class for individual collection operations. + - typesense.types.collection: Provides CollectionCreateSchema and CollectionSchema types. + - typesense.types.document: Provides DocumentSchema type. 
+ +Note: This module uses conditional imports to support both Python 3.11+ and earlier versions. +""" + +import sys + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + +from .api_call import ApiCall +from .collection import Collection +from typesense.types.collection import CollectionCreateSchema, CollectionSchema +from typesense.types.document import DocumentSchema + +TDoc = typing.TypeVar("TDoc", bound=DocumentSchema, covariant=True) + + +class Collections(typing.Generic[TDoc]): + """ + Manages collections in the Typesense API (async). + + This class provides async methods to create, retrieve, and access individual collections. + It is generic over the document type TDoc, which should be a subtype of DocumentSchema. + + Attributes: + resource_path (str): The API endpoint path for collections operations. + api_call (ApiCall): The ApiCall instance for making async API requests. + collections (Dict[str, Collection[TDoc]]): + A dictionary of Collection instances, keyed by collection name. + """ + + resource_path: typing.Final[str] = "/collections" + + def __init__(self, api_call: ApiCall): + """ + Initialize the Collections instance. + + Args: + api_call (ApiCall): The ApiCall instance for making async API requests. + """ + self.api_call = api_call + self.collections: typing.Dict[str, Collection[TDoc]] = {} + + def __contains__(self, collection_name: str) -> bool: + """ + Check if a collection exists in Typesense. + + This method tries to retrieve the specified collection to check for its existence, + utilizing the Collection.retrieve() method but without caching non-existent collections. + + Args: + collection_name (str): The name of the collection to check. + + Returns: + bool: True if the collection exists, False otherwise. 
+ """ + if collection_name in self.collections: + try: + self.collections[collection_name].retrieve() + return True + except Exception: + self.collections.pop(collection_name, None) + return False + + try: + Collection(self.api_call, collection_name).retrieve() + return True + except Exception: + return False + + def __getitem__(self, collection_name: str) -> Collection[TDoc]: + """ + Get or create an Collection instance for a given collection name. + + This method allows accessing collections using dictionary-like syntax. + If the Collection instance doesn't exist, it creates a new one. + + Args: + collection_name (str): The name of the collection to access. + + Returns: + Collection[TDoc]: The Collection instance for the specified collection name. + + Example: + >>> collections = Collections(async_api_call) + >>> fruits_collection = collections["fruits"] + """ + if not self.collections.get(collection_name): + self.collections[collection_name] = Collection( + self.api_call, + collection_name, + ) + return self.collections[collection_name] + + def create(self, schema: CollectionCreateSchema) -> CollectionSchema: + """ + Create a new collection in Typesense. + + Args: + schema (CollectionCreateSchema): + The schema defining the structure of the new collection. + + Returns: + CollectionSchema: + The schema of the created collection, as returned by the API. + + Example: + >>> collections = Collections(async_api_call) + >>> schema = { + ... "name": "companies", + ... "fields": [ + ... {"name": "company_name", "type": "string"}, + ... {"name": "num_employees", "type": "int32"}, + ... {"name": "country", "type": "string", "facet": True}, + ... ], + ... "default_sorting_field": "num_employees", + ... 
} + >>> created_schema = await collections.create(schema) + """ + call: CollectionSchema = self.api_call.post( + endpoint=Collections.resource_path, + entity_type=CollectionSchema, + as_json=True, + body=schema, + ) + return call + + def retrieve(self) -> typing.List[CollectionSchema]: + """ + Retrieve all collections from Typesense. + + Returns: + List[CollectionSchema]: + A list of schemas for all collections in the Typesense instance. + + Example: + >>> collections = Collections(async_api_call) + >>> all_collections = await collections.retrieve() + >>> for collection in all_collections: + ... print(collection["name"]) + """ + call: typing.List[CollectionSchema] = self.api_call.get( + endpoint=Collections.resource_path, + as_json=True, + entity_type=typing.List[CollectionSchema], + ) + return call diff --git a/src/typesense/sync/conversation_model.py b/src/typesense/sync/conversation_model.py new file mode 100644 index 0000000..2a0a924 --- /dev/null +++ b/src/typesense/sync/conversation_model.py @@ -0,0 +1,104 @@ +""" +This module provides async functionality for managing individual conversation models in Typesense. + +It contains the ConversationModel class, which allows for retrieving, updating, and deleting +conversation models asynchronously. + +Classes: + ConversationModel: Manages async operations on a single conversation model in the Typesense API. + +Dependencies: + - typesense.async_api_call: Provides the ApiCall class for making async API requests. + - typesense.types.conversations_model: Provides ConversationModelCreateSchema, ConversationModelDeleteSchema, and ConversationModelSchema types. + +Note: This module uses conditional imports to support both Python 3.11+ and earlier versions. 
+""" + +from .api_call import ApiCall +from typesense.types.conversations_model import ( + ConversationModelCreateSchema, + ConversationModelDeleteSchema, + ConversationModelSchema, +) + + +class ConversationModel: + """ + Manages async operations on a single conversation model in the Typesense API. + + This class provides async methods to retrieve, update, and delete a conversation model. + + Attributes: + model_id (str): The ID of the conversation model. + api_call (ApiCall): The ApiCall instance for making async API requests. + """ + + def __init__(self, api_call: ApiCall, model_id: str) -> None: + """ + Initialize the ConversationModel instance. + + Args: + api_call (ApiCall): The ApiCall instance for making async API requests. + model_id (str): The ID of the conversation model. + """ + self.model_id = model_id + self.api_call = api_call + + def retrieve(self) -> ConversationModelSchema: + """ + Retrieve this specific conversation model. + + Returns: + ConversationModelSchema: The schema containing the conversation model details. + """ + response: ConversationModelSchema = self.api_call.get( + self._endpoint_path, + as_json=True, + entity_type=ConversationModelSchema, + ) + return response + + def update( + self, model: ConversationModelCreateSchema + ) -> ConversationModelSchema: + """ + Update this specific conversation model. + + Args: + model (ConversationModelCreateSchema): + The schema containing the updated model details. + + Returns: + ConversationModelSchema: The schema containing the updated conversation model. + """ + response: ConversationModelSchema = self.api_call.put( + self._endpoint_path, + body=model, + entity_type=ConversationModelSchema, + ) + return response + + def delete(self) -> ConversationModelDeleteSchema: + """ + Delete this specific conversation model. + + Returns: + ConversationModelDeleteSchema: The schema containing the deletion response. 
+ """ + response: ConversationModelDeleteSchema = self.api_call.delete( + self._endpoint_path, + entity_type=ConversationModelDeleteSchema, + ) + return response + + @property + def _endpoint_path(self) -> str: + """ + Construct the API endpoint path for this specific conversation model. + + Returns: + str: The constructed endpoint path. + """ + from .conversations_models import ConversationsModels + + return "/".join([ConversationsModels.resource_path, self.model_id]) diff --git a/src/typesense/sync/conversations_models.py b/src/typesense/sync/conversations_models.py new file mode 100644 index 0000000..ed18837 --- /dev/null +++ b/src/typesense/sync/conversations_models.py @@ -0,0 +1,131 @@ +""" +This module provides async functionality for managing conversation models in Typesense. + +It contains the ConversationsModels class, which allows for creating, retrieving, and +accessing individual conversation models asynchronously. + +Classes: + ConversationsModels: Manages conversation models in the Typesense API (async). + +Dependencies: + - typesense.async_api_call: Provides the ApiCall class for making async API requests. + - typesense.async_conversation_model: Provides the ConversationModel class for individual conversation model operations. + - typesense.types.conversations_model: Provides ConversationModelCreateSchema and ConversationModelSchema types. + +Note: This module uses conditional imports to support both Python 3.11+ and earlier versions. +""" + +import sys + +from .api_call import ApiCall +from .conversation_model import ConversationModel +from typesense.types.conversations_model import ( + ConversationModelCreateSchema, + ConversationModelSchema, +) + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + + +class ConversationsModels: + """ + Manages conversation models in the Typesense API (async). + + This class provides async methods to create, retrieve, and access individual conversation models. 
+ + Attributes: + resource_path (str): The API endpoint path for conversation models operations. + api_call (ApiCall): The ApiCall instance for making async API requests. + conversations_models (Dict[str, ConversationModel]): + A dictionary of ConversationModel instances, keyed by model ID. + """ + + resource_path: typing.Final[str] = "/conversations/models" + + def __init__(self, api_call: ApiCall) -> None: + """ + Initialize the ConversationsModels instance. + + Args: + api_call (ApiCall): The ApiCall instance for making async API requests. + """ + self.api_call = api_call + self.conversations_models: typing.Dict[str, ConversationModel] = {} + + def __getitem__(self, model_id: str) -> ConversationModel: + """ + Get or create an ConversationModel instance for a given model ID. + + This method allows accessing conversation models using dictionary-like syntax. + If the ConversationModel instance doesn't exist, it creates a new one. + + Args: + model_id (str): The ID of the conversation model. + + Returns: + ConversationModel: The ConversationModel instance for the specified model ID. + + Example: + >>> conversations_models = ConversationsModels(async_api_call) + >>> model = conversations_models["model_id"] + """ + if model_id not in self.conversations_models: + self.conversations_models[model_id] = ConversationModel( + self.api_call, + model_id, + ) + return self.conversations_models[model_id] + + def create( + self, model: ConversationModelCreateSchema + ) -> ConversationModelSchema: + """ + Create a new conversation model. + + Args: + model (ConversationModelCreateSchema): + The schema for creating the conversation model. + + Returns: + ConversationModelSchema: The created conversation model. + + Example: + >>> conversations_models = ConversationsModels(async_api_call) + >>> model = await conversations_models.create( + ... { + ... "api_key": "key", + ... "model_name": "openai/gpt-3.5-turbo", + ... "history_collection": "conversation_store", + ... } + ... 
) + """ + response: ConversationModelSchema = self.api_call.post( + endpoint=ConversationsModels.resource_path, + entity_type=ConversationModelSchema, + as_json=True, + body=model, + ) + return response + + def retrieve(self) -> typing.List[ConversationModelSchema]: + """ + Retrieve all conversation models. + + Returns: + List[ConversationModelSchema]: A list of all conversation models. + + Example: + >>> conversations_models = ConversationsModels(async_api_call) + >>> all_models = await conversations_models.retrieve() + >>> for model in all_models: + ... print(model["id"]) + """ + response: typing.List[ConversationModelSchema] = self.api_call.get( + endpoint=ConversationsModels.resource_path, + entity_type=typing.List[ConversationModelSchema], + as_json=True, + ) + return response diff --git a/src/typesense/sync/curation_set.py b/src/typesense/sync/curation_set.py new file mode 100644 index 0000000..04f3646 --- /dev/null +++ b/src/typesense/sync/curation_set.py @@ -0,0 +1,211 @@ +""" +This module provides async functionality for managing individual curation sets in Typesense. + +It contains the CurationSet class, which allows for retrieving, updating, deleting, +and managing items within a curation set asynchronously. + +Classes: + CurationSet: Manages async operations on a single curation set in the Typesense API. + +Dependencies: + - typesense.async_api_call: Provides the ApiCall class for making async API requests. + - typesense.types.curation_set: Provides various curation set schema types. + +Note: This module uses conditional imports to support both Python 3.11+ and earlier versions. 
+""" + +import sys + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + +from .api_call import ApiCall +from typesense.types.curation_set import ( + CurationItemDeleteSchema, + CurationItemSchema, + CurationSetDeleteSchema, + CurationSetListItemResponseSchema, + CurationSetSchema, + CurationSetUpsertSchema, +) + + +class CurationSet: + """ + Manages async operations on a single curation set in the Typesense API. + + This class provides async methods to retrieve, update, and delete a curation set, + as well as manage items within the curation set. + + Attributes: + api_call (ApiCall): The ApiCall instance for making async API requests. + name (str): The name of the curation set. + """ + + def __init__(self, api_call: ApiCall, name: str) -> None: + """ + Initialize the CurationSet instance. + + Args: + api_call (ApiCall): The ApiCall instance for making async API requests. + name (str): The name of the curation set. + """ + self.api_call = api_call + self.name = name + + @property + def _endpoint_path(self) -> str: + """ + Get the API endpoint path for this curation set. + + Returns: + str: The full endpoint path for the curation set. + """ + from .curation_sets import CurationSets + + return "/".join([CurationSets.resource_path, self.name]) + + def retrieve(self) -> CurationSetSchema: + """ + Retrieve this specific curation set. + + Returns: + CurationSetSchema: The schema containing the curation set details. + """ + response: CurationSetSchema = self.api_call.get( + self._endpoint_path, + as_json=True, + entity_type=CurationSetSchema, + ) + return response + + def delete(self) -> CurationSetDeleteSchema: + """ + Delete this specific curation set. + + Returns: + CurationSetDeleteSchema: The schema containing the deletion response. 
+ """ + response: CurationSetDeleteSchema = self.api_call.delete( + self._endpoint_path, + entity_type=CurationSetDeleteSchema, + ) + return response + + def upsert( + self, + payload: CurationSetUpsertSchema, + ) -> CurationSetSchema: + """ + Create or update this curation set. + + Args: + payload (CurationSetUpsertSchema): The schema for creating or updating the curation set. + + Returns: + CurationSetSchema: The created or updated curation set. + """ + response: CurationSetSchema = self.api_call.put( + "/".join([self._endpoint_path]), + body=payload, + entity_type=CurationSetSchema, + ) + return response + + # Items sub-resource + @property + def _items_path(self) -> str: + """ + Get the API endpoint path for items in this curation set. + + Returns: + str: The full endpoint path for items (e.g., /curation_sets/{name}/items). + """ + return "/".join([self._endpoint_path, "items"]) + + def list_items( + self, + *, + limit: typing.Union[int, None] = None, + offset: typing.Union[int, None] = None, + ) -> CurationSetListItemResponseSchema: + """ + List items in this curation set. + + Args: + limit (Union[int, None], optional): Maximum number of items to return. Defaults to None. + offset (Union[int, None], optional): Number of items to skip. Defaults to None. + + Returns: + CurationSetListItemResponseSchema: The list of items in the curation set. + """ + params: typing.Dict[str, typing.Union[int, None]] = { + "limit": limit, + "offset": offset, + } + # Filter out None values to avoid sending them + clean_params: typing.Dict[str, int] = { + k: v for k, v in params.items() if v is not None + } + response: CurationSetListItemResponseSchema = self.api_call.get( + self._items_path, + as_json=True, + entity_type=CurationSetListItemResponseSchema, + params=clean_params or None, + ) + return response + + def get_item(self, item_id: str) -> CurationItemSchema: + """ + Get a specific item from this curation set. + + Args: + item_id (str): The ID of the item to retrieve. 
+ + Returns: + CurationItemSchema: The item schema. + """ + response: CurationItemSchema = self.api_call.get( + "/".join([self._items_path, item_id]), + as_json=True, + entity_type=CurationItemSchema, + ) + return response + + def upsert_item( + self, item_id: str, item: CurationItemSchema + ) -> CurationItemSchema: + """ + Create or update an item in this curation set. + + Args: + item_id (str): The ID of the item. + item (CurationItemSchema): The item schema. + + Returns: + CurationItemSchema: The created or updated item. + """ + response: CurationItemSchema = self.api_call.put( + "/".join([self._items_path, item_id]), + body=item, + entity_type=CurationItemSchema, + ) + return response + + def delete_item(self, item_id: str) -> CurationItemDeleteSchema: + """ + Delete an item from this curation set. + + Args: + item_id (str): The ID of the item to delete. + + Returns: + CurationItemDeleteSchema: The deletion response. + """ + response: CurationItemDeleteSchema = self.api_call.delete( + "/".join([self._items_path, item_id]), + entity_type=CurationItemDeleteSchema, + ) + return response diff --git a/src/typesense/sync/curation_sets.py b/src/typesense/sync/curation_sets.py new file mode 100644 index 0000000..48f0ea0 --- /dev/null +++ b/src/typesense/sync/curation_sets.py @@ -0,0 +1,91 @@ +""" +This module provides async functionality for managing curation sets in Typesense. + +It contains the CurationSets class, which allows for retrieving and +accessing individual curation sets asynchronously. + +Classes: + CurationSets: Manages curation sets in the Typesense API (async). + +Dependencies: + - typesense.async_api_call: Provides the ApiCall class for making async API requests. + - typesense.async_curation_set: Provides the CurationSet class for individual curation set operations. + - typesense.types.curation_set: Provides CurationSetsListResponseSchema type. + +Note: This module uses conditional imports to support both Python 3.11+ and earlier versions. 
+""" + +import sys + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + +from .api_call import ApiCall +from .curation_set import CurationSet +from typesense.types.curation_set import CurationSetsListResponseSchema + + +class CurationSets: + """ + Manages curation sets in the Typesense API (async). + + This class provides async methods to retrieve and access individual curation sets. + + Attributes: + resource_path (str): The API endpoint path for curation sets operations. + api_call (ApiCall): The ApiCall instance for making async API requests. + """ + + resource_path: typing.Final[str] = "/curation_sets" + + def __init__(self, api_call: ApiCall) -> None: + """ + Initialize the CurationSets instance. + + Args: + api_call (ApiCall): The ApiCall instance for making async API requests. + """ + self.api_call = api_call + + def retrieve(self) -> CurationSetsListResponseSchema: + """ + Retrieve all curation sets. + + Returns: + CurationSetsListResponseSchema: The list of all curation sets. + + Example: + >>> curation_sets = CurationSets(async_api_call) + >>> all_sets = await curation_sets.retrieve() + >>> for set in all_sets: + ... print(set["name"]) + """ + response: CurationSetsListResponseSchema = self.api_call.get( + CurationSets.resource_path, + as_json=True, + entity_type=CurationSetsListResponseSchema, + ) + return response + + def __getitem__(self, curation_set_name: str) -> CurationSet: + """ + Get or create an CurationSet instance for a given curation set name. + + This method allows accessing curation sets using dictionary-like syntax. + If the CurationSet instance doesn't exist, it creates a new one. + + Args: + curation_set_name (str): The name of the curation set. + + Returns: + CurationSet: The CurationSet instance for the specified name. 
+ + Example: + >>> curation_sets = CurationSets(async_api_call) + >>> products_set = curation_sets["products"] + """ + from .curation_set import CurationSet as PerSet + + return PerSet(self.api_call, curation_set_name) diff --git a/src/typesense/sync/debug.py b/src/typesense/sync/debug.py new file mode 100644 index 0000000..6fb496a --- /dev/null +++ b/src/typesense/sync/debug.py @@ -0,0 +1,71 @@ +""" +This module provides async functionality for accessing debug information in Typesense. + +It contains the Debug class, which allows for retrieving debug information +asynchronously. + +Classes: + Debug: Manages async operations for accessing debug information in the Typesense API. + +Dependencies: + - typesense.async_api_call: Provides the ApiCall class for making async API requests. + - typesense.types.debug: Provides DebugResponseSchema type. + +Note: This module uses conditional imports to support both Python 3.11+ and earlier versions. +""" + +import sys + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + +from .api_call import ApiCall +from typesense.types.debug import DebugResponseSchema + + +class Debug: + """ + Manages async operations for accessing debug information in the Typesense API. + + This class provides async methods to retrieve debug information from the Typesense server, + which can be useful for system diagnostics and troubleshooting. + + Attributes: + resource_path (str): The API resource path for debug operations. + api_call (ApiCall): The ApiCall instance for making async API requests. + """ + + resource_path: typing.Final[str] = "/debug" + + def __init__(self, api_call: ApiCall) -> None: + """ + Initialize the Debug instance. + + Args: + api_call (ApiCall): The ApiCall instance for making async API requests. + """ + self.api_call = api_call + + def retrieve(self) -> DebugResponseSchema: + """ + Retrieve debug information from the Typesense server. 
+ + This method sends an async GET request to the debug endpoint and returns + the server's debug information. + + Returns: + DebugResponseSchema: A schema containing the debug information. + + Example: + >>> debug = Debug(async_api_call) + >>> info = await debug.retrieve() + >>> print(info["version"]) + """ + response: DebugResponseSchema = self.api_call.get( + Debug.resource_path, + as_json=True, + entity_type=DebugResponseSchema, + ) + return response diff --git a/src/typesense/sync/document.py b/src/typesense/sync/document.py new file mode 100644 index 0000000..0daf2b6 --- /dev/null +++ b/src/typesense/sync/document.py @@ -0,0 +1,150 @@ +""" +This module provides async functionality for managing individual documents in Typesense collections. + +It contains the Document class, which allows for retrieving, updating, and deleting +documents asynchronously. + +Classes: + Document: Manages async operations on a single document in the Typesense API. + +Dependencies: + - typesense.async_api_call: Provides the ApiCall class for making async API requests. + - typesense.types.document: Provides various document schema types. + +Note: This module uses conditional imports to support both Python 3.11+ and earlier versions. +""" + +import sys + +from .api_call import ApiCall +from typesense.types.document import ( + DeleteSingleDocumentParameters, + DirtyValuesParameters, + DocumentSchema, + RetrieveParameters, +) + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + +TDoc = typing.TypeVar("TDoc", bound=DocumentSchema) + + +class Document(typing.Generic[TDoc]): + """ + Manages async operations on a single document in the Typesense API. + + This class provides async methods to retrieve, update, and delete a document. + + Attributes: + api_call (ApiCall): The ApiCall instance for making async API requests. + collection_name (str): The name of the collection. + document_id (str): The ID of the document. 
+ """ + + def __init__( + self, + api_call: ApiCall, + collection_name: str, + document_id: str, + ) -> None: + """ + Initialize the Document instance. + + Args: + api_call (ApiCall): The ApiCall instance for making async API requests. + collection_name (str): The name of the collection. + document_id (str): The ID of the document. + """ + self.api_call = api_call + self.collection_name = collection_name + self.document_id = document_id + + def retrieve( + self, + retrieve_parameters: typing.Union[RetrieveParameters, None] = None, + ) -> TDoc: + """ + Retrieve this specific document. + + Args: + retrieve_parameters (Union[RetrieveParameters, None], optional): + Parameters for retrieving the document. + + Returns: + TDoc: The retrieved document. + """ + response = self.api_call.get( + endpoint=self._endpoint_path, + entity_type=typing.Dict[str, str], + as_json=True, + params=retrieve_parameters, + ) + return typing.cast(TDoc, response) + + def update( + self, + document: TDoc, + dirty_values_parameters: typing.Union[DirtyValuesParameters, None] = None, + ) -> TDoc: + """ + Update this specific document. + + Args: + document (TDoc): The updated document data. + dirty_values_parameters (Union[DirtyValuesParameters, None], optional): + Parameters for handling dirty values. + + Returns: + TDoc: The updated document. + """ + response = self.api_call.patch( + self._endpoint_path, + body=document, + params=dirty_values_parameters, + entity_type=typing.Dict[str, str], + ) + return typing.cast(TDoc, response) + + def delete( + self, + delete_parameters: typing.Union[DeleteSingleDocumentParameters, None] = None, + ) -> TDoc: + """ + Delete this specific document. + + Args: + delete_parameters (Union[DeleteSingleDocumentParameters, None], optional): + Parameters for deletion. + + Returns: + TDoc: The deleted document. 
+ """ + response = self.api_call.delete( + self._endpoint_path, + entity_type=typing.Dict[str, str], + params=delete_parameters, + ) + return typing.cast(TDoc, response) + + @property + def _endpoint_path(self) -> str: + """ + Construct the API endpoint path for this specific document. + + Returns: + str: The constructed endpoint path. + """ + from .collections import Collections + from .documents import Documents + + return "/".join( + [ + Collections.resource_path, + self.collection_name, + Documents.resource_path, + self.document_id, + ], + ) diff --git a/src/typesense/sync/documents.py b/src/typesense/sync/documents.py new file mode 100644 index 0000000..b22ef69 --- /dev/null +++ b/src/typesense/sync/documents.py @@ -0,0 +1,453 @@ +""" +This module provides async functionality for managing documents in Typesense collections. + +It contains the Documents class, which allows for creating, updating, importing, exporting, +searching, and deleting documents asynchronously. + +Classes: + Documents: Manages async operations on documents in the Typesense API. + +Dependencies: + - typesense.async_api_call: Provides the ApiCall class for making async API requests. + - typesense.async_document: Provides the Document class for individual document operations. + - typesense.types.document: Provides various document schema types. + - typesense.preprocess: Provides stringify_search_params for search parameter processing. + +Note: This module uses conditional imports to support both Python 3.11+ and earlier versions. 
+""" + +import json +import sys + +from .api_call import ApiCall +from .document import Document +from typesense.exceptions import TypesenseClientError +from typesense.logger import logger +from typesense.preprocess import stringify_search_params +from typesense.types.document import ( + DeleteQueryParameters, + DeleteResponse, + DirtyValuesParameters, + DocumentExportParameters, + DocumentImportParameters, + DocumentImportParametersReturnDoc, + DocumentImportParametersReturnDocAndId, + DocumentImportParametersReturnId, + DocumentSchema, + DocumentWriteParameters, + ImportResponse, + ImportResponseFail, + ImportResponseSuccess, + ImportResponseWithDoc, + ImportResponseWithDocAndId, + ImportResponseWithId, + SearchParameters, + SearchResponse, + UpdateByFilterParameters, + UpdateByFilterResponse, +) + +# mypy: disable-error-code="misc" + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + +TDoc = typing.TypeVar("TDoc", bound=DocumentSchema) + +_ImportParameters = typing.Union[ + DocumentImportParameters, + None, +] + + +class Documents(typing.Generic[TDoc]): + """ + Manages async operations on documents in the Typesense API. + + This class provides async methods to interact with documents, including + creating, updating, importing, exporting, searching, and deleting them. + + Attributes: + resource_path (str): The API resource path for document operations. + api_call (ApiCall): The ApiCall instance for making async API requests. + collection_name (str): The name of the collection. + documents (Dict[str, Document[TDoc]]): A dictionary of Document instances. + """ + + resource_path: typing.Final[str] = "documents" + + def __init__(self, api_call: ApiCall, collection_name: str) -> None: + """ + Initialize the Documents instance. + + Args: + api_call (ApiCall): The ApiCall instance for making async API requests. + collection_name (str): The name of the collection. 
+ """ + self.api_call = api_call + self.collection_name = collection_name + self.documents: typing.Dict[str, Document[TDoc]] = {} + + def __getitem__(self, document_id: str) -> Document[TDoc]: + """ + Get or create an Document instance for a given document ID. + + Args: + document_id (str): The ID of the document. + + Returns: + Document[TDoc]: The Document instance for the specified document ID. + """ + if document_id not in self.documents: + self.documents[document_id] = Document( + self.api_call, + self.collection_name, + document_id, + ) + + return self.documents[document_id] + + def create( + self, + document: TDoc, + dirty_values_parameters: typing.Union[DirtyValuesParameters, None] = None, + ) -> TDoc: + """ + Create a new document in the collection. + + Args: + document (TDoc): The document to create. + dirty_values_parameters (Union[DirtyValuesParameters, None], optional): + Parameters for handling dirty values. + + Returns: + TDoc: The created document. + """ + dirty_values_parameters = dirty_values_parameters or {} + dirty_values_parameters["action"] = "create" + response = self.api_call.post( + self._endpoint_path(), + body=document, + params=dirty_values_parameters, + as_json=True, + entity_type=typing.Dict[str, str], + ) + return typing.cast(TDoc, response) + + def create_many( + self, + documents: typing.List[TDoc], + dirty_values_parameters: typing.Union[DirtyValuesParameters, None] = None, + ) -> typing.List[typing.Union[ImportResponseSuccess, ImportResponseFail[TDoc]]]: + """ + Create multiple documents in the collection. + + Args: + documents (List[TDoc]): The list of documents to create. + dirty_values_parameters (Union[DirtyValuesParameters, None], optional): + Parameters for handling dirty values. + + Returns: + List[Union[ImportResponseSuccess, ImportResponseFail[TDoc]]]: + The list of import responses. 
+ """ + logger.warn("`create_many` is deprecated: please use `import_`.") + return self.import_(documents, dirty_values_parameters) + + def upsert( + self, + document: TDoc, + dirty_values_parameters: typing.Union[DirtyValuesParameters, None] = None, + ) -> TDoc: + """ + Create or update a document in the collection. + + Args: + document (TDoc): The document to upsert. + dirty_values_parameters (Union[DirtyValuesParameters, None], optional): + Parameters for handling dirty values. + + Returns: + TDoc: The upserted document. + """ + dirty_values_parameters = dirty_values_parameters or {} + dirty_values_parameters["action"] = "upsert" + response = self.api_call.post( + self._endpoint_path(), + body=document, + params=dirty_values_parameters, + as_json=True, + entity_type=typing.Dict[str, str], + ) + return typing.cast(TDoc, response) + + def update( + self, + document: TDoc, + dirty_values_parameters: typing.Union[UpdateByFilterParameters, None] = None, + ) -> UpdateByFilterResponse: + """ + Update a document in the collection. + + Args: + document (TDoc): The document to update. + dirty_values_parameters (Union[UpdateByFilterParameters, None], optional): + Parameters for handling dirty values and filtering. + + Returns: + UpdateByFilterResponse: The response containing information about the update. + """ + dirty_values_parameters = dirty_values_parameters or {} + dirty_values_parameters["action"] = "update" + response: UpdateByFilterResponse = self.api_call.patch( + self._endpoint_path(), + body=document, + params=dirty_values_parameters, + entity_type=UpdateByFilterResponse, + ) + return response + + def import_jsonl(self, documents_jsonl: str) -> str: + """ + Import documents from a JSONL string. + + Args: + documents_jsonl (str): The JSONL string containing documents to import. + + Returns: + str: The import response as a string. 
+ """ + logger.warning("`import_jsonl` is deprecated: please use `import_`.") + return self.import_(documents_jsonl) + + @typing.overload + def import_( + self, + documents: typing.List[TDoc], + import_parameters: DocumentImportParametersReturnDocAndId, + batch_size: typing.Union[int, None] = None, + ) -> typing.List[ + typing.Union[ImportResponseWithDocAndId[TDoc], ImportResponseFail[TDoc]] + ]: ... + + @typing.overload + def import_( + self, + documents: typing.List[TDoc], + import_parameters: DocumentImportParametersReturnId, + batch_size: typing.Union[int, None] = None, + ) -> typing.List[typing.Union[ImportResponseWithId, ImportResponseFail[TDoc]]]: ... + + @typing.overload + def import_( + self, + documents: typing.List[TDoc], + import_parameters: typing.Union[DocumentWriteParameters, None] = None, + batch_size: typing.Union[int, None] = None, + ) -> typing.List[typing.Union[ImportResponseSuccess, ImportResponseFail[TDoc]]]: ... + + @typing.overload + def import_( + self, + documents: typing.List[TDoc], + import_parameters: DocumentImportParametersReturnDoc, + batch_size: typing.Union[int, None] = None, + ) -> typing.List[ + typing.Union[ImportResponseWithDoc[TDoc], ImportResponseFail[TDoc]] + ]: ... + + @typing.overload + def import_( + self, + documents: typing.List[TDoc], + import_parameters: _ImportParameters, + batch_size: typing.Union[int, None] = None, + ) -> typing.List[ImportResponse[TDoc]]: ... + + @typing.overload + def import_( + self, + documents: typing.Union[bytes, str], + import_parameters: _ImportParameters = None, + batch_size: typing.Union[int, None] = None, + ) -> str: ... + + def import_( + self, + documents: typing.Union[bytes, str, typing.List[TDoc]], + import_parameters: _ImportParameters = None, + batch_size: typing.Union[int, None] = None, + ) -> typing.Union[ImportResponse[TDoc], str]: + """ + Import documents into the collection. + + This method supports various input types and import parameters. 
+ It can handle both individual documents and batches of documents. + + Args: + documents: The documents to import. + import_parameters: Parameters for the import operation. + batch_size: The size of each batch for batch imports. + + Returns: + The import response, which can be a list of responses or a string. + + Raises: + TypesenseClientError: If an empty list of documents is provided. + """ + if isinstance(documents, (str, bytes)): + return self._import_raw(documents, import_parameters) + + if batch_size: + return self._batch_import(documents, import_parameters, batch_size) + + return self._bulk_import(documents, import_parameters) + + def export( + self, + export_parameters: typing.Union[DocumentExportParameters, None] = None, + ) -> str: + """ + Export documents from the collection. + + Args: + export_parameters (Union[DocumentExportParameters, None], optional): + Parameters for the export operation. + + Returns: + str: The exported documents as a string. + """ + api_response: str = self.api_call.get( + self._endpoint_path("export"), + params=export_parameters, + as_json=False, + entity_type=str, + ) + return api_response + + def search(self, search_parameters: SearchParameters) -> SearchResponse[TDoc]: + """ + Search for documents in the collection. + + Args: + search_parameters (SearchParameters): The search parameters. + + Returns: + SearchResponse[TDoc]: The search response containing matching documents. + """ + stringified_search_params = stringify_search_params(search_parameters) + response: SearchResponse[TDoc] = self.api_call.get( + self._endpoint_path("search"), + params=stringified_search_params, + entity_type=SearchResponse, + as_json=True, + ) + return response + + def delete( + self, + delete_parameters: typing.Union[DeleteQueryParameters, None] = None, + ) -> DeleteResponse: + """ + Delete documents from the collection based on given parameters. 
+ + Args: + delete_parameters (Union[DeleteQueryParameters, None], optional): + Parameters for deletion. + + Returns: + DeleteResponse: The response containing information about the deletion. + """ + response: DeleteResponse = self.api_call.delete( + self._endpoint_path(), + params=delete_parameters, + entity_type=DeleteResponse, + ) + return response + + def _endpoint_path(self, action: typing.Union[str, None] = None) -> str: + """ + Construct the API endpoint path for document operations. + + Args: + action (Union[str, None], optional): The action to perform. Defaults to None. + + Returns: + str: The constructed endpoint path. + """ + from .collections import Collections + + action = action or "" + return "/".join( + [ + Collections.resource_path, + self.collection_name, + self.resource_path, + action, + ], + ) + + def _import_raw( + self, + documents: typing.Union[bytes, str], + import_parameters: _ImportParameters, + ) -> str: + """Import raw document data.""" + response: str = self.api_call.post( + self._endpoint_path("import"), + body=documents, + params=import_parameters, + as_json=False, + entity_type=str, + ) + + return response + + def _batch_import( + self, + documents: typing.List[TDoc], + import_parameters: _ImportParameters, + batch_size: int, + ) -> ImportResponse[TDoc]: + """Import documents in batches.""" + response_objs: ImportResponse[TDoc] = [] + for batch_index in range(0, len(documents), batch_size): + batch = documents[batch_index : batch_index + batch_size] + api_response = self._bulk_import(batch, import_parameters) + response_objs.extend(api_response) + return response_objs + + def _bulk_import( + self, + documents: typing.List[TDoc], + import_parameters: _ImportParameters, + ) -> ImportResponse[TDoc]: + """Import a list of documents in bulk.""" + document_strs = [json.dumps(doc) for doc in documents] + if not document_strs: + raise TypesenseClientError("Cannot import an empty list of documents.") + + docs_import = "\n".join(document_strs) 
+ res = self.api_call.post( + self._endpoint_path("import"), + body=docs_import, + params=import_parameters, + entity_type=str, + as_json=False, + ) + return self._parse_import_response(res) + + def _parse_import_response(self, response: str) -> ImportResponse[TDoc]: + """Parse the import response string into a list of response objects.""" + response_objs: typing.List[ImportResponse] = [] + for res_obj_str in response.split("\n"): + try: + res_obj_json = json.loads(res_obj_str) + except json.JSONDecodeError as decode_error: + raise TypesenseClientError( + f"Invalid response - {res_obj_str}", + ) from decode_error + response_objs.append(res_obj_json) + return response_objs diff --git a/src/typesense/sync/key.py b/src/typesense/sync/key.py new file mode 100644 index 0000000..e90ec3d --- /dev/null +++ b/src/typesense/sync/key.py @@ -0,0 +1,80 @@ +""" +This module provides async functionality for managing individual API keys in Typesense. + +It contains the Key class, which allows for retrieving and deleting +API keys asynchronously. + +Classes: + Key: Manages async operations on a single API key in the Typesense API. + +Dependencies: + - typesense.async_api_call: Provides the ApiCall class for making async API requests. + - typesense.types.key: Provides ApiKeyDeleteSchema and ApiKeySchema types. + +Note: This module uses conditional imports to support both Python 3.11+ and earlier versions. +""" + +from .api_call import ApiCall +from typesense.types.key import ApiKeyDeleteSchema, ApiKeySchema + + +class Key: + """ + Manages async operations on a single API key in the Typesense API. + + This class provides async methods to retrieve and delete an API key. + + Attributes: + key_id (int): The ID of the API key. + api_call (ApiCall): The ApiCall instance for making async API requests. + """ + + def __init__(self, api_call: ApiCall, key_id: int) -> None: + """ + Initialize the Key instance. + + Args: + api_call (ApiCall): The ApiCall instance for making async API requests. 
+ key_id (int): The ID of the API key. + """ + self.key_id = key_id + self.api_call = api_call + + def retrieve(self) -> ApiKeySchema: + """ + Retrieve this specific API key. + + Returns: + ApiKeySchema: The schema containing the API key details. + """ + response: ApiKeySchema = self.api_call.get( + self._endpoint_path, + as_json=True, + entity_type=ApiKeySchema, + ) + return response + + def delete(self) -> ApiKeyDeleteSchema: + """ + Delete this specific API key. + + Returns: + ApiKeyDeleteSchema: The schema containing the deletion response. + """ + response: ApiKeyDeleteSchema = self.api_call.delete( + self._endpoint_path, + entity_type=ApiKeyDeleteSchema, + ) + return response + + @property + def _endpoint_path(self) -> str: + """ + Construct the API endpoint path for this specific API key. + + Returns: + str: The constructed endpoint path. + """ + from .keys import Keys + + return "/".join([Keys.resource_path, str(self.key_id)]) diff --git a/src/typesense/sync/keys.py b/src/typesense/sync/keys.py new file mode 100644 index 0000000..b70ec5e --- /dev/null +++ b/src/typesense/sync/keys.py @@ -0,0 +1,170 @@ +""" +This module provides async functionality for managing API keys in Typesense. + +It contains the Keys class, which allows for creating, retrieving, and +generating scoped search keys asynchronously. + +Classes: + Keys: Manages API keys in the Typesense API (async). + +Dependencies: + - typesense.async_api_call: Provides the ApiCall class for making async API requests. + - typesense.async_key: Provides the Key class for individual API key operations. + - typesense.types.document: Provides GenerateScopedSearchKeyParams type. + - typesense.types.key: Provides various API key schema types. + +Note: This module uses conditional imports to support both Python 3.11+ and earlier versions. 
+""" + +import base64 +import hashlib +import hmac +import json +import sys + +from .api_call import ApiCall +from .key import Key +from typesense.types.document import GenerateScopedSearchKeyParams +from typesense.types.key import ( + ApiKeyCreateResponseSchema, + ApiKeyCreateSchema, + ApiKeyRetrieveSchema, + ApiKeySchema, +) + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + + +class Keys: + """ + Manages API keys in the Typesense API (async). + + This class provides async methods to create, retrieve, and generate scoped search keys. + + Attributes: + resource_path (str): The API endpoint path for key operations. + api_call (ApiCall): The ApiCall instance for making async API requests. + keys (Dict[int, Key]): A dictionary of Key instances, keyed by key ID. + """ + + resource_path: typing.Final[str] = "/keys" + + def __init__(self, api_call: ApiCall) -> None: + """ + Initialize the Keys instance. + + Args: + api_call (ApiCall): The ApiCall instance for making async API requests. + """ + self.api_call = api_call + self.keys: typing.Dict[int, Key] = {} + + def __getitem__(self, key_id: int) -> Key: + """ + Get or create an Key instance for a given key ID. + + This method allows accessing API keys using dictionary-like syntax. + If the Key instance doesn't exist, it creates a new one. + + Args: + key_id (int): The ID of the API key. + + Returns: + Key: The Key instance for the specified key ID. + + Example: + >>> keys = Keys(async_api_call) + >>> key = keys[1] + """ + if not self.keys.get(key_id): + self.keys[key_id] = Key(self.api_call, key_id) + return self.keys[key_id] + + def create(self, schema: ApiKeyCreateSchema) -> ApiKeyCreateResponseSchema: + """ + Create a new API key. + + Args: + schema (ApiKeyCreateSchema): The schema for creating the API key. + + Returns: + ApiKeyCreateResponseSchema: The created API key. + + Example: + >>> keys = Keys(async_api_call) + >>> key = await keys.create( + ... { + ... 
"actions": ["documents:search"], + ... "collections": ["companies"], + ... "description": "Search-only key", + ... } + ... ) + """ + response: ApiKeySchema = self.api_call.post( + Keys.resource_path, + as_json=True, + body=schema, + entity_type=ApiKeySchema, + ) + return response + + def generate_scoped_search_key( + self, + search_key: str, + key_parameters: GenerateScopedSearchKeyParams, + ) -> bytes: + """ + Generate a scoped search key. + + Note: This is a synchronous method as it performs local computation + and does not make any API calls. Only a key generated with the + `documents:search` action will be accepted by the server. + + Args: + search_key (str): The search key to use as a base. + key_parameters (GenerateScopedSearchKeyParams): Parameters for the scoped key. + + Returns: + bytes: The generated scoped search key. + + Example: + >>> keys = Keys(async_api_call) + >>> scoped_key = keys.generate_scoped_search_key( + ... "KmacipDKNqAM3YiigXfw5pZvNOrPQUba", + ... {"q": "search query", "collection": "companies"}, + ... ) + """ + params_str = json.dumps(key_parameters) + digest = base64.b64encode( + hmac.new( + search_key.encode("utf-8"), + params_str.encode("utf-8"), + digestmod=hashlib.sha256, + ).digest(), + ) + key_prefix = search_key[:4] + raw_scoped_key = f"{digest.decode('utf-8')}{key_prefix}{params_str}" + return base64.b64encode(raw_scoped_key.encode("utf-8")) + + def retrieve(self) -> ApiKeyRetrieveSchema: + """ + Retrieve all API keys. + + Returns: + ApiKeyRetrieveSchema: The schema containing all API keys. + + Example: + >>> keys = Keys(async_api_call) + >>> all_keys = await keys.retrieve() + >>> for key in all_keys["keys"]: + ... 
print(key["id"]) + """ + response: ApiKeyRetrieveSchema = self.api_call.get( + Keys.resource_path, + entity_type=ApiKeyRetrieveSchema, + as_json=True, + ) + return response diff --git a/src/typesense/sync/metrics.py b/src/typesense/sync/metrics.py new file mode 100644 index 0000000..d25f6fd --- /dev/null +++ b/src/typesense/sync/metrics.py @@ -0,0 +1,69 @@ +""" +This module provides async functionality for retrieving metrics from the Typesense API. + +It contains the Metrics class, which handles async API operations for retrieving +system and Typesense metrics such as CPU, memory, disk, and network usage. + +Classes: + MetricsResponse: Type definition for metrics response (imported from typesense.types.metrics). + Metrics: Manages async retrieval of metrics from the Typesense API. + +Dependencies: + - typesense.async_api_call: Provides the ApiCall class for making async API requests. + - typesense.metrics: Provides MetricsResponse type definitions. + +Note: This module uses conditional imports to support both Python 3.11+ and earlier versions. +""" + +import sys + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + +from .api_call import ApiCall +from typesense.types.metrics import MetricsResponse + + +class Metrics: + """ + Manages async metrics retrieval from the Typesense API. + + This class provides async methods to retrieve system and Typesense metrics + such as CPU, memory, disk, and network usage. + + Attributes: + resource_path (str): The base path for metrics endpoint. + api_call (ApiCall): The ApiCall instance for making async API requests. + """ + + resource_path: typing.Final[str] = "/metrics.json" + + def __init__(self, api_call: ApiCall): + """ + Initialize the Metrics instance. + + Args: + api_call (ApiCall): The ApiCall instance for making async API requests. + """ + self.api_call = api_call + + def retrieve(self) -> MetricsResponse: + """ + Retrieve metrics from the Typesense API. 
+
+ Returns:
+ MetricsResponse: A dictionary containing system and Typesense metrics.
+
+ Example:
+ >>> metrics = Metrics(api_call)
+ >>> response = metrics.retrieve()
+ >>> print(response["system_cpu_active_percentage"])
+ """
+ response: MetricsResponse = self.api_call.get(
+ Metrics.resource_path,
+ as_json=True,
+ entity_type=MetricsResponse,
+ )
+ return response
diff --git a/src/typesense/sync/multi_search.py b/src/typesense/sync/multi_search.py
new file mode 100644
index 0000000..2c81be6
--- /dev/null
+++ b/src/typesense/sync/multi_search.py
@@ -0,0 +1,108 @@
+"""
+This module provides functionality for performing multi-search operations in the Typesense API.
+
+It contains the MultiSearch class, which allows for executing multiple search queries
+in a single API call.
+
+Classes:
+ MultiSearch: Manages multi-search operations in the Typesense API.
+
+Dependencies:
+ - typesense.sync.api_call: Provides the ApiCall class for making API requests.
+ - typesense.preprocess: Provides the stringify_search_params function for parameter processing.
+ - typesense.types.document: Provides the MultiSearchCommonParameters type.
+ - typesense.types.multi_search: Provides MultiSearchRequestSchema and MultiSearchResponse types.
+
+Note: This module uses conditional imports to support both Python 3.11+ and earlier versions.
+"""
+
+import sys
+
+from .api_call import ApiCall
+from typesense.preprocess import stringify_search_params
+from typesense.types.document import MultiSearchCommonParameters
+from typesense.types.multi_search import MultiSearchRequestSchema, MultiSearchResponse
+
+if sys.version_info >= (3, 11):
+ import typing
+else:
+ import typing_extensions as typing
+
+
+class MultiSearch:
+ """
+ Manages multi-search operations in the Typesense API.
+
+ This class provides methods to perform multiple search queries in a single API call.
+ + Attributes: + resource_path (str): The API endpoint path for multi-search operations. + api_call (ApiCall): The ApiCall instance for making async API requests. + """ + + resource_path: typing.Final[str] = "/multi_search" + + def __init__(self, api_call: ApiCall) -> None: + """ + Initialize the MultiSearch instance. + + Args: + api_call (ApiCall): The ApiCall instance for making async API requests. + """ + self.api_call = api_call + + def perform( + self, + search_queries: MultiSearchRequestSchema, + common_params: typing.Union[MultiSearchCommonParameters, None] = None, + ) -> MultiSearchResponse: + """ + Perform a multi-search operation. + + This method allows executing multiple search queries in a single API call. + It processes the search parameters, sends the request to the Typesense API, + and returns the multi-search response. + + Args: + search_queries (MultiSearchRequestSchema): + A dictionary containing the list of search queries to perform. + The dictionary should have a 'searches' key with a list of search + parameter dictionaries. + common_params (Union[MultiSearchCommonParameters, None], optional): + Common parameters to apply to all search queries. Defaults to None. + + Returns: + MultiSearchResponse: + The response from the multi-search operation, containing + the results of all search queries. + + Example: + >>> multi_search = MultiSearch(async_api_call) + >>> response = await multi_search.perform( + ... { + ... "searches": [ + ... { + ... "q": "com", + ... "query_by": "company_name", + ... "collection": "companies", + ... }, + ... ], + ... } + ... 
) + """ + stringified_search_params = [ + stringify_search_params(search_params) + for search_params in search_queries.get("searches") + ] + search_body = { + "searches": stringified_search_params, + "union": search_queries.get("union", False), + } + response: MultiSearchResponse = self.api_call.post( + MultiSearch.resource_path, + body=search_body, + params=common_params, + as_json=True, + entity_type=MultiSearchResponse, + ) + return response diff --git a/src/typesense/sync/nl_search_model.py b/src/typesense/sync/nl_search_model.py new file mode 100644 index 0000000..88bac30 --- /dev/null +++ b/src/typesense/sync/nl_search_model.py @@ -0,0 +1,102 @@ +""" +This module provides async functionality for managing individual NL search models in Typesense. + +It contains the NLSearchModel class, which allows for retrieving, updating, and deleting +NL search models asynchronously. + +Classes: + NLSearchModel: Manages async operations on a single NL search model in the Typesense API. + +Dependencies: + - typesense.async_api_call: Provides the ApiCall class for making async API requests. + - typesense.types.nl_search_model: Provides NLSearchModelDeleteSchema, NLSearchModelSchema, and NLSearchModelUpdateSchema types. + +Note: This module uses conditional imports to support both Python 3.11+ and earlier versions. +""" + +from .api_call import ApiCall +from typesense.types.nl_search_model import ( + NLSearchModelDeleteSchema, + NLSearchModelSchema, + NLSearchModelUpdateSchema, +) + + +class NLSearchModel: + """ + Manages async operations on a single NL search model in the Typesense API. + + This class provides async methods to retrieve, update, and delete an NL search model. + + Attributes: + model_id (str): The ID of the NL search model. + api_call (ApiCall): The ApiCall instance for making async API requests. + """ + + def __init__(self, api_call: ApiCall, model_id: str) -> None: + """ + Initialize the NLSearchModel instance. 
+ + Args: + api_call (ApiCall): The ApiCall instance for making async API requests. + model_id (str): The ID of the NL search model. + """ + self.model_id = model_id + self.api_call = api_call + + def retrieve(self) -> NLSearchModelSchema: + """ + Retrieve this specific NL search model. + + Returns: + NLSearchModelSchema: The schema containing the NL search model details. + """ + response: NLSearchModelSchema = self.api_call.get( + self._endpoint_path, + as_json=True, + entity_type=NLSearchModelSchema, + ) + return response + + def update(self, model: NLSearchModelUpdateSchema) -> NLSearchModelSchema: + """ + Update this specific NL search model. + + Args: + model (NLSearchModelUpdateSchema): + The schema containing the updated model details. + + Returns: + NLSearchModelSchema: The schema containing the updated NL search model. + """ + response: NLSearchModelSchema = self.api_call.put( + self._endpoint_path, + body=model, + entity_type=NLSearchModelSchema, + ) + return response + + def delete(self) -> NLSearchModelDeleteSchema: + """ + Delete this specific NL search model. + + Returns: + NLSearchModelDeleteSchema: The schema containing the deletion response. + """ + response: NLSearchModelDeleteSchema = self.api_call.delete( + self._endpoint_path, + entity_type=NLSearchModelDeleteSchema, + ) + return response + + @property + def _endpoint_path(self) -> str: + """ + Construct the API endpoint path for this specific NL search model. + + Returns: + str: The constructed endpoint path. + """ + from .nl_search_models import NLSearchModels + + return "/".join([NLSearchModels.resource_path, self.model_id]) diff --git a/src/typesense/sync/nl_search_models.py b/src/typesense/sync/nl_search_models.py new file mode 100644 index 0000000..a31ca32 --- /dev/null +++ b/src/typesense/sync/nl_search_models.py @@ -0,0 +1,130 @@ +""" +This module provides async functionality for managing NL search models in Typesense. 
+ +It contains the NLSearchModels class, which allows for creating, retrieving, and +accessing individual NL search models asynchronously. + +Classes: + NLSearchModels: Manages NL search models in the Typesense API (async). + +Dependencies: + - typesense.async_api_call: Provides the ApiCall class for making async API requests. + - typesense.async_nl_search_model: Provides the NLSearchModel class for individual NL search model operations. + - typesense.types.nl_search_model: Provides NLSearchModelCreateSchema, NLSearchModelSchema, and NLSearchModelsRetrieveSchema types. + +Note: This module uses conditional imports to support both Python 3.11+ and earlier versions. +""" + +import sys + +from .api_call import ApiCall +from .nl_search_model import NLSearchModel +from typesense.types.nl_search_model import ( + NLSearchModelCreateSchema, + NLSearchModelSchema, + NLSearchModelsRetrieveSchema, +) + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + + +class NLSearchModels: + """ + Manages NL search models in the Typesense API (async). + + This class provides async methods to create, retrieve, and access individual NL search models. + + Attributes: + resource_path (str): The API endpoint path for NL search models operations. + api_call (ApiCall): The ApiCall instance for making async API requests. + nl_search_models (Dict[str, NLSearchModel]): + A dictionary of NLSearchModel instances, keyed by model ID. + """ + + resource_path: typing.Final[str] = "/nl_search_models" + + def __init__(self, api_call: ApiCall) -> None: + """ + Initialize the NLSearchModels instance. + + Args: + api_call (ApiCall): The ApiCall instance for making async API requests. + """ + self.api_call = api_call + self.nl_search_models: typing.Dict[str, NLSearchModel] = {} + + def __getitem__(self, model_id: str) -> NLSearchModel: + """ + Get or create an NLSearchModel instance for a given model ID. 
+ + This method allows accessing NL search models using dictionary-like syntax. + If the NLSearchModel instance doesn't exist, it creates a new one. + + Args: + model_id (str): The ID of the NL search model. + + Returns: + NLSearchModel: The NLSearchModel instance for the specified model ID. + + Example: + >>> nl_search_models = NLSearchModels(async_api_call) + >>> model = nl_search_models["model_id"] + """ + if model_id not in self.nl_search_models: + self.nl_search_models[model_id] = NLSearchModel( + self.api_call, + model_id, + ) + return self.nl_search_models[model_id] + + def create(self, model: NLSearchModelCreateSchema) -> NLSearchModelSchema: + """ + Create a new NL search model. + + Args: + model (NLSearchModelCreateSchema): + The schema for creating the NL search model. + + Returns: + NLSearchModelSchema: The created NL search model. + + Example: + >>> nl_search_models = NLSearchModels(async_api_call) + >>> model = await nl_search_models.create( + ... { + ... "api_key": "key", + ... "model_name": "openai/gpt-3.5-turbo", + ... "system_prompt": "System prompt", + ... } + ... ) + """ + response: NLSearchModelSchema = self.api_call.post( + endpoint=NLSearchModels.resource_path, + entity_type=NLSearchModelSchema, + as_json=True, + body=model, + ) + return response + + def retrieve(self) -> NLSearchModelsRetrieveSchema: + """ + Retrieve all NL search models. + + Returns: + NLSearchModelsRetrieveSchema: A list of all NL search models. + + Example: + >>> nl_search_models = NLSearchModels(async_api_call) + >>> all_models = await nl_search_models.retrieve() + >>> for model in all_models: + ... 
print(model["id"]) + """ + response: NLSearchModelsRetrieveSchema = self.api_call.get( + endpoint=NLSearchModels.resource_path, + entity_type=NLSearchModelsRetrieveSchema, + as_json=True, + ) + return response diff --git a/src/typesense/sync/operations.py b/src/typesense/sync/operations.py new file mode 100644 index 0000000..e560b76 --- /dev/null +++ b/src/typesense/sync/operations.py @@ -0,0 +1,279 @@ +""" +This module provides async functionality for performing various operations in the Typesense API. + +It contains the Operations class, which handles different API operations such as +health checks, snapshots, and configuration changes asynchronously. + +Classes: + Operations: Manages various async operations in the Typesense API. + +Dependencies: + - typesense.types.operations: + Provides type definitions for operation responses and parameters. + - typesense.async_api_call: Provides the ApiCall class for making async API requests. + +Note: This module uses conditional imports to support both Python 3.11+ and earlier versions. +""" + +import sys + +from .api_call import ApiCall +from typesense.types.operations import ( + HealthCheckResponse, + LogSlowRequestsTimeParams, + OperationResponse, + SchemaChangesResponse, + SnapshotParameters, +) + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + + +class Operations: + """ + Manages various async operations in the Typesense API. + + This class provides async methods to perform different operations such as + health checks, snapshots, and configuration changes. + + Attributes: + resource_path (str): The base path for operations endpoints. + health_path (str): The path for the health check endpoint. + config_path (str): The path for the configuration endpoint. + api_call (ApiCall): The ApiCall instance for making async API requests. 
+ """ + + resource_path: typing.Final[str] = "/operations" + health_path: typing.Final[str] = "/health" + config_path: typing.Final[str] = "/config" + schema_changes: typing.Final[str] = "/schema_changes" + + def __init__(self, api_call: ApiCall): + """ + Initialize the Operations instance. + + Args: + api_call (ApiCall): The ApiCall instance for making async API requests. + """ + self.api_call = api_call + + @typing.overload + def perform( + self, + operation_name: typing.Literal["schema_changes"], + query_params: None = None, + ) -> typing.List[SchemaChangesResponse]: + """ + Perform a schema_changes operation. + + Args: + operation_name (Literal["schema_changes"]): The name of the operation. + query_params (None, optional): Query parameters (not used for schema_changes operation). + + Returns: + List[SchemaChangesResponse]: The response from the schema_changes operation. + """ + + @typing.overload + def perform( + self, + operation_name: typing.Literal["vote"], + query_params: None = None, + ) -> OperationResponse: + """ + Perform a vote operation. + + Args: + operation_name (Literal["vote"]): The name of the operation. + query_params (None, optional): Query parameters (not used for vote operation). + + Returns: + OperationResponse: The response from the vote operation. + """ + + @typing.overload + def perform( + self, + operation_name: typing.Literal["db/compact"], + query_params: None = None, + ) -> OperationResponse: + """ + Perform a database compaction operation. + + Args: + operation_name (Literal["db/compact"]): The name of the operation. + query_params (None, optional): Query parameters (not used for db/compact operation). + + Returns: + OperationResponse: The response from the database compaction operation. + """ + + @typing.overload + def perform( + self, + operation_name: typing.Literal["cache/clear"], + query_params: None = None, + ) -> OperationResponse: + """ + Perform a cache clear operation. 
+ + Args: + operation_name (Literal["cache/clear"]): The name of the operation. + query_params (None, optional): + Query parameters (not used for cache/clear operation). + + Returns: + OperationResponse: The response from the cache clear operation. + """ + + @typing.overload + def perform( + self, + operation_name: str, + query_params: typing.Union[typing.Dict[str, str], None] = None, + ) -> OperationResponse: + """ + Perform a generic operation. + + Args: + operation_name (str): The name of the operation. + query_params (Union[Dict[str, str], None], optional): + Query parameters for the operation. + + Returns: + OperationResponse: The response from the operation. + """ + + @typing.overload + def perform( + self, + operation_name: typing.Literal["snapshot"], + query_params: SnapshotParameters, + ) -> OperationResponse: + """ + Perform a snapshot operation. + + Args: + operation_name (Literal["snapshot"]): The name of the operation. + query_params (SnapshotParameters): Query parameters for the snapshot operation. + + Returns: + OperationResponse: The response from the snapshot operation. + """ + + def perform( + self, + operation_name: typing.Union[ + typing.Literal[ + "snapshot", + "vote", + "db/compact", + "cache/clear", + "schema_changes", + ], + str, + ], + query_params: typing.Union[ + SnapshotParameters, + typing.Dict[str, str], + None, + ] = None, + ) -> OperationResponse: + """ + Perform an operation on the Typesense API. + + This method is the actual implementation for all the overloaded perform methods. + + Args: + operation_name (Literal["snapshot, vote, db/compact, cache/clear, schema_changes"]): + The name of the operation to perform. + query_params (Union[SnapshotParameters, Dict[str, str], None], optional): + Query parameters for the operation. + + Returns: + Union[OperationResponse, List[SchemaChangesResponse]]: + The response from the performed operation. 
+ + Example: + >>> operations = Operations(async_api_call) + >>> response = await operations.perform("vote") + >>> health = await operations.is_healthy() + """ + response: OperationResponse = self.api_call.post( + self._endpoint_path(operation_name), + params=query_params, + as_json=True, + entity_type=OperationResponse, + ) + return response + + def is_healthy(self) -> bool: + """ + Check if the Typesense server is healthy. + + Returns: + bool: True if the server is healthy, False otherwise. + + Example: + >>> operations = Operations(async_api_call) + >>> healthy = await operations.is_healthy() + >>> print(healthy) + """ + call_resp: HealthCheckResponse = self.api_call.get( + Operations.health_path, + as_json=True, + entity_type=HealthCheckResponse, + ) + if isinstance(call_resp, typing.Dict): + is_ok: bool = call_resp.get("ok", False) + else: + is_ok = False + return is_ok + + def toggle_slow_request_log( + self, + log_slow_requests_time_params: LogSlowRequestsTimeParams, + ) -> typing.Dict[str, typing.Union[str, bool]]: + """ + Toggle the slow request log configuration. + + Args: + log_slow_requests_time_params (LogSlowRequestsTimeParams): + Parameters for configuring slow request logging. + + Returns: + Dict[str, Union[str, bool]]: The response from the configuration change operation. + + Example: + >>> operations = Operations(async_api_call) + >>> response = await operations.toggle_slow_request_log( + ... {"log_slow_requests_time_ms": 100} + ... ) + """ + data_dashed = { + key.replace("_", "-"): dashed_value + for key, dashed_value in log_slow_requests_time_params.items() + } + response: typing.Dict[str, typing.Union[str, bool]] = self.api_call.post( + Operations.config_path, + as_json=True, + entity_type=typing.Dict[str, typing.Union[str, bool]], + body=data_dashed, + ) + return response + + @staticmethod + def _endpoint_path(operation_name: str) -> str: + """ + Generate the endpoint path for a given operation. 
+ + Args: + operation_name (str): The name of the operation. + + Returns: + str: The full endpoint path for the operation. + """ + return "/".join([Operations.resource_path, operation_name]) diff --git a/src/typesense/sync/override.py b/src/typesense/sync/override.py new file mode 100644 index 0000000..8a24e9e --- /dev/null +++ b/src/typesense/sync/override.py @@ -0,0 +1,112 @@ +""" +This module provides async functionality for managing individual overrides in Typesense. + +Classes: + - Override: Handles async operations related to a specific override within a collection. + +Methods: + - __init__: Initializes the Override object. + - retrieve: Retrieves the details of this specific override. + - delete: Deletes this specific override. + +Attributes: + - _endpoint_path: The API endpoint path for this specific override. + +The Override class interacts with the Typesense API to manage operations on a +specific override within a collection. It provides methods to retrieve and delete +individual overrides. + +For more information regarding Overrides, refer to the Curation [documentation] +(https://typesense.org/docs/27.0/api/curation.html#curation). + +This module uses type hinting and is compatible with Python 3.11+ as well as earlier +versions through the use of the typing_extensions library. +""" + +from .api_call import ApiCall +from typesense.logger import warn_deprecation +from typesense.types.override import OverrideDeleteSchema, OverrideSchema + + +class Override: + """ + Class for managing individual overrides in a Typesense collection (async). + + This class provides methods to interact with a specific override, + including retrieving and deleting it. + + Attributes: + api_call (ApiCall): The API call object for making requests. + collection_name (str): The name of the collection. + override_id (str): The ID of the override. 
+    """
+
+    def __init__(
+        self,
+        api_call: ApiCall,
+        collection_name: str,
+        override_id: str,
+    ) -> None:
+        """
+        Initialize the Override object.
+
+        Args:
+            api_call (ApiCall): The API call object for making requests.
+            collection_name (str): The name of the collection.
+            override_id (str): The ID of the override.
+        """
+        self.api_call = api_call
+        self.collection_name = collection_name
+        self.override_id = override_id
+
+    def retrieve(self) -> OverrideSchema:
+        """
+        Retrieve this specific override.
+
+        Returns:
+            OverrideSchema: The schema containing the override details.
+        """
+        response: OverrideSchema = self.api_call.get(
+            self._endpoint_path,
+            entity_type=OverrideSchema,
+            as_json=True,
+        )
+        return response
+
+    def delete(self) -> OverrideDeleteSchema:
+        """
+        Delete this specific override.
+
+        Returns:
+            OverrideDeleteSchema: The schema containing the deletion response.
+        """
+        response: OverrideDeleteSchema = self.api_call.delete(
+            self._endpoint_path,
+            entity_type=OverrideDeleteSchema,
+        )
+        return response
+
+    @property
+    @warn_deprecation(  # type: ignore[untyped-decorator]
+        "The override API (collections/{collection}/overrides/{override_id}) is deprecated and will be removed in v30+. "
+        "Use curation sets (curation_sets) instead.",
+        flag_name="overrides_deprecation",
+    )
+    def _endpoint_path(self) -> str:
+        """
+        Construct the API endpoint path for this specific override.
+
+        Returns:
+            str: The constructed endpoint path.
+        """
+        from .collections import Collections
+        from .overrides import Overrides
+
+        return "/".join(
+            [
+                Collections.resource_path,
+                self.collection_name,
+                Overrides.resource_path,
+                self.override_id,
+            ],
+        )
diff --git a/src/typesense/sync/overrides.py b/src/typesense/sync/overrides.py
new file mode 100644
index 0000000..7682ff5
--- /dev/null
+++ b/src/typesense/sync/overrides.py
@@ -0,0 +1,157 @@
+"""
+This module provides synchronous functionality for managing overrides in Typesense.
+ +Classes: + - Overrides: Handles async operations related to overrides within a collection. + +Methods: + - __init__: Initializes the Overrides object. + - __getitem__: Retrieves or creates an Override object for a given override_id. + - _endpoint_path: Constructs the API endpoint path for override operations. + - upsert: Creates or updates an override. + - retrieve: Retrieves all overrides for the collection. + +Attributes: + - RESOURCE_PATH: The API resource path for overrides. + +The Overrides class interacts with the Typesense API to manage override operations +within a specific collection. It provides methods to create, update, and retrieve +overrides, as well as access individual Override objects. + +For more information regarding Overrides, refer to the Curation [documentation] +(https://typesense.org/docs/27.0/api/curation.html#curation). + +This module uses type hinting and is compatible with Python 3.11+ as well as earlier +versions through the use of the typing_extensions library. +""" + +import sys + +from typing_extensions import deprecated + +from .api_call import ApiCall +from .override import Override +from typesense.logger import warn_deprecation +from typesense.types.override import ( + OverrideCreateSchema, + OverrideRetrieveSchema, + OverrideSchema, +) + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + + +@deprecated("SyncOverrides is deprecated on v30+. Use client.curation_sets instead.") +class Overrides: + """ + Class for managing overrides in a Typesense collection (async). + + This class provides methods to interact with overrides, including + retrieving, creating, and updating them. + + Attributes: + RESOURCE_PATH (str): The API resource path for overrides. + api_call (ApiCall): The API call object for making requests. + collection_name (str): The name of the collection. + overrides (Dict[str, Override]): A dictionary of Override objects. 
+ """ + + resource_path: typing.Final[str] = "overrides" + + def __init__( + self, + api_call: ApiCall, + collection_name: str, + ) -> None: + """ + Initialize the Overrides object. + + Args: + api_call (ApiCall): The API call object for making requests. + collection_name (str): The name of the collection. + """ + self.api_call = api_call + self.collection_name = collection_name + self.overrides: typing.Dict[str, Override] = {} + + def __getitem__(self, override_id: str) -> Override: + """ + Get or create an Override object for a given override_id. + + Args: + override_id (str): The ID of the override. + + Returns: + Override: The Override object for the given ID. + """ + if not self.overrides.get(override_id): + self.overrides[override_id] = Override( + self.api_call, + self.collection_name, + override_id, + ) + return self.overrides[override_id] + + def upsert( + self, override_id: str, schema: OverrideCreateSchema + ) -> OverrideSchema: + """ + Create or update an override. + + Args: + id (str): The ID of the override. + schema (OverrideCreateSchema): The schema for creating or updating the override. + + Returns: + OverrideSchema: The created or updated override. + """ + response: OverrideSchema = self.api_call.put( + endpoint=self._endpoint_path(override_id), + entity_type=OverrideSchema, + body=schema, + ) + return response + + def retrieve(self) -> OverrideRetrieveSchema: + """ + Retrieve all overrides for the collection. + + Returns: + OverrideRetrieveSchema: The schema containing all overrides. + """ + response: OverrideRetrieveSchema = self.api_call.get( + self._endpoint_path(), + entity_type=OverrideRetrieveSchema, + as_json=True, + ) + return response + + @warn_deprecation( # type: ignore[untyped-decorator] + "SyncOverrides is deprecated on v30+. 
Use client.curation_sets instead.",
+        flag_name="overrides_deprecation",
+    )
+    def _endpoint_path(self, override_id: typing.Union[str, None] = None) -> str:
+        """
+        Construct the API endpoint path for override operations.
+
+        Args:
+            override_id (Union[str, None], optional): The ID of the override. Defaults to None.
+
+        Returns:
+            str: The constructed endpoint path.
+        """
+        from .collections import Collections
+
+        override_id = override_id or ""
+
+        return "/".join(
+            [
+                Collections.resource_path,
+                self.collection_name,
+                Overrides.resource_path,
+                override_id,
+            ],
+        )
diff --git a/src/typesense/sync/stemming.py b/src/typesense/sync/stemming.py
new file mode 100644
index 0000000..9653364
--- /dev/null
+++ b/src/typesense/sync/stemming.py
@@ -0,0 +1,50 @@
+"""
+Module for managing stemming dictionaries in Typesense (sync).
+
+This module provides a class for managing stemming dictionaries in Typesense,
+including creating, updating, and retrieving them synchronously.
+
+Classes:
+    - Stemming: Handles synchronous operations related to stemming dictionaries.
+
+Attributes:
+    - StemmingDictionaries: The StemmingDictionaries object for managing stemming dictionaries.
+
+Methods:
+    - __init__: Initializes the Stemming object.
+
+The Stemming class interacts with the Typesense API to manage stemming dictionary operations.
+It provides access to the StemmingDictionaries object for managing stemming dictionaries.
+
+For more information on stemming dictionaries, refer to the Stemming
+[documentation](https://typesense.org/docs/28.0/api/stemming.html)
+
+This module uses type hinting and is compatible with Python 3.11+ as well as earlier
+versions through the use of the typing_extensions library.
+"""
+
+from .api_call import ApiCall
+from .stemming_dictionaries import StemmingDictionaries
+
+
+class Stemming(object):
+    """
+    Class for managing stemming dictionaries in Typesense (sync).
+ + This class provides methods to interact with stemming dictionaries, including + creating, updating, and retrieving them. + + Attributes: + dictionaries (StemmingDictionaries): The StemmingDictionaries object for managing + stemming dictionaries. + """ + + def __init__(self, api_call: ApiCall): + """ + Initialize the Stemming object. + + Args: + api_call (ApiCall): The API call object for making requests. + """ + self.api_call = api_call + self.dictionaries = StemmingDictionaries(api_call) diff --git a/src/typesense/sync/stemming_dictionaries.py b/src/typesense/sync/stemming_dictionaries.py new file mode 100644 index 0000000..071d8c3 --- /dev/null +++ b/src/typesense/sync/stemming_dictionaries.py @@ -0,0 +1,185 @@ +""" +Module for interacting with the stemming dictionaries endpoint of the Typesense API (async). + +This module provides a class for managing stemming dictionaries in Typesense, including creating +and updating them asynchronously. + +Classes: + - StemmingDictionaries: Handles async operations related to stemming dictionaries. + +Methods: + - __init__: Initializes the StemmingDictionaries object. + - __getitem__: Retrieves or creates an StemmingDictionary object for a given dictionary_id. + - upsert: Creates or updates a stemming dictionary. + - _upsert_list: Creates or updates a list of stemming dictionaries. + - _dump_to_jsonl: Dumps a list of StemmingDictionaryCreateSchema objects to a JSONL string. + - _parse_response: Parses the response from the upsert operation. + - _upsert_raw: Performs the raw upsert operation. + - _endpoint_path: Constructs the API endpoint path for this specific stemming dictionary. + +The StemmingDictionaries class interacts with the Typesense API to manage stemming dictionary +operations. It provides methods to create, update, and retrieve stemming dictionaries, as well as +access individual StemmingDictionary objects. 
+ +For more information on stemming dictionaries, +refer to the Stemming [documentation](https://typesense.org/docs/28.0/api/stemming.html) +""" + +import json +import sys + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + +from .api_call import ApiCall +from .stemming_dictionary import StemmingDictionary +from typesense.types.stemming import ( + StemmingDictionariesRetrieveSchema, + StemmingDictionaryCreateSchema, +) + + +class StemmingDictionaries: + """ + Class for managing stemming dictionaries in Typesense (async). + + This class provides methods to interact with stemming dictionaries, including + creating, updating, and retrieving them. + + Attributes: + api_call (ApiCall): The API call object for making requests. + stemming_dictionaries (Dict[str, StemmingDictionary]): A dictionary of + StemmingDictionary objects. + """ + + resource_path: typing.Final[str] = "/stemming/dictionaries" + + def __init__(self, api_call: ApiCall): + """ + Initialize the StemmingDictionaries object. + + Args: + api_call (ApiCall): The API call object for making requests. + """ + self.api_call = api_call + self.stemming_dictionaries: typing.Dict[str, StemmingDictionary] = {} + + def __getitem__(self, dictionary_id: str) -> StemmingDictionary: + """ + Get or create an StemmingDictionary object for a given dictionary_id. + + Args: + dictionary_id (str): The ID of the stemming dictionary. + + Returns: + StemmingDictionary: The StemmingDictionary object for the given ID. + """ + if not self.stemming_dictionaries.get(dictionary_id): + self.stemming_dictionaries[dictionary_id] = StemmingDictionary( + self.api_call, + dictionary_id, + ) + return self.stemming_dictionaries[dictionary_id] + + def retrieve(self) -> StemmingDictionariesRetrieveSchema: + """ + Retrieve the list of stemming dictionaries. + + Returns: + StemmingDictionariesRetrieveSchema: The list of stemming dictionaries. 
+ """ + response: StemmingDictionariesRetrieveSchema = self.api_call.get( + self._endpoint_path(), + entity_type=StemmingDictionariesRetrieveSchema, + ) + return response + + @typing.overload + def upsert( + self, + dictionary_id: str, + word_root_combinations: typing.Union[str, bytes], + ) -> str: ... + + @typing.overload + def upsert( + self, + dictionary_id: str, + word_root_combinations: typing.List[StemmingDictionaryCreateSchema], + ) -> typing.List[StemmingDictionaryCreateSchema]: ... + + def upsert( + self, + dictionary_id: str, + word_root_combinations: typing.Union[ + typing.List[StemmingDictionaryCreateSchema], + str, + bytes, + ], + ) -> typing.Union[str, typing.List[StemmingDictionaryCreateSchema]]: + if isinstance(word_root_combinations, (str, bytes)): + return self._upsert_raw(dictionary_id, word_root_combinations) + + return self._upsert_list(dictionary_id, word_root_combinations) + + def _upsert_list( + self, + dictionary_id: str, + word_root_combinations: typing.List[StemmingDictionaryCreateSchema], + ) -> typing.List[StemmingDictionaryCreateSchema]: + word_combos_in_jsonl = self._dump_to_jsonl(word_root_combinations) + response = self._upsert_raw(dictionary_id, word_combos_in_jsonl) + return self._parse_response(response) + + def _dump_to_jsonl( + self, + word_root_combinations: typing.List[StemmingDictionaryCreateSchema], + ) -> str: + word_root_strs = [json.dumps(combo) for combo in word_root_combinations] + + return "\n".join(word_root_strs) + + def _parse_response( + self, + response: str, + ) -> typing.List[StemmingDictionaryCreateSchema]: + object_list: typing.List[StemmingDictionaryCreateSchema] = [] + + for line in response.split("\n"): + try: + decoded = json.loads(line) + except json.JSONDecodeError as err: + raise ValueError(f"Failed to parse JSON from response: {line}") from err + object_list.append(decoded) + return object_list + + def _upsert_raw( + self, + dictionary_id: str, + word_root_combinations: typing.Union[bytes, str], + ) 
-> str: + response: str = self.api_call.post( + self._endpoint_path("import"), + body=word_root_combinations, + as_json=False, + entity_type=str, + params={"id": dictionary_id}, + ) + return response + + def _endpoint_path(self, action: typing.Union[str, None] = None) -> str: + """ + Construct the API endpoint path for this specific stemming dictionary. + + Args: + action (str, optional): The action to perform on the stemming dictionary. + Defaults to None. + + Returns: + str: The constructed endpoint path. + """ + if action: + return f"{StemmingDictionaries.resource_path}/{action}" + return StemmingDictionaries.resource_path diff --git a/src/typesense/sync/stemming_dictionary.py b/src/typesense/sync/stemming_dictionary.py new file mode 100644 index 0000000..f2b46d7 --- /dev/null +++ b/src/typesense/sync/stemming_dictionary.py @@ -0,0 +1,75 @@ +""" +Module for managing individual stemming dictionaries in Typesense (async). + +This module provides a class for managing individual stemming dictionaries in Typesense, +including retrieving them asynchronously. + +Classes: + - StemmingDictionary: Handles async operations related to individual stemming dictionaries. + +Methods: + - __init__: Initializes the StemmingDictionary object. + - retrieve: Retrieves this specific stemming dictionary. + +The StemmingDictionary class interacts with the Typesense API to manage operations on a +specific stemming dictionary. It provides methods to retrieve the dictionary details. + +For more information on stemming dictionaries, refer to the Stemming +[documentation](https://typesense.org/docs/28.0/api/stemming.html) + +This module uses type hinting and is compatible with Python 3.11+ as well as earlier +versions through the use of the typing_extensions library. +""" + +from .api_call import ApiCall +from typesense.types.stemming import StemmingDictionarySchema + + +class StemmingDictionary: + """ + Class for managing individual stemming dictionaries in Typesense (async). 
+ + This class provides methods to interact with a specific stemming dictionary, + including retrieving it. + + Attributes: + api_call (ApiCall): The API call object for making requests. + dict_id (str): The ID of the stemming dictionary. + """ + + def __init__(self, api_call: ApiCall, dict_id: str): + """ + Initialize the StemmingDictionary object. + + Args: + api_call (ApiCall): The API call object for making requests. + dict_id (str): The ID of the stemming dictionary. + """ + self.api_call = api_call + self.dict_id = dict_id + + def retrieve(self) -> StemmingDictionarySchema: + """ + Retrieve this specific stemming dictionary. + + Returns: + StemmingDictionarySchema: The schema containing the stemming dictionary details. + """ + response: StemmingDictionarySchema = self.api_call.get( + self._endpoint_path, + entity_type=StemmingDictionarySchema, + as_json=True, + ) + return response + + @property + def _endpoint_path(self) -> str: + """ + Construct the API endpoint path for this specific stemming dictionary. + + Returns: + str: The constructed endpoint path. + """ + from .stemming_dictionaries import StemmingDictionaries + + return "/".join([StemmingDictionaries.resource_path, self.dict_id]) diff --git a/src/typesense/sync/stopwords.py b/src/typesense/sync/stopwords.py new file mode 100644 index 0000000..b0641eb --- /dev/null +++ b/src/typesense/sync/stopwords.py @@ -0,0 +1,117 @@ +""" +This module provides async functionality for managing stopwords in Typesense. + +Classes: + - Stopwords: Handles async operations related to stopwords and stopword sets. + +Methods: + - __init__: Initializes the Stopwords object. + - __getitem__: Retrieves or creates an StopwordsSet object for a given stopwords_set_id. + - upsert: Creates or updates a stopwords set. + - retrieve: Retrieves all stopwords sets. + +Attributes: + - RESOURCE_PATH: The API resource path for stopwords operations. + +The Stopwords class interacts with the Typesense API to manage stopwords operations. 
+It provides methods to create, update, and retrieve stopwords sets, as well as access +individual StopwordsSet objects. + +This module uses type hinting and is compatible with Python 3.11+ as well as earlier +versions through the use of the typing_extensions library. +""" + +import sys + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + +from .api_call import ApiCall +from .stopwords_set import StopwordsSet +from typesense.types.stopword import ( + StopwordCreateSchema, + StopwordSchema, + StopwordsRetrieveSchema, +) + + +class Stopwords: + """ + Class for managing stopwords in Typesense (async). + + This class provides methods to interact with stopwords and stopwords sets, including + creating, updating, retrieving, and accessing individual stopwords sets. + + Attributes: + RESOURCE_PATH (str): The API resource path for stopwords operations. + api_call (ApiCall): The API call object for making requests. + stopwords_sets (Dict[str, StopwordsSet]): A dictionary of StopwordsSet objects. + """ + + resource_path: typing.Final[str] = "/stopwords" + + def __init__(self, api_call: ApiCall): + """ + Initialize the Stopwords object. + + Args: + api_call (ApiCall): The API call object for making requests. + """ + self.api_call = api_call + self.stopwords_sets: typing.Dict[str, StopwordsSet] = {} + + def __getitem__(self, stopwords_set_id: str) -> StopwordsSet: + """ + Get or create an StopwordsSet object for a given stopwords_set_id. + + Args: + stopwords_set_id (str): The ID of the stopwords set. + + Returns: + StopwordsSet: The StopwordsSet object for the given ID. + """ + if not self.stopwords_sets.get(stopwords_set_id): + self.stopwords_sets[stopwords_set_id] = StopwordsSet( + self.api_call, + stopwords_set_id, + ) + return self.stopwords_sets[stopwords_set_id] + + def upsert( + self, + stopwords_set_id: str, + stopwords_set: StopwordCreateSchema, + ) -> StopwordSchema: + """ + Create or update a stopwords set. 
+ + Args: + stopwords_set_id (str): The ID of the stopwords set to upsert. + stopwords_set (StopwordCreateSchema): + The schema for creating or updating the stopwords set. + + Returns: + StopwordSchema: The created or updated stopwords set. + """ + response: StopwordSchema = self.api_call.put( + "/".join([Stopwords.resource_path, stopwords_set_id]), + body=stopwords_set, + entity_type=StopwordSchema, + ) + return response + + def retrieve(self) -> StopwordsRetrieveSchema: + """ + Retrieve all stopwords sets. + + Returns: + StopwordsRetrieveSchema: The schema containing all stopwords sets. + """ + response: StopwordsRetrieveSchema = self.api_call.get( + Stopwords.resource_path, + as_json=True, + entity_type=StopwordsRetrieveSchema, + ) + return response diff --git a/src/typesense/sync/stopwords_set.py b/src/typesense/sync/stopwords_set.py new file mode 100644 index 0000000..072d367 --- /dev/null +++ b/src/typesense/sync/stopwords_set.py @@ -0,0 +1,87 @@ +""" +This module provides async functionality for managing individual stopwords sets in Typesense. + +Classes: + - StopwordsSet: Handles async operations related to a specific stopwords set. + +Methods: + - __init__: Initializes the StopwordsSet object. + - retrieve: Retrieves the details of this specific stopwords set. + - delete: Deletes this specific stopwords set. + - _endpoint_path: Constructs the API endpoint path for this specific stopwords set. + +The StopwordsSet class interacts with the Typesense API to manage operations on a +specific stopwords set. It provides methods to retrieve and delete individual stopwords sets. + +For more information regarding Stopwords, refer to the Stopwords [documentation] +(https://typesense.org/docs/27.0/api/stopwords.html). + +This module uses type hinting and is compatible with Python 3.11+ as well as earlier +versions through the use of the typing_extensions library. 
+""" + +from .api_call import ApiCall +from typesense.types.stopword import StopwordDeleteSchema, StopwordsSingleRetrieveSchema + + +class StopwordsSet: + """ + Class for managing individual stopwords sets in Typesense (async). + + This class provides methods to interact with a specific stopwords set, + including retrieving and deleting it. + + Attributes: + stopwords_set_id (str): The ID of the stopwords set. + api_call (ApiCall): The API call object for making requests. + """ + + def __init__(self, api_call: ApiCall, stopwords_set_id: str) -> None: + """ + Initialize the StopwordsSet object. + + Args: + api_call (ApiCall): The API call object for making requests. + stopwords_set_id (str): The ID of the stopwords set. + """ + self.stopwords_set_id = stopwords_set_id + self.api_call = api_call + + def retrieve(self) -> StopwordsSingleRetrieveSchema: + """ + Retrieve this specific stopwords set. + + Returns: + StopwordsSingleRetrieveSchema: The schema containing the stopwords set details. + """ + response: StopwordsSingleRetrieveSchema = self.api_call.get( + self._endpoint_path, + entity_type=StopwordsSingleRetrieveSchema, + as_json=True, + ) + return response + + def delete(self) -> StopwordDeleteSchema: + """ + Delete this specific stopwords set. + + Returns: + StopwordDeleteSchema: The schema containing the deletion response. + """ + response: StopwordDeleteSchema = self.api_call.delete( + self._endpoint_path, + entity_type=StopwordDeleteSchema, + ) + return response + + @property + def _endpoint_path(self) -> str: + """ + Construct the API endpoint path for this specific stopwords set. + + Returns: + str: The constructed endpoint path. 
+ """ + from .stopwords import Stopwords + + return "/".join([Stopwords.resource_path, self.stopwords_set_id]) diff --git a/src/typesense/sync/synonym.py b/src/typesense/sync/synonym.py new file mode 100644 index 0000000..d091fdd --- /dev/null +++ b/src/typesense/sync/synonym.py @@ -0,0 +1,104 @@ +""" +This module provides async functionality for managing individual synonyms in Typesense. + +Classes: + - Synonym: Handles async operations related to a specific synonym within a collection. + +Methods: + - __init__: Initializes the Synonym object. + - _endpoint_path: Constructs the API endpoint path for this specific synonym. + - retrieve: Retrieves the details of this specific synonym. + - delete: Deletes this specific synonym. + +The Synonym class interacts with the Typesense API to manage operations on a +specific synonym within a collection. It provides methods to retrieve and delete +individual synonyms. + +For more information regarding Synonyms, refer to the Synonyms [documentation] +(https://typesense.org/docs/27.0/api/synonyms.html#synonyms). + +This module uses type hinting and is compatible with Python 3.11+ as well as earlier +versions through the use of the typing_extensions library. +""" + +from .api_call import ApiCall +from typesense.logger import warn_deprecation +from typesense.types.synonym import SynonymDeleteSchema, SynonymSchema + + +class Synonym: + """ + Class for managing individual synonyms in a Typesense collection (async). + + This class provides methods to interact with a specific synonym, + including retrieving and deleting it. + + Attributes: + api_call (ApiCall): The API call object for making requests. + collection_name (str): The name of the collection. + synonym_id (str): The ID of the synonym. + """ + + def __init__( + self, + api_call: ApiCall, + collection_name: str, + synonym_id: str, + ) -> None: + """ + Initialize the Synonym object. + + Args: + api_call (ApiCall): The API call object for making requests. 
+            collection_name (str): The name of the collection.
+            synonym_id (str): The ID of the synonym.
+        """
+        self.api_call = api_call
+        self.collection_name = collection_name
+        self.synonym_id = synonym_id
+
+    def retrieve(self) -> SynonymSchema:
+        """
+        Retrieve this specific synonym.
+
+        Returns:
+            SynonymSchema: The schema containing the synonym details.
+        """
+        return self.api_call.get(self._endpoint_path, entity_type=SynonymSchema)
+
+    def delete(self) -> SynonymDeleteSchema:
+        """
+        Delete this specific synonym.
+
+        Returns:
+            SynonymDeleteSchema: The schema containing the deletion response.
+        """
+        return self.api_call.delete(
+            self._endpoint_path,
+            entity_type=SynonymDeleteSchema,
+        )
+
+    @property
+    @warn_deprecation(  # type: ignore[untyped-decorator]
+        "The synonym API (collections/{collection}/synonyms/{synonym_id}) is deprecated and will be removed in v30+. "
+        "Use synonym sets (synonym_sets) instead.",
+        flag_name="synonyms_deprecation",
+    )
+    def _endpoint_path(self) -> str:
+        """
+        Construct the API endpoint path for this specific synonym.
+
+        Returns:
+            str: The constructed endpoint path.
+ """ + from .collections import Collections + from .synonyms import Synonyms + + return "/".join( + [ + Collections.resource_path, + self.collection_name, + Synonyms.resource_path, + self.synonym_id, + ], + ) diff --git a/src/typesense/sync/synonym_set.py b/src/typesense/sync/synonym_set.py new file mode 100644 index 0000000..dc11d0b --- /dev/null +++ b/src/typesense/sync/synonym_set.py @@ -0,0 +1,102 @@ +"""Client for single Synonym Set operations (async).""" + +import sys + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + +from .api_call import ApiCall +from typesense.types.synonym_set import ( + SynonymItemDeleteSchema, + SynonymItemSchema, + SynonymSetCreateSchema, + SynonymSetDeleteSchema, + SynonymSetRetrieveSchema, +) + + +class SynonymSet: + def __init__(self, api_call: ApiCall, name: str) -> None: + self.api_call = api_call + self.name = name + + @property + def _endpoint_path(self) -> str: + from .synonym_sets import SynonymSets + + return "/".join([SynonymSets.resource_path, self.name]) + + def retrieve(self) -> SynonymSetRetrieveSchema: + response: SynonymSetRetrieveSchema = self.api_call.get( + self._endpoint_path, + as_json=True, + entity_type=SynonymSetRetrieveSchema, + ) + return response + + def upsert(self, set: SynonymSetCreateSchema) -> SynonymSetCreateSchema: + response: SynonymSetCreateSchema = self.api_call.put( + self._endpoint_path, + entity_type=SynonymSetCreateSchema, + body=set, + ) + return response + + def delete(self) -> SynonymSetDeleteSchema: + response: SynonymSetDeleteSchema = self.api_call.delete( + self._endpoint_path, + entity_type=SynonymSetDeleteSchema, + ) + return response + + @property + def _items_path(self) -> str: + return "/".join([self._endpoint_path, "items"]) # /synonym_sets/{name}/items + + def list_items( + self, + *, + limit: typing.Union[int, None] = None, + offset: typing.Union[int, None] = None, + ) -> typing.List[SynonymItemSchema]: + params: typing.Dict[str, 
typing.Union[int, None]] = {
+            "limit": limit,
+            "offset": offset,
+        }
+        clean_params: typing.Dict[str, int] = {
+            k: v for k, v in params.items() if v is not None
+        }
+        response: typing.List[SynonymItemSchema] = self.api_call.get(
+            self._items_path,
+            as_json=True,
+            entity_type=typing.List[SynonymItemSchema],
+            params=clean_params or None,
+        )
+        return response
+
+    def get_item(self, item_id: str) -> SynonymItemSchema:
+        response: SynonymItemSchema = self.api_call.get(
+            "/".join([self._items_path, item_id]),
+            as_json=True,
+            entity_type=SynonymItemSchema,
+        )
+        return response
+
+    def upsert_item(
+        self, item_id: str, item: SynonymItemSchema
+    ) -> SynonymItemSchema:
+        response: SynonymItemSchema = self.api_call.put(
+            "/".join([self._items_path, item_id]),
+            body=item,
+            entity_type=SynonymItemSchema,
+        )
+        return response
+
+    def delete_item(self, item_id: str) -> SynonymItemDeleteSchema:
+        # API returns {"id": "..."} for delete; openapi defines SynonymItemDeleteResponse with name but for items it's id
+        response: SynonymItemDeleteSchema = self.api_call.delete(
+            "/".join([self._items_path, item_id]), entity_type=SynonymItemDeleteSchema
+        )
+        return response
diff --git a/src/typesense/sync/synonym_sets.py b/src/typesense/sync/synonym_sets.py
new file mode 100644
index 0000000..4ffd73f
--- /dev/null
+++ b/src/typesense/sync/synonym_sets.py
@@ -0,0 +1,34 @@
+"""Client for Synonym Sets collection operations (sync)."""
+
+import sys
+
+if sys.version_info >= (3, 11):
+    import typing
+else:
+    import typing_extensions as typing
+
+from .api_call import ApiCall
+from .synonym_set import SynonymSet
+from typesense.types.synonym_set import (
+    SynonymSetSchema,
+)
+
+
+class SynonymSets:
+    resource_path: typing.Final[str] = "/synonym_sets"
+
+    def __init__(self, api_call: ApiCall) -> None:
+        self.api_call = api_call
+
+    def retrieve(self) -> typing.List[SynonymSetSchema]:
+        response: typing.List[SynonymSetSchema] = self.api_call.get(
+            SynonymSets.resource_path,
            as_json=True,
+            entity_type=typing.List[SynonymSetSchema],
+        )
+        return response
+
+    def __getitem__(self, synonym_set_name: str) -> SynonymSet:
+        from .synonym_set import SynonymSet as PerSet
+
+        return PerSet(self.api_call, synonym_set_name)
diff --git a/src/typesense/sync/synonyms.py b/src/typesense/sync/synonyms.py
new file mode 100644
index 0000000..d6e055b
--- /dev/null
+++ b/src/typesense/sync/synonyms.py
@@ -0,0 +1,152 @@
+"""
+This module provides synchronous functionality for managing synonyms in Typesense.
+
+Classes:
+    - Synonyms: Handles synchronous operations related to synonyms within a collection.
+
+Methods:
+    - __init__: Initializes the Synonyms object.
+    - __getitem__: Retrieves or creates a Synonym object for a given synonym_id.
+    - _endpoint_path: Constructs the API endpoint path for synonym operations.
+    - upsert: Creates or updates a synonym.
+    - retrieve: Retrieves all synonyms for the collection.
+
+Attributes:
+    - resource_path: The API resource path for synonyms.
+
+The Synonyms class interacts with the Typesense API to manage synonym operations
+within a specific collection. It provides methods to create, update, and retrieve
+synonyms, as well as access individual Synonym objects.
+
+For more information regarding Synonyms, refer to the Synonyms [documentation]
+(https://typesense.org/docs/27.0/api/synonyms.html#synonyms).
+
+This module uses type hinting and is compatible with Python 3.11+ as well as earlier
+versions through the use of the typing_extensions library.
+"""
+
+import sys
+
+from typing_extensions import deprecated
+
+from .api_call import ApiCall
+from .synonym import Synonym
+from typesense.logger import warn_deprecation
+from typesense.types.synonym import (
+    SynonymCreateSchema,
+    SynonymSchema,
+    SynonymsRetrieveSchema,
+)
+
+if sys.version_info >= (3, 11):
+    import typing
+else:
+    import typing_extensions as typing
+
+
+@deprecated("SyncSynonyms is deprecated on v30+.
Use client.synonym_sets instead.")
+class Synonyms:
+    """
+    Class for managing synonyms in a Typesense collection (sync).
+
+    This class provides methods to interact with synonyms, including
+    retrieving, creating, and updating them.
+
+    Attributes:
+        resource_path (str): The API resource path for synonyms.
+        api_call (ApiCall): The API call object for making requests.
+        collection_name (str): The name of the collection.
+        synonyms (Dict[str, Synonym]): A dictionary of Synonym objects.
+    """
+
+    resource_path: typing.Final[str] = "synonyms"
+
+    def __init__(self, api_call: ApiCall, collection_name: str) -> None:
+        """
+        Initialize the Synonyms object.
+
+        Args:
+            api_call (ApiCall): The API call object for making requests.
+            collection_name (str): The name of the collection.
+        """
+        self.api_call = api_call
+        self.collection_name = collection_name
+        self.synonyms: typing.Dict[str, Synonym] = {}
+
+    def __getitem__(self, synonym_id: str) -> Synonym:
+        """
+        Get or create a Synonym object for a given synonym_id.
+
+        Args:
+            synonym_id (str): The ID of the synonym.
+
+        Returns:
+            Synonym: The Synonym object for the given ID.
+        """
+        if not self.synonyms.get(synonym_id):
+            self.synonyms[synonym_id] = Synonym(
+                self.api_call,
+                self.collection_name,
+                synonym_id,
+            )
+        return self.synonyms[synonym_id]
+
+    def upsert(
+        self, synonym_id: str, schema: SynonymCreateSchema
+    ) -> SynonymSchema:
+        """
+        Create or update a synonym.
+
+        Args:
+            synonym_id (str): The ID of the synonym.
+            schema (SynonymCreateSchema): The schema for creating or updating the synonym.
+
+        Returns:
+            SynonymSchema: The created or updated synonym.
+        """
+        response = self.api_call.put(
+            self._endpoint_path(synonym_id),
+            body=schema,
+            entity_type=SynonymSchema,
+        )
+        return response
+
+    def retrieve(self) -> SynonymsRetrieveSchema:
+        """
+        Retrieve all synonyms for the collection.
+
+        Returns:
+            SynonymsRetrieveSchema: The schema containing all synonyms.
+ """ + response = self.api_call.get( + self._endpoint_path(), + entity_type=SynonymsRetrieveSchema, + ) + return response + + @warn_deprecation( # type: ignore[untyped-decorator] + "The synonyms API (collections/{collection}/synonyms) is deprecated is removed on v30+. " + "Use synonym sets (synonym_sets) instead.", + flag_name="synonyms_deprecation", + ) + def _endpoint_path(self, synonym_id: typing.Union[str, None] = None) -> str: + """ + Construct the API endpoint path for synonym operations. + + Args: + synonym_id (Union[str, None], optional): The ID of the synonym. Defaults to None. + + Returns: + str: The constructed endpoint path. + """ + from .collections import Collections + + synonym_id = synonym_id or "" + return "/".join( + [ + Collections.resource_path, + self.collection_name, + Synonyms.resource_path, + synonym_id, + ], + ) diff --git a/src/typesense/types/__init__.py b/src/typesense/types/__init__.py new file mode 100644 index 0000000..d0c03eb --- /dev/null +++ b/src/typesense/types/__init__.py @@ -0,0 +1 @@ +"""Types for the Typesense Python Client.""" diff --git a/src/typesense/types/alias.py b/src/typesense/types/alias.py new file mode 100644 index 0000000..ac37428 --- /dev/null +++ b/src/typesense/types/alias.py @@ -0,0 +1,43 @@ +"""Alias types for Typesense Python Client.""" + +import sys + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + + +class AliasCreateSchema(typing.TypedDict): + """ + The schema for the request of the Aliases.create method. + + Attributes: + collection_name (str): The name of the collection. + """ + + collection_name: str + + +class AliasSchema(AliasCreateSchema): + """ + The schema for the response of the Aliases.create method. + + Attributes: + name (str): The name of the alias. + + collection_name (str): The name of the collection. + """ + + name: str + + +class AliasesResponseSchema(typing.TypedDict): + """ + The schema for the response of the Aliases.retrieve method. 
+ + Attributes: + aliases(list[CollectionAliasSchema]): The list of aliases. + """ + + aliases: typing.List[AliasSchema] diff --git a/src/typesense/types/analytics.py b/src/typesense/types/analytics.py new file mode 100644 index 0000000..b442f7e --- /dev/null +++ b/src/typesense/types/analytics.py @@ -0,0 +1,81 @@ +"""Types for Analytics endpoints and Analytics Rules.""" + +import sys + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + + +class AnalyticsEvent(typing.TypedDict): + """Schema for an analytics event to be created.""" + + name: str + data: typing.Dict[str, typing.Any] + + +class AnalyticsEventCreateResponse(typing.TypedDict): + """Response schema for creating an analytics event and for flush.""" + + ok: bool + + +class _AnalyticsEventItem(typing.TypedDict, total=False): + name: str + collection: str + timestamp: typing.NotRequired[int] + user_id: str + doc_id: typing.NotRequired[str] + doc_ids: typing.NotRequired[typing.List[str]] + query: typing.NotRequired[str] + + +class AnalyticsEventsResponse(typing.TypedDict): + """Response schema for retrieving analytics events.""" + + events: typing.List[_AnalyticsEventItem] + + +class AnalyticsStatus(typing.TypedDict, total=False): + """Response schema for analytics status.""" + + popular_prefix_queries: int + nohits_prefix_queries: int + log_prefix_queries: int + query_log_events: int + query_counter_events: int + doc_log_events: int + doc_counter_events: int + + +# Rules + + +class AnalyticsRuleParams(typing.TypedDict, total=False): + destination_collection: typing.NotRequired[str] + limit: typing.NotRequired[int] + capture_search_requests: typing.NotRequired[bool] + meta_fields: typing.NotRequired[typing.List[str]] + expand_query: typing.NotRequired[bool] + counter_field: typing.NotRequired[str] + weight: typing.NotRequired[int] + + +class AnalyticsRuleCreate(typing.TypedDict): + name: str + type: str + collection: str + event_type: str + params: 
typing.NotRequired[AnalyticsRuleParams] + rule_tag: typing.NotRequired[str] + + +class AnalyticsRuleUpdate(typing.TypedDict, total=False): + name: str + rule_tag: str + params: AnalyticsRuleParams + + +class AnalyticsRuleSchema(AnalyticsRuleCreate, total=False): + pass diff --git a/src/typesense/types/analytics_rule_v1.py b/src/typesense/types/analytics_rule_v1.py new file mode 100644 index 0000000..88ffd00 --- /dev/null +++ b/src/typesense/types/analytics_rule_v1.py @@ -0,0 +1,203 @@ +"""Analytics Rule V1 types for Typesense Python Client.""" + +import sys + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + + +class Event(typing.TypedDict): + """ + Schema for analytics rule [events](https://typesense.org/docs/26.0/api/analytics-query-suggestions.html#analytics-query-suggestions). + + Attributes: + type (str): The [type](https://typesense.org/docs/26.0/api/analytics-query-suggestions.html#aggregating-multiple-events) of the event. + + - `click`: Tracking clicks against documents returned in search response.. + - `conversion`: The event is a click. + - `visit`: Tracking page visits to specific documents, useful for recommendations. + + weight (int): The weight of the event. + + name (str): The name of the event. + + """ + + type: typing.Literal["click", "conversion", "visit"] + weight: int + name: str + + +class _Source(typing.TypedDict): + """ + Schema for the source of the analytics rule. + + Attributes: + collections (list[str]): The list of collections. + + events (list[Event]): The list of events. + """ + + collections: typing.List[str] + events: typing.NotRequired[typing.List[Event]] + + +class _SourceForCounters(typing.TypedDict): + """ + Schema for the source of the analytics rule for counter rules. + + Attributes: + collections (list[str]): The list of collections. + + events (list[Event]): The list of events. 
+ """ + + collections: typing.List[str] + events: typing.List[Event] + + +class _Destination(typing.TypedDict): + """ + Schema for the destination of the analytics rule. + + Attributes: + collection (str): The destination collection. + + counter_field (str): The counter field of the collection. + """ + + collection: str + counter_field: typing.NotRequired[str] + + +class _DestinationForCounters(typing.TypedDict): + """ + Schema for the destination of the analytics rule for counter rules. + + Attributes: + collection (str): The destination collection. + + counter_field (str): The counter field of the collection. + """ + + collection: str + counter_field: str + + +class _RuleParams(typing.TypedDict): + """ + Schema for the analytics rule parameters. + + Attributes: + source (_Source): The source of the analytics rule. + + expand_query (bool): Whether to expand the query. + + destination (_Destination): The destination of the analytics rule. + + limit (int): The limit of the analytics rule. + """ + + source: _Source + expand_query: typing.NotRequired[bool] + destination: _Destination + limit: typing.NotRequired[int] + + +class _RuleParamsForCounters(typing.TypedDict): + """ + Schema for the analytics rule parameters for counter rules. + + Attributes: + source (_SourceForCounters): The source of the analytics rule. + + destination (_DestinationForCounters): The destination of the analytics rule. + + limit (int): The limit of the analytics + """ + + source: _SourceForCounters + destination: _DestinationForCounters + limit: typing.NotRequired[int] + + +class RuleCreateSchemaForQueries(typing.TypedDict): + """ + Schema for the request of the AnalyticsRules.create method. + + Attributes: + type (str): The type of the analytics rule. + + params (AnalyticsRuleParams): The params of the analytics rule. 
+ """ + + type: typing.Literal["popular_queries", "nohits_queries"] + params: _RuleParams + + +class RuleCreateSchemaForCounters(typing.TypedDict): + """ + Schema for the request of the AnalyticsRules.create method. + + Attributes: + type (str): The type of the analytics rule. + + params (AnalyticsRuleParams): The params of the analytics rule. + """ + + type: typing.Literal["counter"] + params: _RuleParamsForCounters + + +class RuleSchemaForQueries(RuleCreateSchemaForQueries): + """ + Schema for the response of the AnalyticsRules.create method. + + Attributes: + name (str): The name of the analytics rule. + + type (str): The type of the analytics rule. + + params (AnalyticsRuleParams): The params of the analytics rule. + """ + + name: str + + +class RuleSchemaForCounters(RuleCreateSchemaForCounters): + """ + Schema for the response of the AnalyticsRules.create method. + + Attributes: + name (str): The name of the analytics rule. + + type (str): The type of the analytics rule. + + params (AnalyticsRuleParams): The params of the analytics rule. + """ + + name: str + + +class RuleDeleteSchema(typing.TypedDict): + """ + Schema for the response of the AnalyticsRules.delete method. + + Attributes: + name (str): The name of the analytics rule. + """ + + name: str + + +class RulesRetrieveSchema(typing.TypedDict): + """ + Schema for the response of the AnalyticsRules.retrieve method. + + Attributes: + rules(typing.List[AnalyticsRuleSchema]): The list of analytics rules. 
+ """ + + rules: typing.List[typing.Union[RuleSchemaForQueries, RuleSchemaForCounters]] diff --git a/src/typesense/types/collection.py b/src/typesense/types/collection.py new file mode 100644 index 0000000..e49fbc0 --- /dev/null +++ b/src/typesense/types/collection.py @@ -0,0 +1,238 @@ +"""Collection types for Typesense Python Client.""" + +import sys + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + + +_TType = typing.TypeVar("_TType") + +_FieldType = typing.Literal[ + "string", + "int32", + "int64", + "float", + "bool", + "geopoint", + "geopolygon", + "geopoint[]", + "string[]", + "int32[]", + "int64[]", + "float[]", + "bool[]", + "object", + "object[]", + "auto", + "string*", + "image", +] + +_ReferenceFieldType = typing.Literal["string", "int32", "int64", "float"] + +Locales = typing.Literal["ja", "zh", "ko", "th", "el", "ru", "rs", "uk", "be", ""] + + +class HNSWParamsSchema(typing.TypedDict): + """ + The schema for the HNSW parameters in the CollectionFieldSchema. + + Attributes: + M (int): The number of bi-directional links created for every new element. + ef_construction (int): The size of the dynamic list for the nearest neighbors. + """ + + M: typing.NotRequired[int] + ef_construction: typing.NotRequired[int] + + +class CollectionFieldSchema(typing.Generic[_TType], typing.TypedDict, total=False): + """ + CollectionFieldSchema represents the schema of a field in a collection. + + Attributes: + name (str): The name of the field. + type (TType): The type of the field. + facet (bool): Whether the field is a facet. + optional (bool): Whether the field is optional. + infix (bool): Whether the field is an infix. + stem (bool): Whether the field is a stem. + symbols_to_index (list[str]): The symbols to index + token_separators (list[str]): The token separators. + locale (Locales): The locale of the field. + sort (bool): Whether the field is sortable. + store (bool): Whether the field is stored. 
+ num_dim (float): The number of dimensions. + range_index (bool): Whether the field is a range index. + index (bool): Whether the field is indexed. + vec_dist (typing.Literal['cosine', 'ip'] | str): The vector distance. + """ + + name: str + type: typing.NotRequired[_TType] + facet: typing.NotRequired[bool] + optional: typing.NotRequired[bool] + infix: typing.NotRequired[bool] + stem: typing.NotRequired[bool] + stem_dictionary: typing.NotRequired[str] + locale: typing.NotRequired[Locales] + sort: typing.NotRequired[bool] + store: typing.NotRequired[bool] + symbols_to_index: typing.NotRequired[typing.List[str]] + token_separators: typing.NotRequired[typing.List[str]] + num_dim: typing.NotRequired[float] + hnsw_params: typing.NotRequired[HNSWParamsSchema] + range_index: typing.NotRequired[bool] + index: typing.NotRequired[bool] + vec_dist: typing.NotRequired[typing.Union[typing.Literal["cosine", "ip"], str]] + + +class RegularCollectionFieldSchema(CollectionFieldSchema[_FieldType]): + """ + The schema of a regular field in a collection. + + Attributes: + name (str): The name of the field. + type (FieldType): The type of the field. + facet (bool): Whether the field is a facet. + optional (bool): Whether the field is optional. + infix (bool): Whether the field is an infix. + stem (bool): Whether the field is a stem. + locale (Locales): The locale of the field. + sort (bool): Whether the field is sortable. + symbols_to_index (list[str]): The symbols to index + token_separators (list[str]): The token separators. + store (bool): Whether the field is stored. + num_dim (float): The number of dimensions. + range_index (bool): Whether the field is a range index. + index (bool): Whether the field is indexed. + vec_dist (typing.Literal['cosine', 'ip'] | str): The vector distance. + """ + + +class ReferenceCollectionFieldSchema(CollectionFieldSchema[_ReferenceFieldType]): + """ + The schema of a field referencing another field from a foreign Collection. 
+
+    Attributes:
+        name (str): The name of the field.
+        type (ReferenceFieldType): The type of the field.
+        facet (bool): Whether the field is a facet.
+        optional (bool): Whether the field is optional.
+        infix (bool): Whether the field is an infix.
+        symbols_to_index (list[str]): The symbols to index
+        token_separators (list[str]): The token separators.
+        stem (bool): Whether the field is a stem.
+        locale (Locales): The locale of the field.
+        sort (bool): Whether the field is sortable.
+        store (bool): Whether the field is stored.
+        num_dim (float): The number of dimensions.
+        range_index (bool): Whether the field is a range index.
+        index (bool): Whether the field is indexed.
+        vec_dist (typing.Literal['cosine', 'ip'] | str): The vector distance.
+    """
+
+    reference: str
+
+
+class DropCollectionFieldSchema(typing.TypedDict):
+    """The schema for the field in the CollectionUpdateSchema."""
+
+    drop: typing.Literal[True]
+    name: str
+
+
+class VoiceQueryModelSchema(typing.TypedDict):
+    """The schema for the voice_query_model field in the CollectionCreateSchema."""
+
+    model_name: str
+
+
+class CollectionCreateSchema(typing.TypedDict):
+    """
+    The schema for the request of the Collections.create method.
+
+    Attributes:
+        name (str): The name of the collection.
+
+        fields (list[RegularCollectionFieldSchema | ReferenceCollectionFieldSchema]): The fields
+        of the collection.
+
+        default_sorting_field (str): The default sorting field of the collection.
+
+        symbols_to_index (list[str]): The symbols to index.
+
+        token_separators (list[str]): The token separators.
+
+        enable_nested_fields (bool): Whether nested fields are enabled.
+
+        voice_query_model (VoiceQueryModelSchema): The voice query model.
+ """ + + name: str + fields: typing.List[ + typing.Union[RegularCollectionFieldSchema, ReferenceCollectionFieldSchema] + ] + default_sorting_field: typing.NotRequired[str] + symbols_to_index: typing.NotRequired[typing.List[str]] + token_separators: typing.NotRequired[typing.List[str]] + enable_nested_fields: typing.NotRequired[bool] + voice_query_model: typing.NotRequired[VoiceQueryModelSchema] + synonym_sets: typing.NotRequired[typing.List[typing.List[str]]] + + +class CollectionSchema(CollectionCreateSchema): + """ + The schema for the response of the Collections.create method. + + Attributes: + created_at (int): The creation timestamp of the collection. + + num_documents (int): The number of documents in the collection. + + num_memory_shards (int): The number of memory shards in the collection. + + name (str): The name of the collection. + + fields (list[RegularCollectionFieldSchema | ReferenceCollectionFieldSchema]): The fields + of the collection. + + default_sorting_field (str): The default sorting field of the collection. + + symbols_to_index (list[str]): The symbols to index. + + token_separators (list[str]): The token separators. + + enable_nested_fields (bool): Whether nested fields are enabled. + + voice_query_model (VoiceQueryModelSchema): The voice query model. + """ + + created_at: int + num_documents: int + num_memory_shards: int + + +class CollectionUpdateSchema(typing.TypedDict): + """ + The schema for the request of the Collection.update method. + + Attributes: + fields (list): The fields of the collection. 
+ + """ + + fields: typing.NotRequired[ + typing.List[ + typing.Union[ + RegularCollectionFieldSchema, + ReferenceCollectionFieldSchema, + DropCollectionFieldSchema, + ] + ] + ] + synonym_sets: typing.NotRequired[typing.List[str]] + curation_sets: typing.NotRequired[typing.List[str]] diff --git a/src/typesense/types/conversations_model.py b/src/typesense/types/conversations_model.py new file mode 100644 index 0000000..c5d2c17 --- /dev/null +++ b/src/typesense/types/conversations_model.py @@ -0,0 +1,83 @@ +"""ConversationalModel types for Typesense Python Client.""" + +import sys + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + + +class ConversationModelCreateSchema(typing.TypedDict): + """ + Schema for creating a new conversation model. + + Attributes: + model_name (str): Name of the LLM model offered by OpenAI, Cloudflare or vLLM. + + api_key (str): The LLM service's API Key. + + system_prompt (str): The system prompt that contains special instructions to the LLM. + + max_bytes (int): The maximum number of bytes to send to the LLM in every API call. + Consult the LLM's documentation on the number of bytes supported in the context window. + + history_collection (str): Typesense collection that stores the historical conversations. + + account_id (str): LLM service's account ID (only applicable for Cloudflare). + + ttl (int): Time interval in seconds after which the messages would be deleted. Default: 86400 (24 hours). + + vllm_url (str): The URL of the vLLM service. + + id (str): The custom ID of the model. + """ + + id: typing.NotRequired[str] + model_name: str + api_key: str + system_prompt: typing.NotRequired[str] + max_bytes: int + history_collection: str + account_id: typing.NotRequired[str] + ttl: typing.NotRequired[int] + vllm_url: typing.NotRequired[str] + + +class ConversationModelDeleteSchema(typing.TypedDict): + """ + Schema for deleting a conversation model. + + Attributes: + id (str): The ID of the model. 
+ """ + + id: str + + +class ConversationModelSchema( + ConversationModelCreateSchema, +): + """ + Schema for a conversation model. + + Attributes: + model_name (str): Name of the LLM model offered by OpenAI, Cloudflare or vLLM. + + api_key (str): The LLM service's API Key. + + system_prompt (str): The system prompt that contains special instructions to the LLM. + + max_bytes (int): The maximum number of bytes to send to the LLM in every API call. + Consult the LLM's documentation on the number of bytes supported in the context window. + + history_collection (str): Typesense collection that stores the historical conversations. + + account_id (str): LLM service's account ID (only applicable for Cloudflare). + + ttl (int): Time interval in seconds after which the messages would be deleted. Default: 86400 (24 hours). + + vllm_url (str): The URL of the vLLM service. + + id (str): The custom ID of the model. + """ diff --git a/src/typesense/types/curation_set.py b/src/typesense/types/curation_set.py new file mode 100644 index 0000000..6468166 --- /dev/null +++ b/src/typesense/types/curation_set.py @@ -0,0 +1,128 @@ +"""Curation Set types for Typesense Python Client.""" + +import sys + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + + +class CurationIncludeSchema(typing.TypedDict): + """ + Schema representing an included document for a curation rule. + """ + + id: str + position: int + + +class CurationExcludeSchema(typing.TypedDict): + """ + Schema representing an excluded document for a curation rule. + """ + + id: str + + +class CurationRuleTagsSchema(typing.TypedDict): + """ + Schema for a curation rule using tags. + """ + + tags: typing.List[str] + + +class CurationRuleQuerySchema(typing.TypedDict): + """ + Schema for a curation rule using query and match. 
+ """ + + query: str + match: typing.Literal["exact", "contains"] + + +class CurationRuleFilterBySchema(typing.TypedDict): + """ + Schema for a curation rule using filter_by. + """ + + filter_by: str + + +CurationRuleSchema = typing.Union[ + CurationRuleTagsSchema, + CurationRuleQuerySchema, + CurationRuleFilterBySchema, +] +""" +Schema representing rule conditions for a curation item. + +A curation rule must be exactly one of: +- A tags-based rule: `{ tags: string[] }` +- A query-based rule: `{ query: string; match: "exact" | "contains" }` +- A filter_by-based rule: `{ filter_by: string }` +""" + + +class CurationItemSchema(typing.TypedDict): + """ + Schema for a single curation item (aka CurationObject in the API). + """ + + id: str + rule: CurationRuleSchema + includes: typing.NotRequired[typing.List[CurationIncludeSchema]] + excludes: typing.NotRequired[typing.List[CurationExcludeSchema]] + filter_by: typing.NotRequired[str] + sort_by: typing.NotRequired[str] + replace_query: typing.NotRequired[str] + remove_matched_tokens: typing.NotRequired[bool] + filter_curated_hits: typing.NotRequired[bool] + stop_processing: typing.NotRequired[bool] + effective_from_ts: typing.NotRequired[int] + effective_to_ts: typing.NotRequired[int] + metadata: typing.NotRequired[typing.Dict[str, typing.Any]] + + +class CurationSetUpsertSchema(typing.TypedDict): + """ + Payload schema to create or replace a curation set. + """ + + items: typing.List[CurationItemSchema] + + +class CurationSetSchema(CurationSetUpsertSchema, total=False): + """ + Response schema for a curation set. 
+ """ + + name: typing.NotRequired[str] + + +class CurationSetsListEntrySchema(typing.TypedDict): + """A single entry in the curation sets list response.""" + + name: str + items: typing.List[CurationItemSchema] + + +class CurationSetsListResponseSchema(typing.List[CurationSetsListEntrySchema]): + """List response for all curation sets.""" + + +class CurationSetListItemResponseSchema(typing.List[CurationItemSchema]): + """List response for items under a specific curation set.""" + + +class CurationItemDeleteSchema(typing.TypedDict): + """Response schema for deleting a curation item.""" + + id: str + + +class CurationSetDeleteSchema(typing.TypedDict): + """Response schema for deleting a curation set.""" + + name: str diff --git a/src/typesense/types/debug.py b/src/typesense/types/debug.py new file mode 100644 index 0000000..4394896 --- /dev/null +++ b/src/typesense/types/debug.py @@ -0,0 +1,21 @@ +"""Types for the debug endpoint.""" + +import sys + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + + +class DebugResponseSchema(typing.TypedDict): + """ + Response schema for the debug endpoint. + + Attributes: + state (int): The state of the Typesense server. + version (str): The version of the Typesense server. + """ + + state: int + version: str diff --git a/src/typesense/types/document.py b/src/typesense/types/document.py new file mode 100644 index 0000000..496432d --- /dev/null +++ b/src/typesense/types/document.py @@ -0,0 +1,975 @@ +"""Types for document operations in Typesense.""" + +import sys + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + + +_InfixOperations = typing.Literal["off", "always", "fallback"] +""" +Infix operations for search queries. + +- `off`: infix search is disabled, which is default. +- `always`: infix search is performed along with regular search. +- `fallback`: infix search is performed if regular search does not produce results. 
+""" + +_SequenceTypes = typing.Union[ + typing.Sequence[int], + typing.Sequence[str], + typing.Sequence[float], +] + +_Types = typing.Union[int, str, float, bool] + +DocumentSchema: typing.TypeAlias = typing.Mapping[ + str, + typing.Union[ + _Types, + _SequenceTypes, + "DocumentSchema", + typing.Sequence["DocumentSchema"], + ], +] +""" +Valid types for a document schema. + +It can be a mapping of a string to any of the following types: + +- `int` +- `str` +- `float` +- `bool` + +Their respective sequences, or a nested schema of the same type. +""" + +TDoc = typing.TypeVar("TDoc", bound="DocumentSchema") + + +class DirtyValuesParameters(typing.TypedDict): + """ + Parameters for handling dirty values in documents. + + - `coerce_or_reject`: Attempt coercion of the field's value to previously inferred type. + If coercion fails, reject the write outright with an error message. + - `coerce_or_drop`: Attempt coercion of the field's value to previously inferred type. + If coercion fails, drop the particular field and index the rest of the document. + - `drop`: Drop the particular field and index the rest of the document. + - `reject`: Reject the write outright with an error message. + """ + + dirty_values: typing.NotRequired[ + typing.Literal["coerce_or_reject", "coerce_or_drop", "drop", "reject"] + ] + + +class DocumentWriteParameters(DirtyValuesParameters): + """ + Parameters for writing documents. + + Attributes: + action (str): [Action](https://typesense.org/docs/26.0/api/documents.html#action-modes-create-upsert-update-emplace) to perform on the document. + + - `create`: Creates a new document. Fails if a document with the same id + already exists (default). + - `upsert`: Creates a new document or updates an existing document if a + document with the same id already exists. Requires the whole document to be sent. + For partial updates, use the update action below. + - `update`: Updates an existing document. Fails if a document with the + given id does not exist. 
You can send a partial document containing only the + fields that are to be updated. + - `emplace`: Creates a new document or updates an existing document if a + document with the same id already exists. You can send either the whole document + or a partial document for update. + + dirty_values (str): [Handling of dirty values](https://typesense.org/docs/26.0/api/documents.html#dealing-with-dirty-data) in the document. + + - `coerce_or_reject`: Attempt coercion of the field's value to previously inferred type. + If coercion fails, reject the write outright with an error message. + - `coerce_or_drop`: Attempt coercion of the field's value to previously inferred type. + If coercion fails, drop the particular field and index the rest of the document. + - `drop`: Drop the particular field and index the rest of the document. + - `reject`: Reject the write outright with an error message. + """ + + action: typing.NotRequired[typing.Literal["create", "update", "upsert", "emplace"]] + + +class UpdateByFilterParameters(typing.TypedDict): + """ + Parameters for updating documents by filter. + + Attributes: + filter_by(str): Filter to apply to documents. + """ + + filter_by: str + + +class UpdateByFilterResponse(typing.TypedDict): + """ + Response from updating documents by filter. + + Attributes: + num_updated(int): Indicates the success of the operation. + """ + + num_updated: int + + +class ImportResponseSuccess(typing.TypedDict): + """ + Response for a successful import operation. + + Attributes: + success(True): Indicates the success of the operation. + """ + + success: typing.Literal[True] + + +class ImportResponseWithDocAndId(typing.Generic[TDoc], ImportResponseSuccess): + """ + Response for a successful import operation with document and id. + + Attributes: + success(True): Indicates the success of the operation. + doc(TDoc): Imported document. + id(str): ID of the imported document. 
+ """ + + id: str + doc: TDoc + + +class ImportResponseWithDoc(typing.Generic[TDoc], ImportResponseSuccess): + """ + Response for a successful import operation with document. + + Attributes: + success(True): Indicates the success of the operation. + doc(TDoc): Imported document. + """ + + doc: TDoc + + +class ImportResponseWithId(ImportResponseSuccess): + """ + Response for a successful import operation with ID. + + Attributes: + success(True): Indicates the success of the operation. + id(str): ID of the imported document. + """ + + id: str + + +class ImportResponseFail(typing.Generic[TDoc], typing.TypedDict): + """ + Response for a failed import operation. + + Attributes: + success (False): Indicates the success of the operation. + error (str): Error message. + code (int): Error code. + document (TDoc): Document that failed to import. + """ + + success: typing.Literal[False] + error: str + code: int + document: TDoc + + +ImportResponse: typing.TypeAlias = typing.Union[ + typing.List[typing.Union[ImportResponseWithDoc[TDoc], ImportResponseFail[TDoc]]], + typing.List[typing.Union[ImportResponseWithId, ImportResponseFail[TDoc]]], + typing.List[ + typing.Union[ImportResponseWithDocAndId[TDoc], ImportResponseFail[TDoc]] + ], + typing.List[typing.Union[ImportResponseSuccess, ImportResponseFail[TDoc]]], +] +"""Set of all possible responses after an import operation.""" + + +class DocumentImportParametersReturnId(DocumentWriteParameters): + """ + Parameters for importing documents with return ID. + + Attributes: + return_id (True): Return the ID of the imported document. + action (str): [Action](https://typesense.org/docs/26.0/api/documents.html#action-modes-create-upsert-update-emplace) to perform on the document. + + - `create`: Creates a new document. Fails if a document with the same id + already exists (default). + - `upsert`: Creates a new document or updates an existing document if a + document with the same id already exists. 
Requires the whole document to be sent. + For partial updates, use the update action below. + - `update`: Updates an existing document. Fails if a document with the + given id does not exist. You can send a partial document containing only the + fields that are to be updated. + - `emplace`: Creates a new document or updates an existing document if a + document with the same id already exists. You can send either the whole document + or a partial document for update. + + dirty_values (str): [Handling of dirty values](https://typesense.org/docs/26.0/api/documents.html#dealing-with-dirty-data) in the document. + + - `coerce_or_reject`: Attempt coercion of the field's value to previously inferred type. + If coercion fails, reject the write outright with an error message. + - `coerce_or_drop`: Attempt coercion of the field's value to previously inferred type. + If coercion fails, drop the particular field and index the rest of the document. + - `drop`: Drop the particular field and index the rest of the document. + - `reject`: Reject the write outright with an error message. + """ + + return_id: typing.Literal[True] + + +class DocumentImportParametersReturnDoc(DocumentWriteParameters): + """ + Parameters for importing documents with return document. + + Attributes: + return_doc (True): Return the imported document. + action (str): [Action](https://typesense.org/docs/26.0/api/documents.html#action-modes-create-upsert-update-emplace) to perform on the document. + + - `create`: Creates a new document. Fails if a document with the same id + already exists (default). + - `upsert`: Creates a new document or updates an existing document if a + document with the same id already exists. Requires the whole document to be sent. + For partial updates, use the update action below. + - `update`: Updates an existing document. Fails if a document with the + given id does not exist. You can send a partial document containing only the + fields that are to be updated. 
+ - `emplace`: Creates a new document or updates an existing document if a + document with the same id already exists. You can send either the whole document + or a partial document for update. + + dirty_values (str): [Handling of dirty values](https://typesense.org/docs/26.0/api/documents.html#dealing-with-dirty-data) in the document. + + - `coerce_or_reject`: Attempt coercion of the field's value to previously inferred type. + If coercion fails, reject the write outright with an error message. + - `coerce_or_drop`: Attempt coercion of the field's value to previously inferred type. + If coercion fails, drop the particular field and index the rest of the document. + - `drop`: Drop the particular field and index the rest of the document. + - `reject`: Reject the write outright with an error message. + """ + + return_doc: typing.Literal[True] + + +class DocumentImportParametersReturnDocAndId(DocumentWriteParameters): + """ + Parameters for importing documents with return document and ID. + + Attributes: + return_doc (True): Return the imported document. + return_id (True): Return the ID of the imported document. + action (str): [Action](https://typesense.org/docs/26.0/api/documents.html#action-modes-create-upsert-update-emplace) to perform on the document. + + - `create`: Creates a new document. Fails if a document with the same id + already exists (default). + - `upsert`: Creates a new document or updates an existing document if a + document with the same id already exists. Requires the whole document to be sent. + For partial updates, use the update action below. + - `update`: Updates an existing document. Fails if a document with the + given id does not exist. You can send a partial document containing only the + fields that are to be updated. + - `emplace`: Creates a new document or updates an existing document if a + document with the same id already exists. You can send either the whole document + or a partial document for update. 
+ + dirty_values (str): [Handling of dirty values](https://typesense.org/docs/26.0/api/documents.html#dealing-with-dirty-data) in the document. + + - `coerce_or_reject`: Attempt coercion of the field's value to previously inferred type. + If coercion fails, reject the write outright with an error message. + - `coerce_or_drop`: Attempt coercion of the field's value to previously inferred type. + If coercion fails, drop the particular field and index the rest of the document. + - `drop`: Drop the particular field and index the rest of the document. + - `reject`: Reject the write outright with an error message. + """ + + return_doc: typing.Literal[True] + return_id: typing.Literal[True] + + +DocumentImportParameters: typing.TypeAlias = typing.Union[ + DocumentWriteParameters, + DocumentImportParametersReturnId, + DocumentImportParametersReturnDoc, + DocumentImportParametersReturnDocAndId, +] +"""Set of all possible parameters for importing documents.""" + + +class DocumentExportParameters(typing.TypedDict): + """ + Parameters for [exporting documents](https://typesense.org/docs/26.0/api/documents.html#export-documents). + + Attributes: + filter_by (str): Filter to apply to documents. + include_fields (str): Fields to include in the exported documents. + exclude_fields (str): Fields to exclude from the exported documents. + """ + + filter_by: typing.NotRequired[str] + include_fields: typing.NotRequired[str] + exclude_fields: typing.NotRequired[str] + + +class RequiredSearchParameters(typing.TypedDict): + """ + Required parameters for searching documents. + + Attributes: + q (str): Query string to search for. + query_by (str): Field to search in. + """ + + q: str + query_by: str + + +class QueryParameters(typing.TypedDict): + """ + Parameters [regarding queries](https://typesense.org/docs/26.0/api/search.html#query-parameters). + + Attributes: + prefix (str, bool, list[bool]): Prefix operations for search queries. 
+ infix (InfixOperations, list[InfixOperations]): Infix operations for search queries. + pre_segmented_query (bool): Indicates whether the query is pre-segmented. + preset (str): Preset for search queries. + vector_query (str): Vector query for search. + voice_query (str): Voice query for search. + stopwords (str, list[str]): A comma separated list of words to be dropped from the search query while searching. + validate_field_names (bool): Controls whether Typesense should validate if the fields exist in the schema. + """ + + prefix: typing.NotRequired[typing.Union[str, bool, typing.List[bool]]] + infix: typing.NotRequired[ + typing.Union[_InfixOperations, typing.List[_InfixOperations]] + ] + pre_segmented_query: typing.NotRequired[bool] + preset: typing.NotRequired[str] + vector_query: typing.NotRequired[str] + voice_query: typing.NotRequired[str] + stopwords: typing.NotRequired[typing.Union[str, typing.List[str]]] + validate_field_names: typing.NotRequired[bool] + + +class FilterParameters(typing.TypedDict): + """ + Parameters regarding [filtering search responses](https://typesense.org/docs/26.0/api/search.html#filter-parameters). + + Attributes: + filter_by (str): Filter to apply to search results. + enable_lazy_filter (bool): Enable lazy filtering. + """ + + filter_by: typing.NotRequired[str] + max_filter_by_candidates: typing.NotRequired[int] + enable_lazy_filter: typing.NotRequired[bool] + + +class RankingAndSortingParameters(typing.TypedDict): + """ + Parameters regarding [ranking and sorting search results](https://typesense.org/docs/26.0/api/search.html#ranking-and-sorting-parameters). + + Attributes: + query_by_weights (str, list[int]): Weights to apply to query fields. + text_match_type (str): Type of text match to prioritize. + + - `max_score`: Prioritize maximum score. + - `max_weight`: Prioritize maximum weight. + + sort_by (str, list[str]): Fields to sort search results by in order specified. 
+ prioritize_exact_match (bool): Prioritize exact matches. + prioritize_token_position (bool): Prioritize token position. + prioritize_num_matching_fields (bool): Prioritize number of matching fields. + pinned_hits (dict[str, list[str]]): Pinned hits to prioritize. + hidden_hits (dict[str, list[str]]): Hidden hits to deprioritize. + enable_overrides (bool): Enable overrides. + override_tags (str, list[str]): Tags to override. + max_candidates (int): Maximum number of candidates to return. + enable_synonyms (bool): If you have some synonyms defined but want to disable all of them for a particular search query, set `enable_synonyms` to `false`. + filter_curated_hits (bool): Whether the `filter_by` condition of the search query should be applicable to curated results (override definitions, pinned hits, hidden hits, etc. + synonym_prefix (bool): Allow synonym resolution on word prefixes in the query. + """ + + query_by_weights: typing.NotRequired[typing.Union[str, typing.List[int]]] + text_match_type: typing.NotRequired[typing.Literal["max_score", "max_weight"]] + sort_by: typing.NotRequired[typing.Union[str, typing.List[str]]] + prioritize_exact_match: typing.NotRequired[bool] + prioritize_token_position: typing.NotRequired[bool] + prioritize_num_matching_fields: typing.NotRequired[bool] + pinned_hits: typing.NotRequired[typing.Dict[str, typing.List[str]]] + hidden_hits: typing.NotRequired[typing.Dict[str, typing.List[str]]] + enable_overrides: typing.NotRequired[bool] + override_tags: typing.NotRequired[typing.Union[str, typing.List[str]]] + max_candidates: typing.NotRequired[int] + enable_synonyms: typing.NotRequired[bool] + filter_curated_hits: typing.NotRequired[bool] + synonym_prefix: typing.NotRequired[bool] + + +class PaginationParameters(typing.TypedDict): + """ + Parameters regarding [pagination of search results](https://typesense.org/docs/26.0/api/search.html#pagination-parameters). + + Attributes: + page (int): Page number to retrieve. 
+ per_page (int): Number of results per page. + offset (int): Offset to start retrieving results from. + limit (int): Limit of results to retrieve. + """ + + page: typing.NotRequired[int] + per_page: typing.NotRequired[int] + offset: typing.NotRequired[int] + limit: typing.NotRequired[int] + + +class FacetingParameters(typing.TypedDict): + """ + Parameters regarding [faceting search results](https://typesense.org/docs/26.0/api/search.html#faceting-parameters). + + Attributes: + facet_by (str, list[str]): Field to facet by. + max_facet_values (int): Maximum number of facet values to return. + facet_query (str): Query to facet by. + facet_query_num_typos (int): Number of typos to allow in facet query. + facet_return_parent (str): Return parent of facet. + facet_sample_percent (int): Sample percentage of facet values to return. + facet_sample_threshold (int): Sample threshold of facet values to return. + facet_strategy (str): Typesense supports two strategies for efficient faceting, and has some built-in heuristics to pick the right strategy for you. + """ + + facet_by: typing.NotRequired[typing.Union[str, typing.List[str]]] + max_facet_values: typing.NotRequired[int] + facet_query: typing.NotRequired[str] + facet_query_num_typos: typing.NotRequired[int] + facet_return_parent: typing.NotRequired[str] + facet_sample_percent: typing.NotRequired[int] + facet_sample_threshold: typing.NotRequired[int] + facet_strategy: typing.NotRequired[ + typing.Union[ + typing.Literal["exhaustive"], + typing.Literal["top_values"], + typing.Literal["automatic"], # default + ] + ] + + +class GroupingParameters(typing.TypedDict): + """ + Parameters regarding [grouping search results](https://typesense.org/docs/26.0/api/search.html#grouping-parameters). + + Attributes: + group_by (str): Field to group by. + group_limit (int): Limit of groups to return. + group_missing_values (bool): Include missing values in groups. 
+ """ + + group_by: typing.NotRequired[str] + group_limit: typing.NotRequired[int] + group_missing_values: typing.NotRequired[bool] + + +class ResultsParameters(typing.TypedDict): + """ + Parameters regarding [search results](https://typesense.org/docs/26.0/api/search.html#results-parameters). + + Attributes: + include_fields (str, list[str]): Fields to include in search results. + exclude_fields (str, list[str]): Fields to exclude from search results. + highlight_fields (str, list[str]): Fields to highlight in search results. + highlight_full_fields (str, list[str]): Fields to highlight fully in search results. + highlight_affix_num_tokens (int): The number of tokens that should surround the highlighted text on each side. + highlight_start_tag (str): Start tag for highlighting. + highlight_end_tag (str): End tag for highlighting. + enable_highlight_v1 (bool): Flag for disabling the deprecated, old highlight structure in the response. + snippet_threshold (int): Field values under this length will be fully highlighted, instead of showing a snippet of relevant portion. + limit_hits (int): Limit the number of hits to return. + search_cutoff_ms (int): Search cutoff time in milliseconds. + exhaustive_search (bool): Perform exhaustive search. 
+ """ + + include_fields: typing.NotRequired[typing.Union[str, typing.List[str]]] + exclude_fields: typing.NotRequired[typing.Union[str, typing.List[str]]] + highlight_fields: typing.NotRequired[ + typing.Union[typing.Literal["none"], str, typing.List[str]] + ] + highlight_full_fields: typing.NotRequired[ + typing.Union[typing.Literal["none"], str, typing.List[str]] + ] + highlight_affix_num_tokens: typing.NotRequired[int] + highlight_start_tag: typing.NotRequired[str] + highlight_end_tag: typing.NotRequired[str] + enable_highlight_v1: typing.NotRequired[bool] + snippet_threshold: typing.NotRequired[int] + limit_hits: typing.NotRequired[int] + search_cutoff_ms: typing.NotRequired[int] + exhaustive_search: typing.NotRequired[bool] + + +class TypoToleranceParameters(typing.TypedDict): + """ + Parameters regarding [typo tolerance in search results](https://typesense.org/docs/26.0/api/search.html#typo-tolerance-parameters). + + Attributes: + num_typos (int): Number of typos to allow in search results. + min_len_1typo (int): Minimum length of query to allow one typo. + min_len_2typo (int): Minimum length of query to allow two typos. + split_join_tokens (str): Treat space as a typo. + typo_tokens_threshold (int): Threshold for typo tokens. + drop_tokens_threshold (int): Threshold for dropping tokens. + drop_tokens_mode (str): Mode for dropping tokens. + + + - `right_to_left`: Drop tokens from right to left (default). + - `left_to_right`: Drop tokens from left to right. + - `both_sides:3`: Drop tokens from both sides with a threshold of 3. + Afterwards, drops back to the default right to left. + + enable_typos_for_numerical_tokens (bool): Set this parameter to `false` to disable typos on numerical query tokens. + enable_typos_for_alpha_numerical_tokens (bool): Set this parameter to `false` to disable typos on alphanumerical query tokens. + synonym_num_typos (int): Allow synonym resolution on typo-corrected words in the query. 
+ """ + + num_typos: typing.NotRequired[int] + min_len_1typo: typing.NotRequired[int] + min_len_2typo: typing.NotRequired[int] + split_join_tokens: typing.NotRequired[typing.Literal["off", "fallback", "always"]] + typo_tokens_threshold: typing.NotRequired[int] + drop_tokens_threshold: typing.NotRequired[int] + drop_tokens_mode: typing.NotRequired[ + typing.Literal["right_to_left", "left_to_right", "both_sides:3"] + ] + enable_typos_for_numerical_tokens: typing.NotRequired[bool] + enable_typos_for_alpha_numerical_tokens: typing.NotRequired[bool] + synonym_num_typos: typing.NotRequired[int] + + +class CachingParameters(typing.TypedDict): + """ + Parameters regarding [caching search results](https://typesense.org/docs/26.0/api/search.html#caching-parameters). + + Attributes: + use_cache (bool): Use cache for search results. + cache_ttl (int): The duration (in seconds) that determines how long the search query is cached. + """ + + use_cache: typing.NotRequired[bool] + cache_ttl: typing.NotRequired[int] + + +class NLLanguageParameters(typing.TypedDict): + """ + Parameters regarding [caching search results](https://typesense.org/docs/26.0/api/search.html#caching-parameters). + + Attributes: + nl_query_prompt_cache_ttl (int): The duration (in seconds) that determines how long the schema prompts are cached. + nl_query (bool): Whether to use natural language in the query or not. + nl_model_id (str): The ID of the natural language model to use for the query. + nl_query_debug (bool): Whether to return the raw LLM response or not. 
+ """ + + nl_query_prompt_cache_ttl: typing.NotRequired[int] + nl_query: typing.NotRequired[bool] + nl_model_id: typing.NotRequired[str] + nl_query_debug: typing.NotRequired[bool] + + +class SearchParameters( + RequiredSearchParameters, + QueryParameters, + FilterParameters, + RankingAndSortingParameters, + PaginationParameters, + FacetingParameters, + GroupingParameters, + ResultsParameters, + TypoToleranceParameters, + CachingParameters, + NLLanguageParameters, +): + """Parameters for searching documents.""" + + +class MultiSearchParameters(SearchParameters): + """ + Parameters for performing a [Federated/Multi-Search](https://typesense.org/docs/26.0/api/federated-multi-search.html#federated-multi-search). + + Attributes: + collection (str): Collection to search in. + + Plus all the parameters from `SearchParameters`. + """ + + collection: str + rerank_hybrid_matches: typing.NotRequired[bool] + + +class MultiSearchCommonParameters( + QueryParameters, + FilterParameters, + RankingAndSortingParameters, + PaginationParameters, + FacetingParameters, + GroupingParameters, + ResultsParameters, + TypoToleranceParameters, + CachingParameters, +): + """ + [Query parameters](https://typesense.org/docs/26.0/api/federated-multi-search.html#multi-search-parameters) for multi-search. + + Attributes: + query_by (str): Field to search in. + limit_multi_searches (int): Limit the number of multi-searches. + x_typesense_api_key (str): API key for Typesense. + + You can also use any of the parameters from `SearchParameters`. 
+ """ + + query_by: typing.NotRequired[str] + limit_multi_searches: typing.NotRequired[int] + x_typesense_api_key: typing.NotRequired[str] + + +class GenerateScopedSearchKeyParams( + QueryParameters, + FilterParameters, + RankingAndSortingParameters, + PaginationParameters, + FacetingParameters, + GroupingParameters, + ResultsParameters, + TypoToleranceParameters, + CachingParameters, +): + """ + Parameters for generating a [scoped search key](https://typesense.org/docs/26.0/api/api-keys.html#generate-scoped-search-key). + + Attributes: + q (str): Query string to search for. + query_by (str): Field to search in. + filter_by (str): Filter to apply to search results. + expires_at (int): Expiry time (in UNIX timestamp format) for the scoped search key. + limit_multi_searches (int): Limit the number of multi-searches. + + You can also embed any of the parameters from `SearchParameters`. + """ + + q: typing.NotRequired[str] + query_by: typing.NotRequired[str] + expires_at: typing.NotRequired[int] + limit_multi_searches: typing.NotRequired[int] + + +class FacetCountSchema(typing.TypedDict): + """ + Schema for facet count. + + Attributes: + count (int): Number of occurrences of the facet value. + value (str): Value of the facet. + highlighted (str): Highlighted value of the facet. + """ + + count: int + value: str + highlighted: str + + +class FacetCountStats(typing.TypedDict): + """ + Statistics for facet count. + + Attributes: + min (float): Minimum value of the facet. + max (float): Maximum value of the facet. + avg (float): Average value of the facet. + sum (float): Sum of the facet values. + total_values (int): Total number of values. + """ + + min: typing.NotRequired[float] + max: typing.NotRequired[float] + avg: typing.NotRequired[float] + sum: typing.NotRequired[float] + total_values: typing.NotRequired[int] + + +class SearchResponseFacetCountSchema(typing.TypedDict): + """ + Schema for the search response facet count. 
+ + Attributes: + counts (list[FacetCountSchema]): List of facet counts. + field_name (str): Name of the field. + stats (FacetCountStats): Statistics for the facet count. + """ + + counts: typing.List[FacetCountSchema] + field_name: str + stats: FacetCountStats + + +class Highlight(typing.TypedDict): + """ + Schema for highlighting search results. + + Attributes: + matched_tokens (list[str]): List of matched tokens. + snippet (str): Snippet of the matched tokens. + value (str): Value of the matched tokens. + """ + + matched_tokens: typing.List[str] + snippet: str + value: str + + +class HighlightExtended(Highlight): + """ + Extended schema for highlighting search results. + + Attributes: + field (str): Field to highlight. + + Plus all the parameters from `Highlight`. + """ + + field: str + + +class TextMatchInfo(typing.TypedDict): + """ + Schema for text match information. + + Attributes: + best_field_score (str): Best field score. + best_field_weight (int): Best field weight. + fields_matched (int): Number of fields matched. + score (str): Score of the text match. + typo_prefix_score (int): Typo prefix score. + num_tokens_dropped (int): Number of tokens dropped. + tokens_matched (int): Number of tokens matched. + """ + + best_field_score: str + best_field_weight: int + fields_matched: int + score: str + typo_prefix_score: int + num_tokens_dropped: int + tokens_matched: int + + +class Hit(typing.Generic[TDoc], typing.TypedDict): + """ + Schema for a hit in search results. + + Attributes: + document (TDoc): Document in the hit. + highlights (list[HighlightExtended]): List of highlights in the hit. + highlight (dict[str, Highlight]): Dictionary of highlights in the hit. + text_match (int): Text match in the hit. + text_match_info (TextMatchInfo): Text match information in the hit. 
+ """ + + document: TDoc + highlights: typing.List[HighlightExtended] + highlight: typing.Dict[str, Highlight] + text_match: int + text_match_info: TextMatchInfo + + +class GroupedHit(typing.Generic[TDoc], typing.TypedDict): + """ + Schema for grouped hits in search results. + + Attributes: + group_key (list[str]): List of group keys. + hits (list[Hit[TDoc]]): List of hits in the group. + found (int): Number of hits found. + """ + + group_key: typing.List[str] + hits: typing.List[Hit[TDoc]] + found: typing.NotRequired[int] + + +class ConversationHistory(typing.TypedDict): + """ + Schema for a conversation's history in the search results. + + Attributes: + conversation (list[object]): List of conversation objects. + id (str): ID of the conversation. + last_updated (int): Last updated time of the conversation. + ttl (int): Time to live of the conversation. + """ + + conversation: typing.List[object] + id: str + last_updated: int + ttl: int + + +class Conversation(typing.TypedDict): + """ + Schema for a conversation in the search results. + + Attributes: + answer (str): Answer to the query. + conversation_history (ConversationHistory): Conversation history. + conversation_id (str): ID of the conversation. + query (str): Query of the conversation. + """ + + answer: str + conversation_history: ConversationHistory + conversation_id: str + query: str + + +class LLMResponse(typing.TypedDict): + """ + Schema for a raw LLM response. + + Attributes: + content (str): Content of the LLM response. + extraction_method (str): Extraction method of the LLM response (e.g. "regex"). + model (str): Model used to generate the response. + """ + + content: str + extraction_method: str + model: str + + +class ParsedNLQuery(typing.TypedDict): + """ + Schema for a parsed natural language query. + + Attributes: + parse_time_ms (int): Parse time in milliseconds. + generated_params (SearchParameters): Generated parameters. + augmented_params (SearchParameters): Augmented parameters. 
+ llm_response (LLMResponse): Raw LLM response. + """ + + parse_time_ms: int + generated_params: SearchParameters + augmented_params: SearchParameters + llm_response: typing.NotRequired[LLMResponse] + + +class SearchResponse(typing.Generic[TDoc], typing.TypedDict): + """ + Schema for a search response. + + Attributes: + facet_counts (list[SearchResponseFacetCountSchema]): List of facet counts. + found (int): Number of documents found. + found_docs (int): Number of documents found. + page (int): Page number of the search results. + out_of (int): Number of documents found out of the whole dataset. + search_time_ms (int): Search time in milliseconds. + search_cutoff (bool): Search cutoff. + hits (list[Hit[TDoc]]): List of hits in the search results. + grouped_hits (list[GroupedHit[TDoc]]): List of grouped hits in the search results. + conversation (Conversation): Conversation in the search results. + parsed_nl_query (ParsedNLQuery): Information about the natural language query + """ + + facet_counts: typing.List[SearchResponseFacetCountSchema] + found: int + found_docs: typing.NotRequired[int] + page: int + out_of: int + search_time_ms: int + search_cutoff: typing.NotRequired[bool] + hits: typing.List[Hit[TDoc]] + grouped_hits: typing.NotRequired[typing.List[GroupedHit[TDoc]]] + conversation: typing.NotRequired[Conversation] + parsed_nl_query: typing.NotRequired[ParsedNLQuery] + + +class DeleteSingleDocumentParameters(typing.TypedDict): + """ + Parameters for deleting a single document. + + Attributes: + ignore_not_found (bool): Ignore not found documents. + """ + + ignore_not_found: typing.NotRequired[bool] + + +class TruncateDeleteParameters(typing.TypedDict): + """ + Parameters for truncating a collection (deleting all documents, keeping schema). + + Attributes: + truncate (bool): Truncate the collection, keeping just the schema. + """ + + truncate: bool + + +class FilterDeleteParameters(typing.TypedDict): + """ + Parameters for deleting documents by filter. 
+ + Attributes: + filter_by (str): Filter to apply to documents. + batch_size (int): Batch size for deleting documents. + ignore_not_found (bool): Ignore not found documents. + """ + + filter_by: str + batch_size: typing.NotRequired[int] + ignore_not_found: typing.NotRequired[bool] + + +DeleteQueryParameters = typing.Union[TruncateDeleteParameters, FilterDeleteParameters] +""" +Discriminated union of parameters for deleting documents. + +Either: + - TruncateDeleteParameters: Use truncate to delete all documents, keeping the schema. + - FilterDeleteParameters: Use filter_by (and optionally batch_size, ignore_not_found) to delete specific documents. +""" + + +class DeleteResponse(typing.TypedDict): + """ + Response from deleting documents. + + Attributes: + num_deleted (int): Number of documents deleted. + """ + + num_deleted: int + + +class RetrieveParameters(typing.TypedDict): + """ + Parameters for retrieving documents. + + Attributes: + include_fields (str): Fields to include in the retrieved documents. + exclude_fields (str): Fields to exclude from the retrieved documents. 
+ """ + + include_fields: typing.NotRequired[typing.Union[str, typing.List[str]]] + exclude_fields: typing.NotRequired[typing.Union[str, typing.List[str]]] diff --git a/src/typesense/types/key.py b/src/typesense/types/key.py new file mode 100644 index 0000000..51cb2a0 --- /dev/null +++ b/src/typesense/types/key.py @@ -0,0 +1,164 @@ +"""Types for API keys.""" + +import sys + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + + +_CollectionActions = typing.Literal[ + "collections:list", + "collections:get", + "collections:delete", + "collections:create", + "collections:*", +] + +_DocumentActions = typing.Literal[ + "documents:*", + "documents:export", + "documents:import", + "documents:delete", + "documents:update", + "documents:upsert", + "documents:create", + "documents:get", + "documents:search", +] + +_AliasActions = typing.Literal[ + "aliases:*", + "aliases:delete", + "aliases:create", + "aliases:get", + "aliases:list", +] + +_SynonymActions = typing.Literal[ + "synonyms:*", + "synonyms:delete", + "synonyms:create", + "synonyms:get", + "synonyms:list", +] + +_OverrideActions = typing.Literal[ + "overrides:*", + "overrides:delete", + "overrides:create", + "overrides:get", + "overrides:list", +] + +_StopwordActions = typing.Literal[ + "stopwords:*", + "stopwords:delete", + "stopwords:create", + "stopwords:get", + "stopwords:list", +] + +_KeyActions = typing.Literal[ + "keys:*", + "keys:delete", + "keys:create", + "keys:get", + "keys:list", +] + +_MiscActions = typing.Literal[ + "*", + "debug:list", + "stats.json:list", + "metrics.json:list", +] + +_Actions = typing.Union[ + _CollectionActions, + _DocumentActions, + _AliasActions, + _SynonymActions, + _OverrideActions, + _StopwordActions, + _KeyActions, + _MiscActions, +] + + +class ApiKeyCreateSchema(typing.TypedDict): + """ + Schema for creating a [new API key](https://typesense.org/docs/26.0/api/api-keys.html#create-an-api-key). 
+ + Attributes: + actions (list[str]): The actions allowed for this key. + collections (list[str]): The collections this key has access to. + description (str): The description for this key. + value (str): The value of the key. + expires_at (int): The time in UNIX timestamp format when the key will expire. + autodelete (bool): Whether the key should be deleted after it expires. + """ + + actions: typing.List[_Actions] + collections: typing.List[str] + description: str + value: typing.NotRequired[str] + expires_at: typing.NotRequired[int] + autodelete: typing.NotRequired[bool] + + +class ApiKeyCreateResponseSchema(ApiKeyCreateSchema): + """ + Response schema for creating a [new API key](https://typesense.org/docs/26.0/api/api-keys.html#create-an-api-key). + + Attributes: + id (int): The ID of the key. + + Plus all the attributes from `ApiKeyCreateSchema`. + """ + + id: int + + +class ApiKeySchema(typing.TypedDict): + """ + Response schema for an [API key](https://typesense.org/docs/26.0/api/api-keys.html#retrieve-an-api-key). + + Attributes: + actions (list[str]): The actions allowed for this key. + collections (list[str]): The collections this key has access to. + description (str): The description for this key. + id (int): The ID of the key. + value_prefix (str): The value prefix of the key. + expires_at (int): The time in UNIX timestamp format when the key + """ + + actions: typing.List[_Actions] + collections: typing.List[str] + description: str + id: int + value_prefix: str + expires_at: int + + +class ApiKeyRetrieveSchema(typing.TypedDict): + """ + Response schema for retrieving [API keys](https://typesense.org/docs/26.0/api/api-keys.html#list-all-keys). + + Attributes: + keys (list[ApiKeySchema]): The list of keys. + """ + + keys: typing.List[ApiKeySchema] + + +class ApiKeyDeleteSchema(typing.TypedDict): + """ + Response schema for deleting an [API key](https://typesense.org/docs/26.0/api/api-keys.html#delete-api-key). 
+ + Attributes: + id (int): The ID of the key. + """ + + id: int diff --git a/src/typesense/types/metrics.py b/src/typesense/types/metrics.py new file mode 100644 index 0000000..6b5a3f2 --- /dev/null +++ b/src/typesense/types/metrics.py @@ -0,0 +1,65 @@ +""" +Typed dictionaries for Typesense metrics responses. +""" + +import sys + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + + +class MetricsResponseBase(typing.TypedDict): + """ + Response schema for metrics retrieval. + + This TypedDict includes system metrics like CPU, memory, disk, and network usage, + as well as Typesense-specific memory metrics. + """ + + system_cpu_active_percentage: str + system_disk_total_bytes: str + system_disk_used_bytes: str + system_memory_total_bytes: str + system_memory_used_bytes: str + system_network_received_bytes: str + system_network_sent_bytes: str + typesense_memory_active_bytes: str + typesense_memory_allocated_bytes: str + typesense_memory_fragmentation_ratio: str + typesense_memory_mapped_bytes: str + typesense_memory_metadata_bytes: str + typesense_memory_resident_bytes: str + typesense_memory_retained_bytes: str + + +class MetricsResponse(MetricsResponseBase): + """Extended MetricsResponse with optional per-CPU core metrics.""" + + system_memory_total_swap_bytes: str + system_memory_used_swap_bytes: str + system_cpu1_active_percentage: typing.Optional[str] + system_cpu2_active_percentage: typing.Optional[str] + system_cpu3_active_percentage: typing.Optional[str] + system_cpu4_active_percentage: typing.Optional[str] + system_cpu5_active_percentage: typing.Optional[str] + system_cpu6_active_percentage: typing.Optional[str] + system_cpu7_active_percentage: typing.Optional[str] + system_cpu8_active_percentage: typing.Optional[str] + system_cpu9_active_percentage: typing.Optional[str] + system_cpu10_active_percentage: typing.Optional[str] + system_cpu11_active_percentage: typing.Optional[str] + system_cpu12_active_percentage: 
typing.Optional[str] + system_cpu13_active_percentage: typing.Optional[str] + system_cpu14_active_percentage: typing.Optional[str] + system_cpu15_active_percentage: typing.Optional[str] + system_cpu16_active_percentage: typing.Optional[str] + system_cpu17_active_percentage: typing.Optional[str] + system_cpu18_active_percentage: typing.Optional[str] + system_cpu19_active_percentage: typing.Optional[str] + system_cpu20_active_percentage: typing.Optional[str] + system_cpu21_active_percentage: typing.Optional[str] + system_cpu22_active_percentage: typing.Optional[str] + system_cpu23_active_percentage: typing.Optional[str] + system_cpu24_active_percentage: typing.Optional[str] diff --git a/src/typesense/types/multi_search.py b/src/typesense/types/multi_search.py new file mode 100644 index 0000000..3619c0b --- /dev/null +++ b/src/typesense/types/multi_search.py @@ -0,0 +1,33 @@ +"""Types for multi-search.""" + +import sys + +from typesense.types.document import MultiSearchParameters, SearchResponse + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + + +class MultiSearchResponse(typing.TypedDict): + """ + Response schema for multi-search. + + Attributes: + results (list[SearchResponse]): The search results. + """ + + results: typing.List[SearchResponse[typing.Any]] # noqa: WPS110 + + +class MultiSearchRequestSchema(typing.TypedDict): + """ + Schema for multi-search request. + + Attributes: + searches (list[MultiSearchParameters]): The search parameters. 
+ """ + + union: typing.NotRequired[typing.Literal[True]] + searches: typing.List[MultiSearchParameters] diff --git a/src/typesense/types/nl_search_model.py b/src/typesense/types/nl_search_model.py new file mode 100644 index 0000000..5ad4570 --- /dev/null +++ b/src/typesense/types/nl_search_model.py @@ -0,0 +1,140 @@ +"""NLSearchModel types for Typesense Python Client.""" + +import sys + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + + +class NLSearchModelBase(typing.TypedDict): + """ + Base schema with all possible fields for NL search models. + + Attributes: + model_name (str): Name of the LLM model. + api_key (str): The LLM service's API Key. + api_url (str): The API URL for the LLM service. + max_bytes (int): The maximum number of bytes to send to the LLM. + temperature (float): The temperature parameter for the LLM. + system_prompt (str): The system prompt for the LLM. + top_p (float): The top_p parameter (Google-specific). + top_k (int): The top_k parameter (Google-specific). + stop_sequences (list[str]): Stop sequences for the LLM (Google-specific). + api_version (str): API version (Google-specific). + project_id (str): GCP project ID (GCP Vertex AI specific). + access_token (str): Access token for GCP (GCP Vertex AI specific). + refresh_token (str): Refresh token for GCP (GCP Vertex AI specific). + client_id (str): Client ID for GCP (GCP Vertex AI specific). + client_secret (str): Client secret for GCP (GCP Vertex AI specific). + region (str): Region for GCP (GCP Vertex AI specific). + max_output_tokens (int): Maximum output tokens (GCP Vertex AI specific). + account_id (str): Account ID (Cloudflare specific). 
+ """ + + model_name: str + api_key: typing.NotRequired[str] + api_url: typing.NotRequired[str] + max_bytes: typing.NotRequired[int] + temperature: typing.NotRequired[float] + system_prompt: typing.NotRequired[str] + # Google-specific parameters + top_p: typing.NotRequired[float] + top_k: typing.NotRequired[int] + stop_sequences: typing.NotRequired[typing.List[str]] + api_version: typing.NotRequired[str] + # GCP Vertex AI specific + project_id: typing.NotRequired[str] + access_token: typing.NotRequired[str] + refresh_token: typing.NotRequired[str] + client_id: typing.NotRequired[str] + client_secret: typing.NotRequired[str] + region: typing.NotRequired[str] + max_output_tokens: typing.NotRequired[int] + # Cloudflare specific + account_id: typing.NotRequired[str] + + +class NLSearchModelCreateSchema(NLSearchModelBase): + """ + Schema for creating a new NL search model. + + Attributes: + id (str): The custom ID of the model. + """ + + id: typing.NotRequired[str] + + +class NLSearchModelUpdateSchema(typing.TypedDict): + """ + Base schema with all possible fields for NL search models. + + Attributes: + model_name (str): Name of the LLM model. + api_key (str): The LLM service's API Key. + api_url (str): The API URL for the LLM service. + max_bytes (int): The maximum number of bytes to send to the LLM. + temperature (float): The temperature parameter for the LLM. + system_prompt (str): The system prompt for the LLM. + top_p (float): The top_p parameter (Google-specific). + top_k (int): The top_k parameter (Google-specific). + stop_sequences (list[str]): Stop sequences for the LLM (Google-specific). + api_version (str): API version (Google-specific). + project_id (str): GCP project ID (GCP Vertex AI specific). + access_token (str): Access token for GCP (GCP Vertex AI specific). + refresh_token (str): Refresh token for GCP (GCP Vertex AI specific). + client_id (str): Client ID for GCP (GCP Vertex AI specific). 
+ client_secret (str): Client secret for GCP (GCP Vertex AI specific). + region (str): Region for GCP (GCP Vertex AI specific). + max_output_tokens (int): Maximum output tokens (GCP Vertex AI specific). + account_id (str): Account ID (Cloudflare specific). + """ + + model_name: typing.NotRequired[str] + api_key: typing.NotRequired[str] + api_url: typing.NotRequired[str] + max_bytes: typing.NotRequired[int] + temperature: typing.NotRequired[float] + system_prompt: typing.NotRequired[str] + # Google-specific parameters + top_p: typing.NotRequired[float] + top_k: typing.NotRequired[int] + stop_sequences: typing.NotRequired[typing.List[str]] + api_version: typing.NotRequired[str] + # GCP Vertex AI specific + project_id: typing.NotRequired[str] + access_token: typing.NotRequired[str] + refresh_token: typing.NotRequired[str] + client_id: typing.NotRequired[str] + client_secret: typing.NotRequired[str] + region: typing.NotRequired[str] + max_output_tokens: typing.NotRequired[int] + # Cloudflare specific + account_id: typing.NotRequired[str] + + +class NLSearchModelDeleteSchema(typing.TypedDict): + """ + Schema for deleting an NL search model. + + Attributes: + id (str): The ID of the model. + """ + + id: str + + +class NLSearchModelSchema(NLSearchModelBase): + """ + Schema for an NL search model. + + Attributes: + id (str): The ID of the model. + """ + + id: str + + +NLSearchModelsRetrieveSchema = typing.List[NLSearchModelSchema] diff --git a/src/typesense/types/operations.py b/src/typesense/types/operations.py new file mode 100644 index 0000000..e2a03a3 --- /dev/null +++ b/src/typesense/types/operations.py @@ -0,0 +1,67 @@ +"""Types for operations.""" + +import sys + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + + +class SnapshotParameters(typing.TypedDict): + """ + Parameters for creating a snapshot. + + Attributes: + snapshot_path (str): The path where the snapshot is stored. 
+ """ + + snapshot_path: str + + +class LogSlowRequestsTimeParams(typing.TypedDict): + """ + Parameters for logging slow requests. + + Attributes: + log_slow_requests_time_ms (int): The time in milliseconds to log slow requests. + """ + + log_slow_requests_time_ms: int + + +class HealthCheckResponse(typing.TypedDict): + """ + Response schema for the health check. + + Attributes: + ok (bool): The status of the health check. + """ + + ok: bool + + +class SchemaChangesResponse(typing.TypedDict): + """ + Response schema for schema changes. + + Attributes: + collection (str): The name of the collection. + validated_docs (int): The number of validated documents. + altered_docs (int): The number of altered documents + """ + + collection: str + validated_docs: int + altered_docs: int + + +class OperationResponse(typing.TypedDict): + """ + Response schema for operations. + + Attributes: + success (bool): The status of the operation. + """ + + success: bool diff --git a/src/typesense/types/override.py b/src/typesense/types/override.py new file mode 100644 index 0000000..134716c --- /dev/null +++ b/src/typesense/types/override.py @@ -0,0 +1,100 @@ +"""Override types for Typesense Python Client.""" + +import sys + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + + +class OverrideQueryRuleSchema(typing.TypedDict): + """ + The schema for the rule field in the Overrides.upsert method. + + Attributes: + query (str): The query string. + match (typing.Literal['contains', 'exact']): The match type. + filter_by (str): The filter string. + tags (list[str]): The tags list. + """ + + query: str + match: typing.Literal["contains", "exact"] + filter_by: typing.NotRequired[str] + tags: typing.NotRequired[typing.List[str]] + + +class OverrideFilterSchema(typing.TypedDict): + """ + The schema for the rule field in the Overrides.upsert method. + + Attributes: + filter_by (str): The filter string. + tags (list[str]): The tags list. 
+ """ + + filter_by: str + tags: typing.NotRequired[typing.List[str]] + + +class IncludesSchema(typing.TypedDict): + """ + The schema for the includes field in the Overrides.upsert method. + + Attributes: + id (str): The ID of the document. + position (int): The position of the ID in the response. + """ + + id: str + position: int + + +class OverrideCreateSchema(typing.TypedDict): + """ + The schema for the request of the Overrides.upsert method. + + Attributes: + rule (OverrideQueryRuleSchema | OverrideFilterSchema): The rule. + sort_by (str): The sort by string. + filter_by (str): The filter by string. + excludes (list[str]): The excludes list. + replace_query (str): The replace query string. + includes (list[IncludesSchema]): The includes list. + metadata (dict[str, str]): The metadata dictionary. + filter_curated_hits (bool): Whether to filter curated hits. + effective_from_ts (int): The effective from timestamp. + effective_to_ts (int): The effective to timestamp. + stop_processing (bool): Whether to stop processing. 
+    """
+
+    rule: typing.Union[OverrideQueryRuleSchema, OverrideFilterSchema]
+    sort_by: typing.NotRequired[str]
+    filter_by: typing.NotRequired[str]
+    excludes: typing.NotRequired[typing.List[str]]
+    replace_query: typing.NotRequired[str]
+    includes: typing.NotRequired[typing.List[IncludesSchema]]
+    metadata: typing.NotRequired[typing.Dict[str, str]]
+    filter_curated_hits: typing.NotRequired[bool]
+    effective_from_ts: typing.NotRequired[int]
+    effective_to_ts: typing.NotRequired[int]
+    stop_processing: typing.NotRequired[bool]
+
+
+class OverrideSchema(OverrideCreateSchema):
+    """The schema for the response of the Overrides.upsert method."""
+
+    id: str
+
+
+class OverrideDeleteSchema(typing.TypedDict):
+    """The schema for the response of the Overrides.delete method."""
+
+    id: str
+
+
+class OverrideRetrieveSchema(typing.TypedDict):
+    """The schema for the response of the Overrides.retrieve method."""
+
+    overrides: typing.List[OverrideSchema]
diff --git a/src/typesense/types/stemming.py b/src/typesense/types/stemming.py
new file mode 100644
index 0000000..2cc3dd9
--- /dev/null
+++ b/src/typesense/types/stemming.py
+"""Stemming types for Typesense Python Client."""
+
+import sys
+
+if sys.version_info >= (3, 11):
+    import typing
+else:
+    import typing_extensions as typing
+
+
+class StemmingDictionaryCreateSchema(typing.TypedDict):
+    """
+    Schema for creating a [stemming dictionary](https://typesense.org/docs/28/api/stemming.html#creating-a-stemming-dictionary).
+
+    Attributes:
+        word (str): The word to be stemmed.
+        root (str): The root form that the word maps to.
+    """
+
+    word: str
+    root: str
+
+
+class StemmingDictionarySchema(typing.TypedDict):
+    """
+    Schema for a stemming dictionary.
+
+    Attributes:
+        id (str): The ID of the stemming dictionary.
+        words (list[StemmingDictionaryCreateSchema]): The list of words and their roots in the stemming dictionary.
+    """
+
+    id: str
+    words: typing.List[StemmingDictionaryCreateSchema]
+
+
+class StemmingDictionariesRetrieveSchema(typing.TypedDict):
+    """
+    Schema for retrieving stemming dictionaries.
+
+    Attributes:
+        dictionaries (list[str]): The list of stemming dictionary names.
+    """
+
+    dictionaries: typing.List[str]
diff --git a/src/typesense/types/stopword.py b/src/typesense/types/stopword.py
new file mode 100644
index 0000000..273adb4
--- /dev/null
+++ b/src/typesense/types/stopword.py
+"""Stopword types for Typesense Python Client."""
+
+import sys
+
+from typesense.types.collection import Locales
+
+if sys.version_info >= (3, 11):
+    import typing
+else:
+    import typing_extensions as typing
+
+
+class StopwordCreateSchema(typing.TypedDict):
+    """
+    Schema for creating a new stopword.
+
+    Attributes:
+        stopwords (list[str]): The stopwords to be added.
+    """
+
+    stopwords: typing.List[str]
+    locale: typing.NotRequired[Locales]
+
+
+class StopwordSchema(StopwordCreateSchema):
+    """
+    Schema for a stopword.
+
+    Attributes:
+        id (str): The ID of the stopword set, plus the fields from StopwordCreateSchema.
+    """
+
+    id: str
+
+
+class StopwordsSingleRetrieveSchema(typing.TypedDict):
+    """
+    Response schema for retrieving a single stopword.
+
+    Attributes:
+        stopwords (StopwordSchema): The Stopword.
+    """
+
+    stopwords: StopwordSchema
+
+
+class StopwordsRetrieveSchema(typing.TypedDict):
+    """
+    Response schema for retrieving stopwords.
+
+    Attributes:
+        stopwords (list[StopwordSchema]): The list of stopword sets.
+    """
+
+    stopwords: typing.List[StopwordSchema]
+
+
+class StopwordDeleteSchema(typing.TypedDict):
+    """
+    Response schema for deleting a stopword.
+
+    Attributes:
+        id (str): The ID of the stopword.
+ """ + + id: str diff --git a/src/typesense/types/synonym.py b/src/typesense/types/synonym.py new file mode 100644 index 0000000..718df5c --- /dev/null +++ b/src/typesense/types/synonym.py @@ -0,0 +1,71 @@ +"""Synonym types for Typesense Python Client.""" + +import sys + +from typesense.types.collection import Locales + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + + +class SynonymCreateSchema(typing.TypedDict): + """ + The schema for the request of the Synonyms.upsert method. + + Attributes: + synonyms (list[str]): The synonyms list. + + root (str): The root string. + + locale (Locales): The locale. + + symbols_to_index (list[str]): The symbols to index. + """ + + synonyms: typing.List[str] + root: typing.NotRequired[str] + locale: typing.NotRequired[Locales] + symbols_to_index: typing.NotRequired[typing.List[str]] + + +class SynonymSchema(SynonymCreateSchema): + """ + The schema for the response of the Synonyms.upsert method. + + Attributes: + id (str): The ID of the synonym. + + synonyms (list[str]): The synonyms list. + + root (str): The root string. + + locale (Locales): The locale. + + symbols_to_index (list[str]): The symbols to index. + """ + + id: str + + +class SynonymsRetrieveSchema(typing.TypedDict): + """ + The schema for the response of the Synonyms.retrieve method. + + Attributes: + synonyms(list[SynonymSchema]): The list of synonyms. + """ + + synonyms: typing.List[SynonymSchema] + + +class SynonymDeleteSchema(typing.TypedDict): + """ + The schema for the response of the Synonyms.delete method. + + Attributes: + id (str): The ID of the synonym. 
+ """ + + id: str diff --git a/src/typesense/types/synonym_set.py b/src/typesense/types/synonym_set.py new file mode 100644 index 0000000..d036411 --- /dev/null +++ b/src/typesense/types/synonym_set.py @@ -0,0 +1,78 @@ +"""Synonym Set types for Typesense Python Client.""" + +import sys + +from typesense.types.collection import Locales + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + + +class SynonymItemSchema(typing.TypedDict): + """ + Schema representing an individual synonym item inside a synonym set. + + Attributes: + id (str): Unique identifier for the synonym item. + synonyms (list[str]): The synonyms array. + root (str, optional): For 1-way synonyms, indicates the root word that words in + the synonyms parameter map to. + locale (Locales, optional): Locale for the synonym. + symbols_to_index (list[str], optional): Symbols to index as-is in synonyms. + """ + + id: str + synonyms: typing.List[str] + root: typing.NotRequired[str] + locale: typing.NotRequired[Locales] + symbols_to_index: typing.NotRequired[typing.List[str]] + + +class SynonymItemDeleteSchema(typing.TypedDict): + """ + Schema for deleting a synonym item. + """ + + id: str + + +class SynonymSetCreateSchema(typing.TypedDict): + """ + Schema for creating or updating a synonym set. + + Attributes: + items (list[SynonymItemSchema]): Array of synonym items. + """ + + items: typing.List[SynonymItemSchema] + + +class SynonymSetSchema(SynonymSetCreateSchema): + """ + Schema representing a synonym set. + + Attributes: + name (str): Name of the synonym set. + """ + + name: str + + +class SynonymSetsRetrieveSchema(typing.List[SynonymSetSchema]): + """Deprecated alias for list of synonym sets; use List[SynonymSetSchema] directly.""" + + +class SynonymSetRetrieveSchema(SynonymSetCreateSchema): + """Response schema for retrieving a single synonym set by name.""" + + +class SynonymSetDeleteSchema(typing.TypedDict): + """Response schema for deleting a synonym set. 
+ + Attributes: + name (str): Name of the deleted synonym set. + """ + + name: str diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 0000000..b12da74 --- /dev/null +++ b/tests/__init__.py @@ -0,0 +1 @@ +"""Tests for the Typesense Python Client.""" diff --git a/tests/alias_test.py b/tests/alias_test.py new file mode 100644 index 0000000..519a130 --- /dev/null +++ b/tests/alias_test.py @@ -0,0 +1,119 @@ +"""Tests for the Alias class.""" + +from tests.utils.object_assertions import ( + assert_match_object, + assert_object_lists_match, + assert_to_contain_object, +) +from typesense.sync.alias import Alias +from typesense.sync.aliases import Aliases +from typesense.sync.api_call import ApiCall +from typesense.async_.api_call import AsyncApiCall +from typesense.async_.alias import AsyncAlias +from typesense.async_.aliases import AsyncAliases + + +def test_init(fake_api_call: ApiCall) -> None: + """Test that the Alias object is initialized correctly.""" + alias = Alias(fake_api_call, "company_alias") + + assert alias.name == "company_alias" + assert_match_object(alias.api_call, fake_api_call) + assert_object_lists_match( + alias.api_call.node_manager.nodes, + fake_api_call.node_manager.nodes, + ) + assert_match_object( + alias.api_call.config.nearest_node, + fake_api_call.config.nearest_node, + ) + assert alias._endpoint_path == "/aliases/company_alias" # noqa: WPS437 + + +def test_init_async(fake_async_api_call: AsyncApiCall) -> None: + """Test that the AsyncAlias object is initialized correctly.""" + alias = AsyncAlias(fake_async_api_call, "company_alias") + + assert alias.name == "company_alias" + assert_match_object(alias.api_call, fake_async_api_call) + assert_object_lists_match( + alias.api_call.node_manager.nodes, + fake_async_api_call.node_manager.nodes, + ) + assert_match_object( + alias.api_call.config.nearest_node, + fake_async_api_call.config.nearest_node, + ) + assert alias._endpoint_path == "/aliases/company_alias" # noqa: WPS437 
+ + +def test_actual_retrieve( + actual_aliases: Aliases, + delete_all_aliases: None, + delete_all: None, + create_alias: None, +) -> None: + """Test that the Alias object can retrieve an alias from Typesense Server.""" + response = actual_aliases["company_alias"].retrieve() + + assert response["collection_name"] == "companies" + assert response["name"] == "company_alias" + + assert_to_contain_object( + response, + { + "collection_name": "companies", + "name": "company_alias", + }, + ) + + +def test_actual_delete( + actual_aliases: Aliases, + delete_all_aliases: None, + delete_all: None, + create_alias: None, +) -> None: + """Test that the Alias object can delete an alias from Typesense Server.""" + response = actual_aliases["company_alias"].delete() + + assert response == { + "collection_name": "companies", + "name": "company_alias", + } + + +async def test_actual_retrieve_async( + actual_async_aliases: AsyncAliases, + delete_all_aliases: None, + delete_all: None, + create_alias: None, +) -> None: + """Test that the AsyncAlias object can retrieve an alias from Typesense Server.""" + response = await actual_async_aliases["company_alias"].retrieve() + + assert response["collection_name"] == "companies" + assert response["name"] == "company_alias" + + assert_to_contain_object( + response, + { + "collection_name": "companies", + "name": "company_alias", + }, + ) + + +async def test_actual_delete_async( + actual_async_aliases: AsyncAliases, + delete_all_aliases: None, + delete_all: None, + create_alias: None, +) -> None: + """Test that the AsyncAlias object can delete an alias from Typesense Server.""" + response = await actual_async_aliases["company_alias"].delete() + + assert response == { + "collection_name": "companies", + "name": "company_alias", + } diff --git a/tests/aliases_test.py b/tests/aliases_test.py new file mode 100644 index 0000000..02c9e14 --- /dev/null +++ b/tests/aliases_test.py @@ -0,0 +1,206 @@ +"""Tests for the Aliases class.""" + +from 
tests.utils.object_assertions import ( + assert_match_object, + assert_object_lists_match, + assert_to_contain_object, +) +from typesense.sync.aliases import Aliases +from typesense.sync.api_call import ApiCall +from typesense.async_.api_call import AsyncApiCall +from typesense.async_.aliases import AsyncAliases + + +def test_init(fake_api_call: ApiCall) -> None: + """Test that the Aliases object is initialized correctly.""" + aliases = Aliases(fake_api_call) + + assert_match_object(aliases.api_call, fake_api_call) + assert_object_lists_match( + aliases.api_call.node_manager.nodes, + fake_api_call.node_manager.nodes, + ) + assert_match_object( + aliases.api_call.config.nearest_node, + fake_api_call.config.nearest_node, + ) + + assert not aliases.aliases + + +def test_init_async(fake_async_api_call: AsyncApiCall) -> None: + """Test that the AsyncAliases object is initialized correctly.""" + aliases = AsyncAliases(fake_async_api_call) + + assert_match_object(aliases.api_call, fake_async_api_call) + assert_object_lists_match( + aliases.api_call.node_manager.nodes, + fake_async_api_call.node_manager.nodes, + ) + assert_match_object( + aliases.api_call.config.nearest_node, + fake_async_api_call.config.nearest_node, + ) + + assert not aliases.aliases + + +def test_get_missing_alias(fake_aliases: Aliases) -> None: + """Test that the Aliases object can get a missing alias.""" + alias = fake_aliases["company_alias"] + + assert alias.name == "company_alias" + assert_match_object(alias.api_call, fake_aliases.api_call) + assert_object_lists_match( + alias.api_call.node_manager.nodes, fake_aliases.api_call.node_manager.nodes + ) + assert_match_object( + alias.api_call.config.nearest_node, + fake_aliases.api_call.config.nearest_node, + ) + assert alias._endpoint_path == "/aliases/company_alias" # noqa: WPS437 + + +def test_get_missing_alias_async(fake_async_aliases: AsyncAliases) -> None: + """Test that the AsyncAliases object can get a missing alias.""" + alias = 
fake_async_aliases["company_alias"] + + assert alias.name == "company_alias" + assert_match_object(alias.api_call, fake_async_aliases.api_call) + assert_object_lists_match( + alias.api_call.node_manager.nodes, + fake_async_aliases.api_call.node_manager.nodes, + ) + assert_match_object( + alias.api_call.config.nearest_node, + fake_async_aliases.api_call.config.nearest_node, + ) + assert alias._endpoint_path == "/aliases/company_alias" # noqa: WPS437 + + +def test_get_existing_alias(fake_aliases: Aliases) -> None: + """Test that the Aliases object can get an existing alias.""" + alias = fake_aliases["companies"] + fetched_alias = fake_aliases["companies"] + + assert len(fake_aliases.aliases) == 1 + + assert alias is fetched_alias + + +def test_get_existing_alias_async(fake_async_aliases: AsyncAliases) -> None: + """Test that the AsyncAliases object can get an existing alias.""" + alias = fake_async_aliases["companies"] + fetched_alias = fake_async_aliases["companies"] + + assert len(fake_async_aliases.aliases) == 1 + + assert alias is fetched_alias + + +def test_actual_create(actual_aliases: Aliases, delete_all_aliases: None) -> None: + """Test that the Aliases object can create an alias on Typesense Server.""" + response = actual_aliases.upsert("company_alias", {"collection_name": "companies"}) + + assert response == {"collection_name": "companies", "name": "company_alias"} + + +def test_actual_update( + actual_aliases: Aliases, + delete_all_aliases: None, + delete_all: None, + create_collection: None, + create_another_collection: None, +) -> None: + """Test that the Aliases object can update an alias on Typesense Server.""" + create_response = actual_aliases.upsert( + "company_alias", + {"collection_name": "companies"}, + ) + + assert create_response == {"collection_name": "companies", "name": "company_alias"} + + update_response = actual_aliases.upsert( + "company_alias", + {"collection_name": "companies_2"}, + ) + + assert update_response == { + 
"collection_name": "companies_2", + "name": "company_alias", + } + + +def test_actual_retrieve( + delete_all: None, + delete_all_aliases: None, + create_alias: None, + actual_aliases: Aliases, +) -> None: + """Test that the Aliases object can retrieve an alias from Typesense Server.""" + response = actual_aliases.retrieve() + + assert len(response["aliases"]) == 1 + assert_to_contain_object( + response["aliases"][0], + { + "collection_name": "companies", + "name": "company_alias", + }, + ) + + +async def test_actual_create_async( + actual_async_aliases: AsyncAliases, delete_all_aliases: None +) -> None: + """Test that the AsyncAliases object can create an alias on Typesense Server.""" + response = await actual_async_aliases.upsert( + "company_alias", {"collection_name": "companies"} + ) + + assert response == {"collection_name": "companies", "name": "company_alias"} + + +async def test_actual_update_async( + actual_async_aliases: AsyncAliases, + delete_all_aliases: None, + delete_all: None, + create_collection: None, + create_another_collection: None, +) -> None: + """Test that the AsyncAliases object can update an alias on Typesense Server.""" + create_response = await actual_async_aliases.upsert( + "company_alias", + {"collection_name": "companies"}, + ) + + assert create_response == {"collection_name": "companies", "name": "company_alias"} + + update_response = await actual_async_aliases.upsert( + "company_alias", + {"collection_name": "companies_2"}, + ) + + assert update_response == { + "collection_name": "companies_2", + "name": "company_alias", + } + + +async def test_actual_retrieve_async( + delete_all: None, + delete_all_aliases: None, + create_alias: None, + actual_async_aliases: AsyncAliases, +) -> None: + """Test that the AsyncAliases object can retrieve an alias from Typesense Server.""" + response = await actual_async_aliases.retrieve() + + assert len(response["aliases"]) == 1 + assert_to_contain_object( + response["aliases"][0], + { + 
"collection_name": "companies", + "name": "company_alias", + }, + ) diff --git a/tests/analytics_events_test.py b/tests/analytics_events_test.py new file mode 100644 index 0000000..49f11a3 --- /dev/null +++ b/tests/analytics_events_test.py @@ -0,0 +1,208 @@ +"""Tests for Analytics events endpoints (client.analytics.events).""" + +import pytest + +from tests.utils.version import is_v30_or_above +from typesense.async_.analytics_events import AsyncAnalyticsEvents +from typesense.async_.analytics_rules import AsyncAnalyticsRules +from typesense.sync.client import Client +from typesense.types.analytics import AnalyticsEvent + +pytestmark = pytest.mark.skipif( + not is_v30_or_above( + Client( + { + "api_key": "xyz", + "nodes": [{"host": "localhost", "port": 8108, "protocol": "http"}], + } + ) + ), + reason="Run analytics events tests only on v30+", +) + + +def test_actual_create_event( + actual_client: Client, + delete_all: None, + create_collection: None, + delete_all_analytics_rules: None, +) -> None: + actual_client.analytics.rules.create( + { + "name": "company_analytics_rule", + "type": "log", + "collection": "companies", + "event_type": "click", + "params": {}, + } + ) + event: AnalyticsEvent = { + "name": "company_analytics_rule", + "event_type": "query", + "data": { + "user_id": "user-1", + "doc_id": "apple", + }, + } + resp = actual_client.analytics.events.create(event) + assert resp["ok"] is True + actual_client.analytics.rules["company_analytics_rule"].delete() + + +def test_status(actual_client: Client, delete_all: None) -> None: + status = actual_client.analytics.events.status() + assert isinstance(status, dict) + + +def test_retrieve_events( + actual_client: Client, delete_all: None, delete_all_analytics_rules: None +) -> None: + actual_client.collections.create( + { + "name": "companies", + "fields": [ + {"name": "user_id", "type": "string"}, + ], + } + ) + + actual_client.analytics.rules.create( + { + "name": "company_analytics_rule", + "type": "log", + 
"collection": "companies", + "event_type": "click", + "params": {}, + } + ) + event: AnalyticsEvent = { + "name": "company_analytics_rule", + "event_type": "query", + "data": { + "user_id": "user-1", + "doc_id": "apple", + }, + } + resp = actual_client.analytics.events.create(event) + assert resp["ok"] is True + result = actual_client.analytics.events.retrieve( + user_id="user-1", + name="company_analytics_rule", + n=10, + ) + assert "events" in result + + +def test_acutal_retrieve_events( + actual_client: Client, + delete_all: None, + create_collection: None, + delete_all_analytics_rules: None, +) -> None: + actual_client.analytics.rules.create( + { + "name": "company_analytics_rule", + "type": "log", + "collection": "companies", + "event_type": "click", + "params": {}, + } + ) + event: AnalyticsEvent = { + "name": "company_analytics_rule", + "event_type": "query", + "data": { + "user_id": "user-1", + "doc_id": "apple", + }, + } + resp = actual_client.analytics.events.create(event) + assert resp["ok"] is True + result = actual_client.analytics.events.retrieve( + user_id="user-1", name="company_analytics_rule", n=10 + ) + assert "events" in result + + +def test_acutal_flush(actual_client: Client, delete_all: None) -> None: + resp = actual_client.analytics.events.flush() + assert resp["ok"] in [True, False] + + +async def test_actual_create_event_async( + actual_async_analytics_rules: AsyncAnalyticsRules, + actual_async_analytics_events: AsyncAnalyticsEvents, + delete_all: None, + create_collection: None, + delete_all_analytics_rules: None, +) -> None: + await actual_async_analytics_rules.create( + { + "name": "company_analytics_rule", + "type": "log", + "collection": "companies", + "event_type": "click", + "params": {}, + } + ) + event: AnalyticsEvent = { + "name": "company_analytics_rule", + "event_type": "query", + "data": { + "user_id": "user-1", + "doc_id": "apple", + }, + } + resp = await actual_async_analytics_events.create(event) + assert resp["ok"] is True 
+ await actual_async_analytics_rules["company_analytics_rule"].delete() + + +async def test_status_async( + actual_async_analytics_events: AsyncAnalyticsEvents, + delete_all: None, +) -> None: + status = await actual_async_analytics_events.status() + assert isinstance(status, dict) + + +async def test_retrieve_events_async( + actual_async_analytics_rules: AsyncAnalyticsRules, + actual_async_analytics_events: AsyncAnalyticsEvents, + delete_all: None, + create_collection: None, + delete_all_analytics_rules: None, +) -> None: + await actual_async_analytics_rules.create( + { + "name": "company_analytics_rule", + "type": "log", + "collection": "companies", + "event_type": "click", + "params": {}, + } + ) + event: AnalyticsEvent = { + "name": "company_analytics_rule", + "event_type": "query", + "data": { + "user_id": "user-1", + "doc_id": "apple", + }, + } + resp = await actual_async_analytics_events.create(event) + assert resp["ok"] is True + result = await actual_async_analytics_events.retrieve( + user_id="user-1", + name="company_analytics_rule", + n=10, + ) + assert "events" in result + + +async def test_actual_flush_async( + actual_async_analytics_events: AsyncAnalyticsEvents, + delete_all: None, +) -> None: + resp = await actual_async_analytics_events.flush() + assert resp["ok"] in [True, False] diff --git a/tests/analytics_rule_test.py b/tests/analytics_rule_test.py new file mode 100644 index 0000000..b491150 --- /dev/null +++ b/tests/analytics_rule_test.py @@ -0,0 +1,61 @@ +"""Unit tests for per-rule AnalyticsRule operations.""" + +import pytest + +from tests.utils.version import is_v30_or_above +from typesense.sync.client import Client +from typesense.sync.analytics_rules import AnalyticsRules +from typesense.async_.analytics_rules import AsyncAnalyticsRules + + +pytestmark = pytest.mark.skipif( + not is_v30_or_above( + Client( + { + "api_key": "xyz", + "nodes": [{"host": "localhost", "port": 8108, "protocol": "http"}], + } + ) + ), + reason="Run analytics tests 
only on v30+", +) + + +def test_actual_rule_retrieve( + actual_analytics_rules: AnalyticsRules, + delete_all: None, + delete_all_analytics_rules: None, + create_analytics_rule: None, +) -> None: + resp = actual_analytics_rules["company_analytics_rule"].retrieve() + assert resp["name"] == "company_analytics_rule" + + +def test_actual_rule_delete( + actual_analytics_rules: AnalyticsRules, + delete_all: None, + delete_all_analytics_rules: None, + create_analytics_rule: None, +) -> None: + resp = actual_analytics_rules["company_analytics_rule"].delete() + assert resp["name"] == "company_analytics_rule" + + +async def test_actual_rule_retrieve_async( + actual_async_analytics_rules: AsyncAnalyticsRules, + delete_all: None, + delete_all_analytics_rules: None, + create_analytics_rule: None, +) -> None: + resp = await actual_async_analytics_rules["company_analytics_rule"].retrieve() + assert resp["name"] == "company_analytics_rule" + + +async def test_actual_rule_delete_async( + actual_async_analytics_rules: AsyncAnalyticsRules, + delete_all: None, + delete_all_analytics_rules: None, + create_analytics_rule: None, +) -> None: + resp = await actual_async_analytics_rules["company_analytics_rule"].delete() + assert resp["name"] == "company_analytics_rule" diff --git a/tests/analytics_rule_v1_test.py b/tests/analytics_rule_v1_test.py new file mode 100644 index 0000000..16692fb --- /dev/null +++ b/tests/analytics_rule_v1_test.py @@ -0,0 +1,121 @@ +"""Tests for the AnalyticsRuleV1 class.""" + + +import pytest + +from tests.utils.object_assertions import assert_match_object, assert_object_lists_match +from tests.utils.version import is_v30_or_above +from typesense.sync.client import Client +from typesense.sync.analytics_rule_v1 import AnalyticsRuleV1 +from typesense.sync.analytics_rules_v1 import AnalyticsRulesV1 +from typesense.sync.api_call import ApiCall +from typesense.async_.analytics_rules_v1 import AsyncAnalyticsRulesV1 +from typesense.types.analytics_rule_v1 import 
RuleDeleteSchema, RuleSchemaForQueries + +pytestmark = pytest.mark.skipif( + is_v30_or_above( + Client( + { + "api_key": "xyz", + "nodes": [{"host": "localhost", "port": 8108, "protocol": "http"}], + } + ) + ), + reason="Skip AnalyticsV1 tests on v30+", +) + + +def test_init(fake_api_call: ApiCall) -> None: + """Test that the AnalyticsRuleV1 object is initialized correctly.""" + analytics_rule = AnalyticsRuleV1(fake_api_call, "company_analytics_rule") + + assert analytics_rule.rule_id == "company_analytics_rule" + assert_match_object(analytics_rule.api_call, fake_api_call) + assert_object_lists_match( + analytics_rule.api_call.node_manager.nodes, + fake_api_call.node_manager.nodes, + ) + assert_match_object( + analytics_rule.api_call.config.nearest_node, + fake_api_call.config.nearest_node, + ) + assert ( + analytics_rule._endpoint_path # noqa: WPS437 + == "/analytics/rules/company_analytics_rule" + ) + + +def test_actual_retrieve( + actual_analytics_rules: AnalyticsRulesV1, + delete_all: None, + delete_all_analytics_rules_v1: None, + create_analytics_rule_v1: None, +) -> None: + """Test that the AnalyticsRuleV1 object can retrieve a rule from Typesense Server.""" + response = actual_analytics_rules["company_analytics_rule"].retrieve() + + expected: RuleSchemaForQueries = { + "name": "company_analytics_rule", + "params": { + "destination": {"collection": "companies_queries"}, + "limit": 1000, + "source": {"collections": ["companies"]}, + }, + "type": "nohits_queries", + } + + assert response == expected + + +def test_actual_delete( + actual_analytics_rules: AnalyticsRulesV1, + delete_all: None, + delete_all_analytics_rules_v1: None, + create_analytics_rule_v1: None, +) -> None: + """Test that the AnalyticsRuleV1 object can delete a rule from Typesense Server.""" + response = actual_analytics_rules["company_analytics_rule"].delete() + + expected: RuleDeleteSchema = { + "name": "company_analytics_rule", + } + assert response == expected + + +async def 
test_actual_retrieve_async( + actual_async_analytics_rules_v1: AsyncAnalyticsRulesV1, + delete_all: None, + delete_all_analytics_rules_v1: None, + create_analytics_rule_v1: None, +) -> None: + """Test that the AsyncAnalyticsRuleV1 object can retrieve a rule from Typesense Server.""" + response = await actual_async_analytics_rules_v1[ + "company_analytics_rule" + ].retrieve() + + expected: RuleSchemaForQueries = { + "name": "company_analytics_rule", + "params": { + "destination": {"collection": "companies_queries"}, + "limit": 1000, + "source": {"collections": ["companies"]}, + }, + "type": "nohits_queries", + } + + assert response == expected + + +async def test_actual_delete_async( + actual_async_analytics_rules_v1: AsyncAnalyticsRulesV1, + delete_all: None, + delete_all_analytics_rules_v1: None, + create_analytics_rule_v1: None, +) -> None: + """Test that the AsyncAnalyticsRuleV1 object can delete a rule from Typesense Server.""" + response = await actual_async_analytics_rules_v1["company_analytics_rule"].delete() + + expected: RuleDeleteSchema = { + "name": "company_analytics_rule", + } + assert response == expected diff --git a/tests/analytics_rules_test.py b/tests/analytics_rules_test.py new file mode 100644 index 0000000..a3a22d2 --- /dev/null +++ b/tests/analytics_rules_test.py @@ -0,0 +1,150 @@ +"""Tests for v30 Analytics Rules endpoints (client.analytics.rules).""" + + +import pytest + +from tests.utils.version import is_v30_or_above +from typesense.sync.client import Client +from typesense.sync.analytics_rules import AnalyticsRules +from typesense.sync.analytics_rule import AnalyticsRule +from typesense.async_.api_call import AsyncApiCall +from typesense.async_.analytics_rules import AsyncAnalyticsRules +from typesense.types.analytics import AnalyticsRuleCreate + + +pytestmark = pytest.mark.skipif( + not is_v30_or_above( + Client( + { + "api_key": "xyz", + "nodes": [{"host": "localhost", "port": 8108, "protocol": "http"}], + } + ) + ), + reason="Run v30 
analytics tests only on v30+", +) + + +def test_rules_init(fake_api_call) -> None: + rules = AnalyticsRules(fake_api_call) + assert rules.rules == {} + + +def test_rule_getitem(fake_api_call) -> None: + rules = AnalyticsRules(fake_api_call) + rule = rules["company_analytics_rule"] + assert isinstance(rule, AnalyticsRule) + assert rule._endpoint_path == "/analytics/rules/company_analytics_rule" + + +def test_actual_create( + actual_analytics_rules: AnalyticsRules, + delete_all: None, + delete_all_analytics_rules: None, + create_collection: None, + create_query_collection: None, +) -> None: + body: AnalyticsRuleCreate = { + "name": "company_analytics_rule", + "type": "nohits_queries", + "collection": "companies", + "event_type": "search", + "params": {"destination_collection": "companies_queries", "limit": 1000}, + } + resp = actual_analytics_rules.create(rule=body) + assert resp["name"] == "company_analytics_rule" + assert resp["params"]["destination_collection"] == "companies_queries" + + +def test_actual_update( + actual_analytics_rules: AnalyticsRules, + delete_all: None, + delete_all_analytics_rules: None, + create_analytics_rule: None, +) -> None: + resp = actual_analytics_rules.upsert( + "company_analytics_rule", + { + "params": { + "destination_collection": "companies_queries", + "limit": 500, + }, + }, + ) + assert resp["name"] == "company_analytics_rule" + + +def test_actual_retrieve( + actual_analytics_rules: AnalyticsRules, + delete_all: None, + delete_all_analytics_rules: None, + create_analytics_rule: None, +) -> None: + rules = actual_analytics_rules.retrieve() + assert isinstance(rules, list) + assert any(r.get("name") == "company_analytics_rule" for r in rules) + + +def test_rules_init_async(fake_async_api_call: AsyncApiCall) -> None: + from typesense.async_.analytics_rules import AsyncAnalyticsRules + + rules = AsyncAnalyticsRules(fake_async_api_call) + assert rules.rules == {} + + +def test_rule_getitem_async(fake_async_api_call: AsyncApiCall) -> 
None: + from typesense.async_.analytics_rules import AsyncAnalyticsRules + from typesense.async_.analytics_rule import AsyncAnalyticsRule + + rules = AsyncAnalyticsRules(fake_async_api_call) + rule = rules["company_analytics_rule"] + assert isinstance(rule, AsyncAnalyticsRule) + assert rule._endpoint_path == "/analytics/rules/company_analytics_rule" + + +async def test_actual_create_async( + actual_async_analytics_rules: AsyncAnalyticsRules, + delete_all: None, + delete_all_analytics_rules: None, + create_collection: None, + create_query_collection: None, +) -> None: + body: AnalyticsRuleCreate = { + "name": "company_analytics_rule", + "type": "nohits_queries", + "collection": "companies", + "event_type": "search", + "params": {"destination_collection": "companies_queries", "limit": 1000}, + } + resp = await actual_async_analytics_rules.create(rule=body) + assert resp["name"] == "company_analytics_rule" + assert resp["params"]["destination_collection"] == "companies_queries" + + +async def test_actual_update_async( + actual_async_analytics_rules: AsyncAnalyticsRules, + delete_all: None, + delete_all_analytics_rules: None, + create_analytics_rule: None, +) -> None: + resp = await actual_async_analytics_rules.upsert( + "company_analytics_rule", + { + "params": { + "destination_collection": "companies_queries", + "limit": 500, + }, + }, + ) + assert resp["name"] == "company_analytics_rule" + + +async def test_actual_retrieve_async( + actual_async_analytics_rules: AsyncAnalyticsRules, + delete_all: None, + delete_all_analytics_rules: None, + create_analytics_rule: None, +) -> None: + rules = await actual_async_analytics_rules.retrieve() + assert isinstance(rules, list) + assert any(r.get("name") == "company_analytics_rule" for r in rules) diff --git a/tests/analytics_rules_v1_test.py b/tests/analytics_rules_v1_test.py new file mode 100644 index 0000000..d222d99 --- /dev/null +++ b/tests/analytics_rules_v1_test.py @@ -0,0 +1,291 @@ +"""Tests for the AnalyticsRulesV1 
class.""" + +import pytest + +from tests.utils.object_assertions import assert_match_object, assert_object_lists_match +from tests.utils.version import is_v30_or_above +from typesense.sync.client import Client +from typesense.sync.analytics_rules_v1 import AnalyticsRulesV1 +from typesense.sync.api_call import ApiCall +from typesense.async_.api_call import AsyncApiCall +from typesense.async_.analytics_rules_v1 import AsyncAnalyticsRulesV1 + + +pytestmark = pytest.mark.skipif( + is_v30_or_above( + Client( + { + "api_key": "xyz", + "nodes": [{"host": "localhost", "port": 8108, "protocol": "http"}], + } + ) + ), + reason="Skip AnalyticsV1 tests on v30+", +) + + +def test_init(fake_api_call: ApiCall) -> None: + """Test that the AnalyticsRulesV1 object is initialized correctly.""" + analytics_rules = AnalyticsRulesV1(fake_api_call) + + assert_match_object(analytics_rules.api_call, fake_api_call) + assert_object_lists_match( + analytics_rules.api_call.node_manager.nodes, + fake_api_call.node_manager.nodes, + ) + assert_match_object( + analytics_rules.api_call.config.nearest_node, + fake_api_call.config.nearest_node, + ) + + assert not analytics_rules.rules + + +def test_get_missing_analytics_rule(fake_analytics_rules: AnalyticsRulesV1) -> None: + """Test that the AnalyticsRulesV1 object can get a missing analytics_rule.""" + analytics_rule = fake_analytics_rules["company_analytics_rule"] + + assert analytics_rule.rule_id == "company_analytics_rule" + assert_match_object(analytics_rule.api_call, fake_analytics_rules.api_call) + assert_object_lists_match( + analytics_rule.api_call.node_manager.nodes, + fake_analytics_rules.api_call.node_manager.nodes, + ) + assert_match_object( + analytics_rule.api_call.config.nearest_node, + fake_analytics_rules.api_call.config.nearest_node, + ) + assert ( + analytics_rule._endpoint_path # noqa: WPS437 + == "/analytics/rules/company_analytics_rule" + ) + + +def test_get_existing_analytics_rule(fake_analytics_rules: AnalyticsRulesV1) -> 
None: + """Test that the AnalyticsRulesV1 object can get an existing analytics_rule.""" + analytics_rule = fake_analytics_rules["company_analytics_rule"] + fetched_analytics_rule = fake_analytics_rules["company_analytics_rule"] + + assert len(fake_analytics_rules.rules) == 1 + + assert analytics_rule is fetched_analytics_rule + + +def test_actual_create( + actual_analytics_rules: AnalyticsRulesV1, + delete_all: None, + delete_all_analytics_rules_v1: None, + create_collection: None, + create_query_collection: None, +) -> None: + """Test that the AnalyticsRulesV1 object can create an analytics_rule on Typesense Server.""" + response = actual_analytics_rules.create( + rule={ + "name": "company_analytics_rule", + "type": "nohits_queries", + "params": { + "source": { + "collections": ["companies"], + }, + "destination": {"collection": "companies_queries"}, + }, + }, + ) + + assert response == { + "name": "company_analytics_rule", + "type": "nohits_queries", + "params": { + "source": {"collections": ["companies"]}, + "destination": {"collection": "companies_queries"}, + }, + } + + +def test_actual_update( + actual_analytics_rules: AnalyticsRulesV1, + delete_all: None, + delete_all_analytics_rules_v1: None, + create_analytics_rule_v1: None, +) -> None: + """Test that the AnalyticsRulesV1 object can update an analytics_rule on Typesense Server.""" + response = actual_analytics_rules.upsert( + "company_analytics_rule", + { + "type": "popular_queries", + "params": { + "source": { + "collections": ["companies"], + }, + "destination": {"collection": "companies_queries"}, + }, + }, + ) + + assert response == { + "name": "company_analytics_rule", + "type": "popular_queries", + "params": { + "source": {"collections": ["companies"]}, + "destination": {"collection": "companies_queries"}, + }, + } + + +def test_actual_retrieve( + actual_analytics_rules: AnalyticsRulesV1, + delete_all: None, + delete_all_analytics_rules_v1: None, + create_analytics_rule_v1: None, +) -> None: + 
"""Test that the AnalyticsRulesV1 object can retrieve the rules from Typesense Server.""" + response = actual_analytics_rules.retrieve() + assert len(response["rules"]) == 1 + assert_match_object( + response["rules"][0], + { + "name": "company_analytics_rule", + "params": { + "destination": {"collection": "companies_queries"}, + "limit": 1000, + "source": {"collections": ["companies"]}, + }, + "type": "nohits_queries", + }, + ) + + +def test_init_async(fake_async_api_call: AsyncApiCall) -> None: + """Test that the AsyncAnalyticsRulesV1 object is initialized correctly.""" + analytics_rules = AsyncAnalyticsRulesV1(fake_async_api_call) + + assert_match_object(analytics_rules.api_call, fake_async_api_call) + assert_object_lists_match( + analytics_rules.api_call.node_manager.nodes, + fake_async_api_call.node_manager.nodes, + ) + assert_match_object( + analytics_rules.api_call.config.nearest_node, + fake_async_api_call.config.nearest_node, + ) + + assert not analytics_rules.rules + + +def test_get_missing_analytics_rule_async( + fake_async_analytics_rules_v1: AsyncAnalyticsRulesV1, +) -> None: + """Test that the AsyncAnalyticsRulesV1 object can get a missing analytics_rule.""" + + analytics_rule = fake_async_analytics_rules_v1["company_analytics_rule"] + + assert analytics_rule.rule_id == "company_analytics_rule" + assert_match_object(analytics_rule.api_call, fake_async_analytics_rules_v1.api_call) + assert_object_lists_match( + analytics_rule.api_call.node_manager.nodes, + fake_async_analytics_rules_v1.api_call.node_manager.nodes, + ) + assert_match_object( + analytics_rule.api_call.config.nearest_node, + fake_async_analytics_rules_v1.api_call.config.nearest_node, + ) + assert ( + analytics_rule._endpoint_path # noqa: WPS437 + == "/analytics/rules/company_analytics_rule" + ) + + +def test_get_existing_analytics_rule_async( + fake_async_analytics_rules_v1: AsyncAnalyticsRulesV1, +) -> None: + """Test that the AsyncAnalyticsRulesV1 object can get an existing 
analytics_rule.""" + analytics_rule = fake_async_analytics_rules_v1["company_analytics_rule"] + fetched_analytics_rule = fake_async_analytics_rules_v1["company_analytics_rule"] + + assert len(fake_async_analytics_rules_v1.rules) == 1 + + assert analytics_rule is fetched_analytics_rule + + +async def test_actual_create_async( + actual_async_analytics_rules_v1: AsyncAnalyticsRulesV1, + delete_all: None, + delete_all_analytics_rules_v1: None, + create_collection: None, + create_query_collection: None, +) -> None: + """Test that the AsyncAnalyticsRulesV1 object can create an analytics_rule on Typesense Server.""" + response = await actual_async_analytics_rules_v1.create( + rule={ + "name": "company_analytics_rule", + "type": "nohits_queries", + "params": { + "source": { + "collections": ["companies"], + }, + "destination": {"collection": "companies_queries"}, + }, + }, + ) + + assert response == { + "name": "company_analytics_rule", + "type": "nohits_queries", + "params": { + "source": {"collections": ["companies"]}, + "destination": {"collection": "companies_queries"}, + }, + } + + +async def test_actual_update_async( + actual_async_analytics_rules_v1: AsyncAnalyticsRulesV1, + delete_all: None, + delete_all_analytics_rules_v1: None, + create_analytics_rule_v1: None, +) -> None: + """Test that the AsyncAnalyticsRulesV1 object can update an analytics_rule on Typesense Server.""" + response = await actual_async_analytics_rules_v1.upsert( + "company_analytics_rule", + { + "type": "popular_queries", + "params": { + "source": { + "collections": ["companies"], + }, + "destination": {"collection": "companies_queries"}, + }, + }, + ) + + assert response == { + "name": "company_analytics_rule", + "type": "popular_queries", + "params": { + "source": {"collections": ["companies"]}, + "destination": {"collection": "companies_queries"}, + }, + } + + +async def test_actual_retrieve_async( + actual_async_analytics_rules_v1: AsyncAnalyticsRulesV1, + delete_all: None, + 
delete_all_analytics_rules_v1: None,
+    create_analytics_rule_v1: None,
+) -> None:
+    """Test that the AsyncAnalyticsRulesV1 object can retrieve the rules from Typesense Server."""
+    response = await actual_async_analytics_rules_v1.retrieve()
+    assert len(response["rules"]) == 1
+    assert_match_object(
+        response["rules"][0],
+        {
+            "name": "company_analytics_rule",
+            "params": {
+                "destination": {"collection": "companies_queries"},
+                "limit": 1000,
+                "source": {"collections": ["companies"]},
+            },
+            "type": "nohits_queries",
+        },
+    )
diff --git a/tests/analytics_test.py b/tests/analytics_test.py
new file mode 100644
index 0000000..e8f868d
--- /dev/null
+++ b/tests/analytics_test.py
@@ -0,0 +1,55 @@
+"""Tests for the Analytics class."""
+
+import pytest
+from tests.utils.version import is_v30_or_above
+from typesense.sync.client import Client
+from tests.utils.object_assertions import assert_match_object, assert_object_lists_match
+from typesense.sync.analytics import Analytics
+from typesense.sync.api_call import ApiCall
+from typesense.async_.api_call import AsyncApiCall
+from typesense.async_.analytics import AsyncAnalytics
+
+
+@pytest.mark.skipif(
+    not is_v30_or_above(
+        Client(
+            {
+                "api_key": "xyz",
+                "nodes": [{"host": "localhost", "port": 8108, "protocol": "http"}],
+            }
+        )
+    ),
+    reason="Run Analytics tests only on v30+",
+)
+def test_init(fake_api_call: ApiCall) -> None:
+    """Test that the Analytics object is initialized correctly."""
+    analytics = Analytics(fake_api_call)
+
+    assert_match_object(analytics.rules.api_call, fake_api_call)
+    assert_object_lists_match(
+        analytics.rules.api_call.node_manager.nodes,
+        fake_api_call.node_manager.nodes,
+    )
+    assert_match_object(
+        analytics.rules.api_call.config.nearest_node,
+        fake_api_call.config.nearest_node,
+    )
+
+    assert not analytics.rules.rules
+
+
+def test_init_async(fake_async_api_call: AsyncApiCall) -> None:
+    """Test that the AsyncAnalytics object is initialized correctly."""
+    analytics = 
AsyncAnalytics(fake_async_api_call) + + assert_match_object(analytics.rules.api_call, fake_async_api_call) + assert_object_lists_match( + analytics.rules.api_call.node_manager.nodes, + fake_async_api_call.node_manager.nodes, + ) + assert_match_object( + analytics.rules.api_call.config.nearest_node, + fake_async_api_call.config.nearest_node, + ) + + assert not analytics.rules.rules diff --git a/tests/analytics_v1_test.py b/tests/analytics_v1_test.py new file mode 100644 index 0000000..e2b00ac --- /dev/null +++ b/tests/analytics_v1_test.py @@ -0,0 +1,55 @@ +"""Tests for the AnalyticsV1 class.""" + +import pytest +from tests.utils.version import is_v30_or_above +from typesense.sync.client import Client +from tests.utils.object_assertions import assert_match_object, assert_object_lists_match +from typesense.sync.analytics_v1 import AnalyticsV1 +from typesense.sync.api_call import ApiCall +from typesense.async_.api_call import AsyncApiCall +from typesense.async_.analytics_v1 import AsyncAnalyticsV1 + + +@pytest.mark.skipif( + is_v30_or_above( + Client( + { + "api_key": "xyz", + "nodes": [{"host": "localhost", "port": 8108, "protocol": "http"}], + } + ) + ), + reason="Skip AnalyticsV1 tests on v30+", +) +def test_init(fake_api_call: ApiCall) -> None: + """Test that the AnalyticsV1 object is initialized correctly.""" + analytics = AnalyticsV1(fake_api_call) + + assert_match_object(analytics.rules.api_call, fake_api_call) + assert_object_lists_match( + analytics.rules.api_call.node_manager.nodes, + fake_api_call.node_manager.nodes, + ) + assert_match_object( + analytics.rules.api_call.config.nearest_node, + fake_api_call.config.nearest_node, + ) + + assert not analytics.rules.rules + + +def test_init_async(fake_async_api_call: AsyncApiCall) -> None: + """Test that the AsyncAnalyticsV1 object is initialized correctly.""" + analytics = AsyncAnalyticsV1(fake_async_api_call) + + assert_match_object(analytics.rules.api_call, fake_async_api_call) + assert_object_lists_match( + 
analytics.rules.api_call.node_manager.nodes, + fake_async_api_call.node_manager.nodes, + ) + assert_match_object( + analytics.rules.api_call.config.nearest_node, + fake_async_api_call.config.nearest_node, + ) + + assert not analytics.rules.rules diff --git a/tests/api_call_test.py b/tests/api_call_test.py new file mode 100644 index 0000000..ddff4ee --- /dev/null +++ b/tests/api_call_test.py @@ -0,0 +1,617 @@ +"""Unit Tests for the ApiCall class.""" + +import logging +import sys +import time + +from pytest_mock import MockFixture + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + +import httpx +import pytest +import respx +from pytest_mock import MockerFixture + +from tests.utils.object_assertions import assert_match_object, assert_object_lists_match +from typesense import exceptions +from typesense.sync.api_call import ApiCall, RequestHandler +from typesense.configuration import Configuration, Node +from typesense.logger import logger + + +def test_initialization( + fake_config: Configuration, +) -> None: + """Test the initialization of the ApiCall object.""" + fake_api_call = ApiCall(fake_config) + assert fake_api_call.config == fake_config + assert_object_lists_match(fake_api_call.node_manager.nodes, fake_config.nodes) + assert fake_api_call.node_manager.node_index == 0 + + +def test_node_due_for_health_check( + fake_api_call: ApiCall, +) -> None: + """Test that it correctly identifies if a node is due for health check.""" + node = Node(host="localhost", port=8108, protocol="http", path=" ") + node.last_access_ts = time.time() - 61 + assert fake_api_call.node_manager._is_due_for_health_check(node) is True + + +def test_get_node_nearest_healthy( + fake_api_call: ApiCall, +) -> None: + """Test that it correctly selects the nearest node if it is healthy.""" + node = fake_api_call.node_manager.get_node() + assert_match_object(node, fake_api_call.config.nearest_node) + + +def test_get_node_nearest_not_healthy( + 
fake_api_call: ApiCall, +) -> None: + """Test that it selects the next available node if the nearest node is not healthy.""" + fake_api_call.config.nearest_node.healthy = False + node = fake_api_call.node_manager.get_node() + assert_match_object(node, fake_api_call.node_manager.nodes[0]) + + +def test_get_node_round_robin_selection( + fake_api_call: ApiCall, + mocker: MockerFixture, +) -> None: + """Test that it selects the next available node in a round-robin fashion.""" + fake_api_call.config.nearest_node = None + mocker.patch("time.time", return_value=100) + + node1 = fake_api_call.node_manager.get_node() + assert_match_object(node1, fake_api_call.config.nodes[0]) + + node2 = fake_api_call.node_manager.get_node() + assert_match_object(node2, fake_api_call.config.nodes[1]) + + node3 = fake_api_call.node_manager.get_node() + assert_match_object(node3, fake_api_call.config.nodes[2]) + + +def test_get_exception() -> None: + """Test that it correctly returns the exception class for a given status code.""" + assert RequestHandler._get_exception(0) == exceptions.HTTPStatus0Error + assert RequestHandler._get_exception(400) == exceptions.RequestMalformed + assert RequestHandler._get_exception(401) == exceptions.RequestUnauthorized + assert RequestHandler._get_exception(403) == exceptions.RequestForbidden + assert RequestHandler._get_exception(404) == exceptions.ObjectNotFound + assert RequestHandler._get_exception(409) == exceptions.ObjectAlreadyExists + assert RequestHandler._get_exception(422) == exceptions.ObjectUnprocessable + assert RequestHandler._get_exception(500) == exceptions.ServerError + assert RequestHandler._get_exception(503) == exceptions.ServiceUnavailable + assert RequestHandler._get_exception(999) == exceptions.TypesenseClientError + + +def test_get_error_message_with_invalid_json() -> None: + """Test that it correctly handles invalid JSON in error responses.""" + response = httpx.Response( + 400, + headers={"Content-Type": "application/json"}, + 
content=b'{"message": "Error occurred", "details": {"key": "value"', + ) + + error_message = RequestHandler._get_error_message(response) + assert "API error: Invalid JSON response:" in error_message + assert '{"message": "Error occurred", "details": {"key": "value"' in error_message + + +def test_get_error_message_with_valid_json() -> None: + """Test that it correctly extracts error message from valid JSON responses.""" + response = httpx.Response( + 400, + headers={"Content-Type": "application/json"}, + content=b'{"message": "Error occurred", "details": {"key": "value"}}', + ) + + error_message = RequestHandler._get_error_message(response) + assert error_message == "Error occurred" + + +def test_get_error_message_with_non_json_content_type() -> None: + """Test that it returns a default error message for non-JSON content types.""" + response = httpx.Response( + 400, + headers={"Content-Type": "text/plain"}, + content=b"Not a JSON content", + ) + + error_message = RequestHandler._get_error_message(response) + assert error_message == "API error. 
Not a JSON content" + + +def test_normalize_params_with_booleans() -> None: + """Test that it correctly normalizes boolean values to strings.""" + parameter_dict: typing.Dict[str, str | bool] = {"key1": True, "key2": False} + RequestHandler.normalize_params(parameter_dict) + + assert parameter_dict == {"key1": "true", "key2": "false"} + + +def test_normalize_params_with_non_dict() -> None: + """Test that it raises when a non-dictionary is passed.""" + parameter_non_dict = "string" + + with pytest.raises(ValueError): + RequestHandler.normalize_params(parameter_non_dict) + + +def test_normalize_params_with_mixed_types() -> None: + """Test that it correctly normalizes boolean values to strings.""" + parameter_dict = {"key1": True, "key2": False, "key3": "value", "key4": 123} + RequestHandler.normalize_params(parameter_dict) + assert parameter_dict == { + "key1": "true", + "key2": "false", + "key3": "value", + "key4": 123, + } + + +def test_normalize_params_with_empty_dict() -> None: + """Test that it correctly normalizes an empty dictionary.""" + parameter_dict: typing.Dict[str, str] = {} + RequestHandler.normalize_params(parameter_dict) + assert not parameter_dict + + +def test_normalize_params_with_no_booleans() -> None: + """Test that it correctly normalizes a dictionary with no boolean values.""" + parameter_dict = {"key1": "value", "key2": 123} + RequestHandler.normalize_params(parameter_dict) + assert parameter_dict == {"key1": "value", "key2": 123} + + +def test_additional_headers(fake_api_call: ApiCall) -> None: + """Test the `make_request` method with additional headers from the config.""" + api_call = ApiCall( + Configuration( + { + "additional_headers": { + "AdditionalHeader1": "test", + "AdditionalHeader2": "test2", + }, + "api_key": "test-api", + "nodes": [ + "http://nearest:8108", + ], + }, + ), + ) + + with respx.mock: + respx.get("http://nearest:8108/test").mock( + return_value=httpx.Response(200, json={"key": "value"}) + ) + + 
api_call._execute_request( + "GET", + "/test", + as_json=True, + entity_type=typing.Dict[str, str], + ) + + request = respx.calls.last.request + assert request.headers["AdditionalHeader1"] == "test" + assert request.headers["AdditionalHeader2"] == "test2" + + +def test_make_request_as_json(fake_api_call: ApiCall) -> None: + """Test the `make_request` method with JSON response.""" + with respx.mock: + respx.get("http://nearest:8108/test").mock( + return_value=httpx.Response(200, json={"key": "value"}) + ) + + response = fake_api_call._execute_request( + "GET", + "/test", + as_json=True, + entity_type=typing.Dict[str, str], + ) + assert response == {"key": "value"} + + +def test_make_request_as_text(fake_api_call: ApiCall) -> None: + """Test the `make_request` method with text response.""" + with respx.mock: + respx.get("http://nearest:8108/test").mock( + return_value=httpx.Response(200, text="response text") + ) + + response = fake_api_call._execute_request( + "GET", + "/test", + as_json=False, + entity_type=typing.Dict[str, str], + ) + + assert response == "response text" + + +def test_get_as_json( + fake_api_call: ApiCall, +) -> None: + """Test the GET method with JSON response.""" + with respx.mock: + respx.get("http://nearest:8108/test").mock( + return_value=httpx.Response(200, json={"key": "value"}) + ) + assert fake_api_call.get( + "/test", + as_json=True, + entity_type=typing.Dict[str, str], + ) == {"key": "value"} + + +def test_get_as_text( + fake_api_call: ApiCall, +) -> None: + """Test the GET method with text response.""" + with respx.mock: + respx.get("http://nearest:8108/test").mock( + return_value=httpx.Response(200, text="response text") + ) + assert ( + fake_api_call.get("/test", as_json=False, entity_type=typing.Dict[str, str]) + == "response text" + ) + + +def test_post_as_json( + fake_api_call: ApiCall, +) -> None: + """Test the POST method with JSON response.""" + with respx.mock: + respx.post("http://nearest:8108/test").mock( + 
return_value=httpx.Response(200, json={"key": "value"}) + ) + assert fake_api_call.post( + "/test", + body={"data": "value"}, + as_json=True, + entity_type=typing.Dict[str, str], + ) == { + "key": "value", + } + + +def test_post_with_params( + fake_api_call: ApiCall, +) -> None: + """Test that the parameters are correctly passed to the request.""" + with respx.mock: + route = respx.post("http://nearest:8108/test").mock( + return_value=httpx.Response(200, json={"key": "value"}) + ) + + parameter_set = {"key1": [True, False], "key2": False, "key3": "value"} + + post_result = fake_api_call.post( + "/test", + params=parameter_set, + body={"key": "value"}, + as_json=True, + entity_type=typing.Dict[str, str], + ) + + expected_parameter_set = { + "key1": ["true", "false"], + "key2": ["false"], + "key3": ["value"], + } + + request = route.calls.last.request + # respx stores params as a MultiDict, convert to dict for comparison + params_dict: typing.Dict[str, typing.List[str]] = {} + for key, value in request.url.params.multi_items(): + if key in params_dict: + params_dict[key].append(value) + else: + params_dict[key] = [value] + assert params_dict == expected_parameter_set + assert post_result == {"key": "value"} + + +def test_post_as_text( + fake_api_call: ApiCall, +) -> None: + """Test the POST method with text response.""" + with respx.mock: + respx.post("http://nearest:8108/test").mock( + return_value=httpx.Response(200, text="response text") + ) + post_result = fake_api_call.post( + "/test", + body={"data": "value"}, + as_json=False, + entity_type=typing.Dict[str, str], + ) + assert post_result == "response text" + + +def test_put_as_json( + fake_api_call: ApiCall, +) -> None: + """Test the PUT method with JSON response.""" + with respx.mock: + respx.put("http://nearest:8108/test").mock( + return_value=httpx.Response(200, json={"key": "value"}) + ) + assert fake_api_call.put( + "/test", + body={"data": "value"}, + entity_type=typing.Dict[str, str], + ) == {"key": 
"value"} + + +def test_patch_as_json( + fake_api_call: ApiCall, +) -> None: + """Test the PATCH method with JSON response.""" + with respx.mock: + respx.patch("http://nearest:8108/test").mock( + return_value=httpx.Response(200, json={"key": "value"}) + ) + assert fake_api_call.patch( + "/test", + body={"data": "value"}, + entity_type=typing.Dict[str, str], + ) == {"key": "value"} + + +def test_delete_as_json( + fake_api_call: ApiCall, +) -> None: + """Test the DELETE method with JSON response.""" + with respx.mock: + respx.delete("http://nearest:8108/test").mock( + return_value=httpx.Response(200, json={"key": "value"}) + ) + + response = fake_api_call.delete("/test", entity_type=typing.Dict[str, str]) + assert response == {"key": "value"} + + +def test_raise_custom_exception_with_header( + fake_api_call: ApiCall, +) -> None: + """Test that it raises a custom exception with the error message.""" + with respx.mock: + respx.get("http://nearest:8108/test").mock( + return_value=httpx.Response( + 400, + json={"message": "Test error"}, + headers={"Content-Type": "application/json"}, + ) + ) + + with pytest.raises(exceptions.RequestMalformed) as exception: + fake_api_call._execute_request( + "GET", + "/test", + as_json=True, + entity_type=typing.Dict[str, str], + ) + assert str(exception.value) == "[Errno 400] Test error" + + +def test_raise_custom_exception_without_header( + fake_api_call: ApiCall, +) -> None: + """Test that it raises a custom exception with the error message.""" + with respx.mock: + # Use content instead of json to avoid automatic Content-Type header + # This tests the case where Content-Type is not application/json + respx.get("http://nearest:8108/test").mock( + return_value=httpx.Response( + 400, + content=b'{"message": "Test error"}', + headers={"Content-Type": "text/plain"}, + ) + ) + + with pytest.raises(exceptions.RequestMalformed) as exception: + fake_api_call._execute_request( + "GET", + "/test", + as_json=True, + entity_type=typing.Dict[str, 
str], + ) + assert ( + str(exception.value) == '[Errno 400] API error. {"message": "Test error"}' + ) + + +def test_selects_next_available_node_on_timeout( + fake_api_call: ApiCall, +) -> None: + """Test that it selects the next available node if the request times out.""" + with respx.mock: + fake_api_call.config.nearest_node = None + respx.get("http://node0:8108/test").mock( + side_effect=httpx.ConnectTimeout("Timeout") + ) + respx.get("http://node1:8108/test").mock( + side_effect=httpx.ConnectTimeout("Timeout") + ) + respx.get("http://node2:8108/test").mock( + return_value=httpx.Response(200, json={"key": "value"}) + ) + + response = fake_api_call.get( + "/test", + as_json=True, + entity_type=typing.Dict[str, str], + ) + + assert response == {"key": "value"} + assert respx.calls[0].request.url == "http://node0:8108/test" + assert respx.calls[1].request.url == "http://node1:8108/test" + assert respx.calls[2].request.url == "http://node2:8108/test" + assert len(respx.calls) == 3 + + +def test_get_node_no_healthy_nodes( + fake_api_call: ApiCall, + mocker: MockFixture, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test that it logs a message if no healthy nodes are found.""" + for api_node in fake_api_call.node_manager.nodes: + api_node.healthy = False + + fake_api_call.config.nearest_node.healthy = False + + mocker.patch.object( + fake_api_call.node_manager, + "_is_due_for_health_check", + return_value=False, + ) + + # Need to set the logger level to DEBUG to capture the message + logger.setLevel(logging.DEBUG) + + selected_node = fake_api_call.node_manager.get_node() + + with caplog.at_level(logging.DEBUG): + assert "No healthy nodes were found. Returning the next node." 
in caplog.text + + assert ( + selected_node + == fake_api_call.node_manager.nodes[fake_api_call.node_manager.node_index] + ) + + assert fake_api_call.node_manager.node_index == 0 + + +def test_raises_if_no_nodes_are_healthy_with_the_last_exception( + fake_api_call: ApiCall, +) -> None: + """Test that it raises the last exception if no nodes are healthy.""" + with respx.mock: + respx.get("http://nearest:8108/").mock( + side_effect=httpx.ConnectTimeout("Timeout") + ) + respx.get("http://node0:8108/").mock( + side_effect=httpx.ConnectTimeout("Timeout") + ) + respx.get("http://node1:8108/").mock( + side_effect=httpx.ConnectTimeout("Timeout") + ) + respx.get("http://node2:8108/").mock( + side_effect=httpx.ConnectError("SSL Error") + ) + + with pytest.raises(httpx.ConnectError): + fake_api_call.get("/", entity_type=typing.Dict[str, str]) + + +def test_uses_nearest_node_if_present_and_healthy( # noqa: WPS213 + mocker: MockerFixture, + fake_api_call: ApiCall, +) -> None: + """Test that it uses the nearest node if it is present and healthy.""" + with respx.mock: + nearest_route = respx.get("http://nearest:8108/") + nearest_route.mock(side_effect=httpx.ConnectTimeout("Timeout")) + respx.get("http://node0:8108/").mock( + side_effect=httpx.ConnectTimeout("Timeout") + ) + respx.get("http://node1:8108/").mock( + side_effect=httpx.ConnectTimeout("Timeout") + ) + respx.get("http://node2:8108/").mock( + return_value=httpx.Response(200, json={"message": "Success"}) + ) + + # Freeze time + current_time = time.time() + mocker.patch("time.time", return_value=current_time) + + # Perform the requests + + # 1 should go to nearest, + # 2 should go to node0, + # 3 should go to node1, + # 4 should go to node2 and resolve the request: 4 requests + fake_api_call.get("/", entity_type=typing.Dict[str, str]) + # 1 should go to node2 and resolve the request: 1 request + fake_api_call.get("/", entity_type=typing.Dict[str, str]) + # 1 should go to node2 and resolve the request: 1 request + 
fake_api_call.get("/", entity_type=typing.Dict[str, str]) + + # Advance time by 5 seconds + mocker.patch("time.time", return_value=current_time + 5) + fake_api_call.get( + "/", + entity_type=typing.Dict[str, str], + ) # 1 should go to node2 and resolve the request: 1 request + + # Advance time by 65 seconds + mocker.patch("time.time", return_value=current_time + 65) + + # 1 should go to nearest, + # 2 should go to node0, + # 3 should go to node1, + # 4 should go to node2 and resolve the request: 4 requests + fake_api_call.get("/", entity_type=typing.Dict[str, str]) + + # Advance time by 185 seconds + mocker.patch("time.time", return_value=current_time + 185) + + # Resolve the request on the nearest node + nearest_route.mock( + return_value=httpx.Response(200, json={"message": "Success"}) + ) + + # 1 should go to nearest and resolve the request: 1 request + fake_api_call.get("/", entity_type=typing.Dict[str, str]) + # 1 should go to nearest and resolve the request: 1 request + fake_api_call.get("/", entity_type=typing.Dict[str, str]) + # 1 should go to nearest and resolve the request: 1 request + fake_api_call.get("/", entity_type=typing.Dict[str, str]) + + # Check the request history + assert str(respx.calls[0].request.url) == "http://nearest:8108/" + assert str(respx.calls[1].request.url) == "http://node0:8108/" + assert str(respx.calls[2].request.url) == "http://node1:8108/" + assert str(respx.calls[3].request.url) == "http://node2:8108/" + + assert str(respx.calls[4].request.url) == "http://node2:8108/" + assert str(respx.calls[5].request.url) == "http://node2:8108/" + + assert str(respx.calls[6].request.url) == "http://node2:8108/" + + assert str(respx.calls[7].request.url) == "http://nearest:8108/" + assert str(respx.calls[8].request.url) == "http://node0:8108/" + assert str(respx.calls[9].request.url) == "http://node1:8108/" + assert str(respx.calls[10].request.url) == "http://node2:8108/" + + assert str(respx.calls[11].request.url) == "http://nearest:8108/" 
+ assert str(respx.calls[12].request.url) == "http://nearest:8108/" + assert str(respx.calls[13].request.url) == "http://nearest:8108/" + + +def test_max_retries_no_last_exception(fake_api_call: ApiCall) -> None: + """Test that it raises if the maximum number of retries is reached.""" + with pytest.raises( + exceptions.TypesenseClientError, + match="All nodes are unhealthy", + ): + fake_api_call._execute_request( + "GET", + "/", + as_json=True, + entity_type=typing.Dict[str, str], + num_retries=10, + last_exception=None, + ) diff --git a/tests/client_test.py b/tests/client_test.py new file mode 100644 index 0000000..ffff3a4 --- /dev/null +++ b/tests/client_test.py @@ -0,0 +1,74 @@ +"""Tests for the Client class.""" + +from tests.fixtures.document_fixtures import Companies +from tests.utils.object_assertions import assert_match_object, assert_object_lists_match +from typesense.sync.client import Client +from typesense.configuration import ConfigDict + + +def test_client_init(fake_config_dict: ConfigDict) -> None: + """Test the Client class __init__ method.""" + fake_client = Client(fake_config_dict) + assert fake_client.config == fake_client.api_call.config + + assert_match_object(fake_client.api_call.config, fake_client.config) + assert_object_lists_match( + fake_client.api_call.node_manager.nodes, fake_client.config.nodes + ) + assert_match_object( + fake_client.api_call.config.nearest_node, + fake_client.config.nearest_node, + ) + + assert fake_client.collections + assert fake_client.collections.collections is not None + assert fake_client.multi_search + assert fake_client.keys + assert fake_client.keys.keys is not None + assert fake_client.aliases + assert fake_client.aliases.aliases is not None + assert fake_client.analyticsV1 + assert fake_client.analyticsV1.rules + assert fake_client.analyticsV1.rules.rules is not None + assert fake_client.operations + assert fake_client.debug + + +def test_get_collection(fake_client: Client) -> None: + """Test the Client 
class get_collection method.""" + collection = fake_client.typed_collection(model=Companies, name="companies") + + assert collection + assert collection.name == "companies" + assert collection.documents.documents is not None + + +def test_get_collection_no_name(fake_client: Client) -> None: + """Test the Client class get_collection method.""" + collection = fake_client.typed_collection(model=Companies) + + assert collection + assert collection.name == "companies" + assert collection.documents.documents is not None + + +def test_retrieve_collection_actual( + actual_client: Client, + delete_all: None, + create_collection: None, +) -> None: + """Test that the client can retrieve an actual collection.""" + collection = actual_client.typed_collection(model=Companies, name="companies") + + assert collection is not None + + +def test_retrieve_collection_actual_no_name( + actual_client: Client, + delete_all: None, + create_collection: None, +) -> None: + """Test that the client can retrieve an actual collection.""" + collection = actual_client.typed_collection(model=Companies) + + assert collection is not None diff --git a/tests/collection_test.py b/tests/collection_test.py new file mode 100644 index 0000000..7eb29fa --- /dev/null +++ b/tests/collection_test.py @@ -0,0 +1,102 @@ +"""Tests for the Collection class.""" + +from tests.utils.object_assertions import ( + assert_match_object, + assert_object_lists_match, + assert_to_contain_object, +) +from typesense.sync.api_call import ApiCall +from typesense.sync.collection import Collection +from typesense.sync.collections import Collections +from typesense.types.collection import CollectionSchema + + +def test_init(fake_api_call: ApiCall) -> None: + """Test that the Collection object is initialized correctly.""" + collection = Collection(fake_api_call, "companies") + + assert collection.name == "companies" + assert_match_object(collection.api_call, fake_api_call) + assert_object_lists_match( + 
collection.api_call.node_manager.nodes, + fake_api_call.node_manager.nodes, + ) + assert_match_object( + collection.api_call.config.nearest_node, + fake_api_call.config.nearest_node, + ) + assert collection.overrides.collection_name == "companies" + assert collection._endpoint_path == "/collections/companies" # noqa: WPS437 + + +def test_actual_retrieve( + actual_collections: Collections, + delete_all: None, + create_collection: None, +) -> None: + """Test that the Collection object can retrieve a collection.""" + response = actual_collections["companies"].retrieve() + + expected: CollectionSchema = { + "default_sorting_field": "num_employees", + "enable_nested_fields": False, + "fields": [ + { + "name": "company_name", + "type": "string", + "facet": False, + "index": True, + "optional": False, + "locale": "", + "sort": False, + "infix": False, + "stem": False, + "stem_dictionary": "", + "truncate_len": 100, + "store": True, + }, + { + "name": "num_employees", + "type": "int32", + "facet": False, + "index": True, + "optional": False, + "locale": "", + "sort": True, + "infix": False, + "stem": False, + "stem_dictionary": "", + "truncate_len": 100, + "store": True, + }, + ], + "name": "companies", + "num_documents": 0, + "symbols_to_index": [], + "token_separators": [], + "synonym_sets": [], + "curation_sets": [], + } + + response.pop("created_at") + + assert response == expected + + +def test_actual_update( + actual_collections: Collections, + delete_all: None, + create_collection: None, +) -> None: + """Test that the Collection object can update a collection.""" + response = actual_collections["companies"].update( + {"fields": [{"name": "num_locations", "type": "int32"}]}, + ) + + expected: CollectionSchema = { + "fields": [ + {"name": "num_locations", "truncate_len": 100, "type": "int32"}, + ], + } + + assert_to_contain_object(response.get("fields")[0], expected.get("fields")[0]) diff --git a/tests/collections_test.py b/tests/collections_test.py new file mode 
100644 index 0000000..788e3dc --- /dev/null +++ b/tests/collections_test.py @@ -0,0 +1,353 @@ +"""Tests for the Collections class.""" + + +import sys + +from typesense.async_.api_call import AsyncApiCall + + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + +from tests.utils.object_assertions import assert_match_object, assert_object_lists_match +from typesense.sync.api_call import ApiCall +from typesense.sync.collections import Collections +from typesense.async_.collections import AsyncCollections +from typesense.types.collection import CollectionSchema + + +def test_init(fake_api_call: ApiCall) -> None: + """Test that the Collections object is initialized correctly.""" + collections = Collections(fake_api_call) + + assert_match_object(collections.api_call, fake_api_call) + assert_object_lists_match( + collections.api_call.node_manager.nodes, + fake_api_call.node_manager.nodes, + ) + assert_match_object( + collections.api_call.config.nearest_node, + fake_api_call.config.nearest_node, + ) + assert not collections.collections + + +def test_init_async(fake_async_api_call: AsyncApiCall) -> None: + """Test that the Collections object is initialized correctly.""" + collections = AsyncCollections(fake_async_api_call) + + assert_match_object(collections.api_call, fake_async_api_call) + assert_object_lists_match( + collections.api_call.node_manager.nodes, + fake_async_api_call.node_manager.nodes, + ) + assert_match_object( + collections.api_call.config.nearest_node, + fake_async_api_call.config.nearest_node, + ) + assert not collections.collections + + +def test_get_missing_collection(fake_collections: Collections) -> None: + """Test that the Collections object can get a missing collection.""" + collection = fake_collections["companies"] + + assert collection.name == "companies" + assert_match_object(collection.api_call, fake_collections.api_call) + assert_object_lists_match( + collection.api_call.node_manager.nodes, + 
fake_collections.api_call.node_manager.nodes, + ) + assert_match_object( + collection.api_call.config.nearest_node, + fake_collections.api_call.config.nearest_node, + ) + assert collection.overrides.collection_name == "companies" + assert collection._endpoint_path == "/collections/companies" # noqa: WPS437 + + +def test_get_missing_collection_async(fake_async_collections: Collections) -> None: + """Test that the Collections object can get a missing collection.""" + collection = fake_async_collections["companies"] + + assert collection.name == "companies" + assert_match_object(collection.api_call, fake_async_collections.api_call) + assert_object_lists_match( + collection.api_call.node_manager.nodes, + fake_async_collections.api_call.node_manager.nodes, + ) + assert_match_object( + collection.api_call.config.nearest_node, + fake_async_collections.api_call.config.nearest_node, + ) + assert collection.overrides.collection_name == "companies" + assert collection._endpoint_path == "/collections/companies" # noqa: WPS437 + + +def test_get_existing_collection(fake_collections: Collections) -> None: + """Test that the Collections object can get an existing collection.""" + collection = fake_collections["companies"] + fetched_collection = fake_collections["companies"] + + assert len(fake_collections.collections) == 1 + + assert collection is fetched_collection + + +def test_actual_create(actual_collections: Collections, delete_all: None) -> None: + """Test that the Collections object can create a collection on Typesense Server.""" + expected: CollectionSchema = { + "default_sorting_field": "", + "enable_nested_fields": False, + "fields": [ + { + "name": "company_name", + "type": "string", + "facet": False, + "index": True, + "optional": False, + "locale": "", + "sort": False, + "infix": False, + "stem": False, + "stem_dictionary": "", + "truncate_len": 100, + "store": True, + }, + { + "name": "num_employees", + "type": "int32", + "facet": False, + "index": True, + 
"optional": False, + "locale": "", + "sort": False, + "infix": False, + "stem": False, + "stem_dictionary": "", + "truncate_len": 100, + "store": True, + }, + ], + "name": "companies", + "num_documents": 0, + "symbols_to_index": [], + "token_separators": [], + "synonym_sets": [], + "curation_sets": [], + } + + response = actual_collections.create( + { + "name": "companies", + "fields": [ + { + "name": "company_name", + "type": "string", + }, + { + "name": "num_employees", + "type": "int32", + "sort": False, + }, + ], + }, + ) + + response.pop("created_at") + + assert response == expected + + +def test_actual_retrieve( + actual_collections: Collections, + delete_all: None, + create_collection: None, +) -> None: + """Test that the Collections object can retrieve collections.""" + response = actual_collections.retrieve() + + expected: typing.List[CollectionSchema] = [ + { + "default_sorting_field": "num_employees", + "enable_nested_fields": False, + "fields": [ + { + "name": "company_name", + "type": "string", + "facet": False, + "index": True, + "optional": False, + "locale": "", + "sort": False, + "infix": False, + "stem": False, + "stem_dictionary": "", + "truncate_len": 100, + "store": True, + }, + { + "name": "num_employees", + "type": "int32", + "facet": False, + "index": True, + "optional": False, + "locale": "", + "sort": True, + "infix": False, + "stem": False, + "stem_dictionary": "", + "truncate_len": 100, + "store": True, + }, + ], + "name": "companies", + "num_documents": 0, + "symbols_to_index": [], + "token_separators": [], + "synonym_sets": [], + "curation_sets": [], + }, + ] + + response[0].pop("created_at") + assert response == expected + + +def test_actual_contains( + actual_collections: Collections, + delete_all: None, + create_collection: None, +) -> None: + """Test that the Collections object can check if a collection exists in Typesense.""" + # Test for existing collection + assert "companies" in actual_collections + + # Test for non-existing 
collection + assert "non_existent_collection" not in actual_collections + # Test again + assert "non_existent_collection" not in actual_collections + + +async def test_actual_create_async( + actual_async_collections: AsyncCollections, delete_all: None +) -> None: + """Test that the Collections object can create a collection on Typesense Server.""" + expected: CollectionSchema = { + "default_sorting_field": "", + "enable_nested_fields": False, + "fields": [ + { + "name": "company_name", + "type": "string", + "facet": False, + "index": True, + "optional": False, + "locale": "", + "sort": False, + "infix": False, + "stem": False, + "stem_dictionary": "", + "truncate_len": 100, + "store": True, + }, + { + "name": "num_employees", + "type": "int32", + "facet": False, + "index": True, + "optional": False, + "locale": "", + "sort": False, + "infix": False, + "stem": False, + "stem_dictionary": "", + "truncate_len": 100, + "store": True, + }, + ], + "name": "companies", + "num_documents": 0, + "symbols_to_index": [], + "token_separators": [], + "synonym_sets": [], + "curation_sets": [], + } + + response = await actual_async_collections.create( + { + "name": "companies", + "fields": [ + { + "name": "company_name", + "type": "string", + }, + { + "name": "num_employees", + "type": "int32", + "sort": False, + }, + ], + }, + ) + + response.pop("created_at") + + assert response == expected + + +async def test_actual_retrieve_async( + actual_async_collections: AsyncCollections, + delete_all: None, + create_collection: None, +) -> None: + """Test that the Collections object can retrieve collections.""" + response = await actual_async_collections.retrieve() + + expected: typing.List[CollectionSchema] = [ + { + "default_sorting_field": "num_employees", + "enable_nested_fields": False, + "fields": [ + { + "name": "company_name", + "type": "string", + "facet": False, + "index": True, + "optional": False, + "locale": "", + "sort": False, + "infix": False, + "stem": False, + 
"stem_dictionary": "", + "truncate_len": 100, + "store": True, + }, + { + "name": "num_employees", + "type": "int32", + "facet": False, + "index": True, + "optional": False, + "locale": "", + "sort": True, + "infix": False, + "stem": False, + "stem_dictionary": "", + "truncate_len": 100, + "store": True, + }, + ], + "name": "companies", + "num_documents": 0, + "symbols_to_index": [], + "token_separators": [], + "synonym_sets": [], + "curation_sets": [], + }, + ] + + response[0].pop("created_at") + assert response == expected diff --git a/tests/configuration_test.py b/tests/configuration_test.py new file mode 100644 index 0000000..da3166f --- /dev/null +++ b/tests/configuration_test.py @@ -0,0 +1,186 @@ +"""Tests for the Configuration class.""" + +import types + +import pytest + +from tests.utils.object_assertions import ( + assert_match_object, + assert_object_lists_match, + assert_to_contain_object, +) +from typesense.configuration import ConfigDict, Configuration, Node +from typesense.exceptions import ConfigError + +DEFAULT_NODE = types.MappingProxyType( + {"host": "localhost", "port": 8108, "protocol": "http"}, +) + + +def test_configuration_defaults() -> None: + """Test the Configuration constructor defaults.""" + config: ConfigDict = { + "nodes": [ + { + "host": "localhost", + "port": 8108, + "protocol": "http", + "path": "3", + }, + DEFAULT_NODE, + ], + "nearest_node": DEFAULT_NODE, + "api_key": "xyz", + } + + configuration = Configuration(config) + + nodes = [ + Node(host="localhost", port=8108, protocol="http", path=""), + Node(host="localhost", port=8108, protocol="http", path="3"), + ] + nearest_node = Node(host="localhost", port=8108, protocol="http", path="") + + assert_object_lists_match(configuration.nodes, nodes) + + assert_match_object(configuration.nearest_node, nearest_node) + + expected = { + "api_key": "xyz", + "connection_timeout_seconds": 3.0, + "num_retries": 3, + "retry_interval_seconds": 1.0, + "verify": True, + } + + 
assert_to_contain_object(configuration, expected) + + +def test_configuration_explicit() -> None: + """Test the Configuration constructor with explicit values.""" + config: ConfigDict = { + "nodes": [DEFAULT_NODE], + "nearest_node": DEFAULT_NODE, + "api_key": "xyz", + "connection_timeout_seconds": 5.0, + "num_retries": 5, + "retry_interval_seconds": 2.0, + "verify": False, + "additional_headers": {"X-Test": "test", "X-Test2": "test2"}, + } + + configuration = Configuration(config) + + nodes = [Node(host="localhost", port=8108, protocol="http", path="")] + nearest_node = Node(host="localhost", port=8108, protocol="http", path="") + + assert_object_lists_match(configuration.nodes, nodes) + assert_match_object(configuration.nearest_node, nearest_node) + + expected = { + "api_key": "xyz", + "connection_timeout_seconds": 5.0, + "num_retries": 5, + "retry_interval_seconds": 2.0, + "verify": False, + "additional_headers": {"X-Test": "test", "X-Test2": "test2"}, + } + + assert_to_contain_object(configuration, expected) + + +def test_configuration_no_nearest_node() -> None: + """Test the Configuration constructor with no nearest node.""" + config: ConfigDict = { + "nodes": [DEFAULT_NODE], + "api_key": "xyz", + } + + configuration = Configuration(config) + + nodes = Node(host="localhost", port=8108, protocol="http", path="") + + for node in configuration.nodes: + assert_match_object(node, nodes) + + expected = { + "api_key": "xyz", + "connection_timeout_seconds": 3.0, + "num_retries": 3, + "retry_interval_seconds": 1.0, + "verify": True, + "nearest_node": None, + } + assert_to_contain_object(configuration, expected) + + +def test_configuration_empty_nodes() -> None: + """Test the Configuration constructor with empty nodes.""" + config: ConfigDict = { + "nodes": [], + "api_key": "xyz", + } + + with pytest.raises( + ConfigError, + match="`nodes` is not defined.", # noqa: B950 + ): + Configuration(config) + + +def test_configuration_invalid_node() -> None: + """Test the 
Configuration constructor with an invalid node.""" + config: ConfigDict = { + "nodes": [{"host": "localhost"}], + "api_key": "xyz", + } + + with pytest.raises( + ConfigError, + match="`node` entry must be a URL string or a dictionary with the following required keys: host, port, protocol", # noqa: B950 + ): + Configuration(config) + + +def test_configuration_invalid_node_url() -> None: + """Test the Configuration constructor with an invalid node as a url.""" + config: ConfigDict = { + "nodes": ["http://localhost"], + "api_key": "xyz", + } + + with pytest.raises( + ConfigError, + match="Node URL does not contain the port.", + ): + Configuration(config) + + +def test_configuration_invalid_nearest_node() -> None: + """Test the Configuration constructor with an invalid nearest node.""" + config: ConfigDict = { + "nodes": [DEFAULT_NODE], + "nearest_node": {"host": "localhost"}, + "api_key": "xyz", + } + + with pytest.raises( + ConfigError, + match="`nearest_node` entry must be a URL string or a dictionary with the following required keys: host, port, protocol", # noqa: B950 + ): + Configuration(config) + + +def test_configuration_invalid_nearest_node_url() -> None: + """Test the Configuration constructor with an invalid nearest node as a url.""" + config: ConfigDict = { + "nodes": [DEFAULT_NODE], + "nearest_node": "http://localhost", + "api_key": "xyz", + } + + with pytest.raises( + ConfigError, + match="Node URL does not contain the port.", + ): + Configuration(config) diff --git a/tests/configuration_validations_test.py b/tests/configuration_validations_test.py new file mode 100644 index 0000000..d408e05 --- /dev/null +++ b/tests/configuration_validations_test.py @@ -0,0 +1,201 @@ +"""Tests for the ConfigurationValidations class.""" + +import types + +import pytest + +from typesense.configuration import ConfigDict, ConfigurationValidations +from typesense.exceptions import ConfigError + +DEFAULT_NODE = types.MappingProxyType( + {"host": "localhost", "port": 8108, 
"protocol": "http"}, +) + + +def test_validate_node_fields_with_url() -> None: + """Test validate_node_fields with a URL string.""" + assert ConfigurationValidations.validate_node_fields("http://localhost:8108/path") + + +def test_validate_node_fields_with_valid_dict() -> None: + """Test validate_node_fields with a valid dictionary.""" + assert ConfigurationValidations.validate_node_fields( + DEFAULT_NODE, + ) + + +def test_validate_node_fields_with_invalid_dict() -> None: + """Test validate_node_fields with an invalid dictionary.""" + assert not ConfigurationValidations.validate_node_fields( + { + "host": "localhost", + "port": 8108, + }, + ) + + +def test_deprecation_warning_timeout_seconds(caplog: pytest.LogCaptureFixture) -> None: + """Test that a deprecation warning is issued for the 'timeout_seconds' field.""" + config_dict: ConfigDict = { + "nodes": [DEFAULT_NODE], + "nearest_node": "http://localhost:8108", + "api_key": "xyz", + "timeout_seconds": 10, + } + ConfigurationValidations.show_deprecation_warnings(config_dict) + assert ( + " ".join( + [ + "Deprecation warning: timeout_seconds is now renamed", + "to connection_timeout_seconds", + ], + ) + in caplog.text + ) + + +def test_deprecation_warning_master_node(caplog: pytest.LogCaptureFixture) -> None: + """Test that a deprecation warning is issued for the 'master_node' field.""" + config_dict: ConfigDict = { + "nodes": [DEFAULT_NODE], + "nearest_node": "http://localhost:8108", + "api_key": "xyz", + "master_node": "http://localhost:8108", + } + ConfigurationValidations.show_deprecation_warnings(config_dict) + assert ( + "Deprecation warning: master_node is now consolidated to nodes" in caplog.text + ) + + +def test_deprecation_warning_read_replica_nodes( + caplog: pytest.LogCaptureFixture, +) -> None: + """Test that a deprecation warning is issued for the 'read_replica_nodes' field.""" + config_dict: ConfigDict = { + "nodes": [DEFAULT_NODE], + "nearest_node": "http://localhost:8108", + "api_key": "xyz", + 
"read_replica_nodes": ["http://localhost:8109"], + } + ConfigurationValidations.show_deprecation_warnings(config_dict) + + assert ( + "Deprecation warning: read_replica_nodes is now consolidated to nodes" + ) in caplog.text + + +def test_validate_config_dict() -> None: + """Test validate_config_dict.""" + ConfigurationValidations.validate_config_dict( + { + "nodes": [ + { + "host": "localhost", + "port": 8108, + "protocol": "http", + }, + ], + "nearest_node": { + "host": "localhost", + "port": 8108, + "protocol": "http", + }, + "api_key": "xyz", + }, + ) + + +def test_validate_config_dict_with_string_nearest_node() -> None: + """Test validate_config_dict with nearest node as a string.""" + ConfigurationValidations.validate_config_dict( + { + "nodes": [ + { + "host": "localhost", + "port": 8108, + "protocol": "http", + }, + ], + "nearest_node": "http://localhost:8108", + "api_key": "xyz", + }, + ) + + +def test_validate_config_dict_with_string_nodes() -> None: + """Test validate_config_dict with nodes as a string.""" + ConfigurationValidations.validate_config_dict( + { + "nodes": "http://localhost:8108", + "nearest_node": "http://localhost:8108", + "api_key": "xyz", + }, + ) + + +def test_validate_config_dict_with_no_nodes() -> None: + """Test validate_config_dict with no nodes.""" + with pytest.raises(ConfigError, match="`nodes` is not defined."): + ConfigurationValidations.validate_config_dict( + { + "nearest_node": "http://localhost:8108", + "api_key": "xyz", + }, + ) + + +def test_validate_config_dict_with_no_api_key() -> None: + """Test validate_config_dict with no api_key.""" + with pytest.raises(ConfigError, match="`api_key` is not defined."): + ConfigurationValidations.validate_config_dict( + { + "nodes": [DEFAULT_NODE], + "nearest_node": "http://localhost:8108", + }, + ) + + +def test_validate_config_dict_with_wrong_node() -> None: + """Test validate_config_dict with wrong node.""" + with pytest.raises( + ConfigError, + match="`node` entry must be a URL 
string or a dictionary with the following required keys: host, port, protocol", # noqa: B950 + ): + ConfigurationValidations.validate_config_dict( + { + "nodes": [ + { + "host": "localhost", + "port": 8108, + "wrong_field": "invalid", + }, + ], + "api_key": "xyz", + }, + ) + + +def test_validate_config_dict_with_wrong_nearest_node() -> None: + """Test validate_config_dict with wrong nearest node.""" + with pytest.raises( + ConfigError, + match="`nearest_node` entry must be a URL string or a dictionary with the following required keys: host, port, protocol", # noqa: B950 + ): + ConfigurationValidations.validate_config_dict( + { + "nodes": [ + { + "host": "localhost", + "port": 8108, + "protocol": "http", + }, + ], + "nearest_node": { + "host": "localhost", + "port": 8108, + "wrong_field": "invalid", + }, + "api_key": "xyz", + }, + ) diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 0000000..595a742 --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,12 @@ +"""Pytest configuration file.""" + +from glob import glob + +import pytest + +pytest.register_assert_rewrite("utils.object_assertions") + +pytest_plugins = [ + fixture_file.replace("/", ".").replace(".py", "") + for fixture_file in glob("**/tests/fixtures/[!__]*.py", recursive=True) +] diff --git a/tests/conversation_model_test.py b/tests/conversation_model_test.py new file mode 100644 index 0000000..6f90550 --- /dev/null +++ b/tests/conversation_model_test.py @@ -0,0 +1,213 @@ +"""Tests for the ConversationModel class.""" + + +import pytest +from dotenv import load_dotenv + +from tests.utils.object_assertions import ( + assert_match_object, + assert_object_lists_match, + assert_to_contain_keys, +) +from typesense.sync.api_call import ApiCall +from typesense.async_.api_call import AsyncApiCall +from typesense.async_.conversation_model import AsyncConversationModel +from typesense.async_.conversations_models import AsyncConversationsModels +from typesense.sync.conversation_model import 
ConversationModel +from typesense.sync.conversations_models import ConversationsModels + +load_dotenv() + + +def test_init(fake_api_call: ApiCall) -> None: + """Test that the ConversationModel object is initialized correctly.""" + conversation_model = ConversationModel( + fake_api_call, + "conversation_model_id", + ) + + assert conversation_model.model_id == "conversation_model_id" + assert_match_object(conversation_model.api_call, fake_api_call) + assert_object_lists_match( + conversation_model.api_call.node_manager.nodes, + fake_api_call.node_manager.nodes, + ) + assert_match_object( + conversation_model.api_call.config.nearest_node, + fake_api_call.config.nearest_node, + ) + assert ( + conversation_model._endpoint_path # noqa: WPS437 + == "/conversations/models/conversation_model_id" + ) + + +def test_init_async(fake_async_api_call: AsyncApiCall) -> None: + """Test that the AsyncConversationModel object is initialized correctly.""" + conversation_model = AsyncConversationModel( + fake_async_api_call, + "conversation_model_id", + ) + + assert conversation_model.model_id == "conversation_model_id" + assert_match_object(conversation_model.api_call, fake_async_api_call) + assert_object_lists_match( + conversation_model.api_call.node_manager.nodes, + fake_async_api_call.node_manager.nodes, + ) + assert_match_object( + conversation_model.api_call.config.nearest_node, + fake_async_api_call.config.nearest_node, + ) + assert ( + conversation_model._endpoint_path # noqa: WPS437 + == "/conversations/models/conversation_model_id" + ) + + +@pytest.mark.open_ai +def test_actual_retrieve( + actual_conversations_models: ConversationsModels, + delete_all_conversations_models: None, + create_conversations_model: str, +) -> None: + """Test it can retrieve a conversation_model from Typesense Server.""" + response = actual_conversations_models[create_conversations_model].retrieve() + + assert_to_contain_keys( + response, + ["id", "model_name", "system_prompt", "max_bytes", 
"api_key"], + ) + assert response.get("id") == create_conversations_model + + +@pytest.mark.open_ai +def test_actual_update( + actual_conversations_models: ConversationsModels, + delete_all_conversations_models: None, + create_conversations_model: str, +) -> None: + """Test that it can update a conversation_model from Typesense Server.""" + response = actual_conversations_models[create_conversations_model].update( + {"system_prompt": "This is a new system prompt"}, + ) + + assert_to_contain_keys( + response, + [ + "id", + "model_name", + "system_prompt", + "max_bytes", + "api_key", + "ttl", + "history_collection", + ], + ) + + assert response.get("system_prompt") == "This is a new system prompt" + assert response.get("id") == create_conversations_model + + +@pytest.mark.open_ai +def test_actual_delete( + actual_conversations_models: ConversationsModels, + delete_all_conversations_models: None, + create_conversations_model: str, +) -> None: + """Test that it can delete an conversation_model from Typesense Server.""" + response = actual_conversations_models[create_conversations_model].delete() + + assert_to_contain_keys( + response, + [ + "id", + "model_name", + "system_prompt", + "max_bytes", + "api_key", + "ttl", + "history_collection", + ], + ) + + assert response.get("system_prompt") == "This is a system prompt" + assert response.get("id") == create_conversations_model + assert response.get("id") == create_conversations_model + + +@pytest.mark.open_ai +async def test_actual_retrieve_async( + actual_async_conversations_models: AsyncConversationsModels, + delete_all_conversations_models: None, + create_conversations_model: str, +) -> None: + """Test it can retrieve a conversation_model from Typesense Server.""" + response = await actual_async_conversations_models[ + create_conversations_model + ].retrieve() + + assert_to_contain_keys( + response, + ["id", "model_name", "system_prompt", "max_bytes", "api_key"], + ) + assert response.get("id") == 
create_conversations_model + + +@pytest.mark.open_ai +async def test_actual_update_async( + actual_async_conversations_models: AsyncConversationsModels, + delete_all_conversations_models: None, + create_conversations_model: str, +) -> None: + """Test that it can update a conversation_model from Typesense Server.""" + response = await actual_async_conversations_models[ + create_conversations_model + ].update( + {"system_prompt": "This is a new system prompt"}, + ) + + assert_to_contain_keys( + response, + [ + "id", + "model_name", + "system_prompt", + "max_bytes", + "api_key", + "ttl", + "history_collection", + ], + ) + + assert response.get("system_prompt") == "This is a new system prompt" + assert response.get("id") == create_conversations_model + + +@pytest.mark.open_ai +async def test_actual_delete_async( + actual_async_conversations_models: AsyncConversationsModels, + delete_all_conversations_models: None, + create_conversations_model: str, +) -> None: + """Test that it can delete an conversation_model from Typesense Server.""" + response = await actual_async_conversations_models[ + create_conversations_model + ].delete() + + assert_to_contain_keys( + response, + [ + "id", + "model_name", + "system_prompt", + "max_bytes", + "api_key", + "ttl", + "history_collection", + ], + ) + + assert response.get("system_prompt") == "This is a system prompt" + assert response.get("id") == create_conversations_model + assert response.get("id") == create_conversations_model diff --git a/tests/conversations_models_test.py b/tests/conversations_models_test.py new file mode 100644 index 0000000..97964d8 --- /dev/null +++ b/tests/conversations_models_test.py @@ -0,0 +1,214 @@ +"""Tests for the ConversationsModels class.""" + + +import os +import sys + +import pytest + +from tests.utils.object_assertions import ( + assert_match_object, + assert_object_lists_match, + assert_to_contain_keys, + assert_to_contain_object, +) +from typesense.sync.api_call import ApiCall +from 
typesense.async_.api_call import AsyncApiCall +from typesense.async_.conversations_models import AsyncConversationsModels +from typesense.sync.conversations_models import ConversationsModels + + +def test_init(fake_api_call: ApiCall) -> None: + """Test that the ConversationsModels object is initialized correctly.""" + conversations_models = ConversationsModels(fake_api_call) + + assert_match_object(conversations_models.api_call, fake_api_call) + assert_object_lists_match( + conversations_models.api_call.node_manager.nodes, + fake_api_call.node_manager.nodes, + ) + assert_match_object( + conversations_models.api_call.config.nearest_node, + fake_api_call.config.nearest_node, + ) + + assert not conversations_models.conversations_models + + +def test_init_async(fake_async_api_call: AsyncApiCall) -> None: + """Test that the AsyncConversationsModels object is initialized correctly.""" + conversations_models = AsyncConversationsModels(fake_async_api_call) + + assert_match_object(conversations_models.api_call, fake_async_api_call) + assert_object_lists_match( + conversations_models.api_call.node_manager.nodes, + fake_async_api_call.node_manager.nodes, + ) + assert_match_object( + conversations_models.api_call.config.nearest_node, + fake_async_api_call.config.nearest_node, + ) + + assert not conversations_models.conversations_models + + +def test_get_missing_conversations_model( + fake_conversations_models: ConversationsModels, +) -> None: + """Test that the ConversationsModels object can get a missing conversations_model.""" + conversations_model = fake_conversations_models["conversation_model_id"] + + assert_match_object( + conversations_model.api_call, + fake_conversations_models.api_call, + ) + assert_object_lists_match( + conversations_model.api_call.node_manager.nodes, + fake_conversations_models.api_call.node_manager.nodes, + ) + assert_match_object( + conversations_model.api_call.config.nearest_node, + fake_conversations_models.api_call.config.nearest_node, + ) + 
assert ( + conversations_model._endpoint_path # noqa: WPS437 + == "/conversations/models/conversation_model_id" + ) + + +def test_get_missing_conversations_model_async( + fake_async_conversations_models: AsyncConversationsModels, +) -> None: + """Test that the AsyncConversationsModels object can get a missing conversations_model.""" + conversations_model = fake_async_conversations_models["conversation_model_id"] + + assert_match_object( + conversations_model.api_call, + fake_async_conversations_models.api_call, + ) + assert_object_lists_match( + conversations_model.api_call.node_manager.nodes, + fake_async_conversations_models.api_call.node_manager.nodes, + ) + assert_match_object( + conversations_model.api_call.config.nearest_node, + fake_async_conversations_models.api_call.config.nearest_node, + ) + assert ( + conversations_model._endpoint_path # noqa: WPS437 + == "/conversations/models/conversation_model_id" + ) + + +def test_get_existing_conversations_model( + fake_conversations_models: ConversationsModels, +) -> None: + """Test that the ConversationsModels object can get an existing conversations_model.""" + conversations_model = fake_conversations_models["conversations_model_id"] + fetched_conversations_model = fake_conversations_models["conversations_model_id"] + + assert len(fake_conversations_models.conversations_models) == 1 + + assert conversations_model is fetched_conversations_model + + +def test_get_existing_conversations_model_async( + fake_async_conversations_models: AsyncConversationsModels, +) -> None: + """Test that the AsyncConversationsModels object can get an existing conversations_model.""" + conversations_model = fake_async_conversations_models["conversations_model_id"] + fetched_conversations_model = fake_async_conversations_models[ + "conversations_model_id" + ] + + assert len(fake_async_conversations_models.conversations_models) == 1 + + assert conversations_model is fetched_conversations_model + + +@pytest.mark.open_ai +def 
test_actual_create( + actual_conversations_models: ConversationsModels, + create_conversation_history_collection: None, +) -> None: + """Test that it can create an conversations_model on Typesense Server.""" + response = actual_conversations_models.create( + { + "api_key": os.environ["OPEN_AI_KEY"], + "history_collection": "conversation_store", + "max_bytes": 16384, + "model_name": "openai/gpt-3.5-turbo", + "system_prompt": "This is meant for testing purposes", + }, + ) + + assert_to_contain_keys( + response, + ["id", "api_key", "max_bytes", "model_name", "system_prompt"], + ) + + +@pytest.mark.open_ai +def test_actual_retrieve( + actual_conversations_models: ConversationsModels, + delete_all: None, + delete_all_conversations_models: None, + create_conversations_model: str, +) -> None: + """Test that it can retrieve an conversations_model from Typesense Server.""" + response = actual_conversations_models.retrieve() + assert len(response) == 1 + assert_to_contain_object( + response[0], + { + "id": create_conversations_model, + }, + ) + assert_to_contain_keys( + response[0], + ["id", "api_key", "max_bytes", "model_name", "system_prompt"], + ) + + +@pytest.mark.open_ai +async def test_actual_create_async( + actual_async_conversations_models: AsyncConversationsModels, + create_conversation_history_collection: None, +) -> None: + """Test that it can create an conversations_model on Typesense Server.""" + response = await actual_async_conversations_models.create( + { + "api_key": os.environ["OPEN_AI_KEY"], + "history_collection": "conversation_store", + "max_bytes": 16384, + "model_name": "openai/gpt-3.5-turbo", + "system_prompt": "This is meant for testing purposes", + }, + ) + + assert_to_contain_keys( + response, + ["id", "api_key", "max_bytes", "model_name", "system_prompt"], + ) + + +@pytest.mark.open_ai +async def test_actual_retrieve_async( + actual_async_conversations_models: AsyncConversationsModels, + delete_all: None, + delete_all_conversations_models: None, + 
create_conversations_model: str, +) -> None: + """Test that it can retrieve an conversations_model from Typesense Server.""" + response = await actual_async_conversations_models.retrieve() + assert len(response) == 1 + assert_to_contain_object( + response[0], + { + "id": create_conversations_model, + }, + ) + assert_to_contain_keys( + response[0], + ["id", "api_key", "max_bytes", "model_name", "system_prompt"], + ) diff --git a/tests/curation_set_test.py b/tests/curation_set_test.py new file mode 100644 index 0000000..a75196a --- /dev/null +++ b/tests/curation_set_test.py @@ -0,0 +1,347 @@ +"""Tests for the CurationSet class including items APIs.""" + + +import pytest + +from tests.utils.version import is_v30_or_above +from typesense.async_.curation_set import AsyncCurationSet +from typesense.async_.curation_sets import AsyncCurationSets +from typesense.sync.client import Client +from typesense.sync.curation_set import CurationSet +from typesense.sync.curation_sets import CurationSets +from typesense.types.curation_set import CurationItemSchema + +pytestmark = pytest.mark.skipif( + not is_v30_or_above( + Client( + { + "api_key": "xyz", + "nodes": [{"host": "localhost", "port": 8108, "protocol": "http"}], + } + ) + ), + reason="Run curation set tests only on v30+", +) + + +def test_paths(fake_curation_set: CurationSet) -> None: + assert fake_curation_set._endpoint_path == "/curation_sets/products" # noqa: WPS437 + assert fake_curation_set._items_path == "/curation_sets/products/items" # noqa: WPS437 + + +def test_paths_async(fake_async_curation_set: AsyncCurationSet) -> None: + assert fake_async_curation_set._endpoint_path == "/curation_sets/products" # noqa: WPS437 + assert fake_async_curation_set._items_path == "/curation_sets/products/items" # noqa: WPS437 + + +def test_actual_retrieve( + actual_curation_sets: CurationSets, + delete_all_curation_sets: None, + create_curation_set: None, +) -> None: + """Test that the CurationSet object can retrieve a curation set 
from Typesense Server.""" + response = actual_curation_sets["products"].retrieve() + + assert response == { + "items": [ + { + "excludes": [ + { + "id": "999", + }, + ], + "filter_curated_hits": False, + "id": "rule-1", + "includes": [ + { + "id": "123", + "position": 1, + }, + ], + "remove_matched_tokens": False, + "rule": { + "match": "contains", + "query": "shoe", + }, + "stop_processing": True, + }, + ], + "name": "products", + } + + +def test_actual_delete( + actual_curation_sets: CurationSets, + create_curation_set: None, +) -> None: + """Test that the CurationSet object can delete a curation set from Typesense Server.""" + response = actual_curation_sets["products"].delete() + + print(response) + assert response == {"name": "products"} + + +def test_actual_list_items( + actual_curation_sets: CurationSets, + delete_all_curation_sets: None, + create_curation_set: None, +) -> None: + """Test that the CurationSet object can list items from Typesense Server.""" + response = actual_curation_sets["products"].list_items() + + assert response == [ + { + "excludes": [ + { + "id": "999", + }, + ], + "filter_curated_hits": False, + "id": "rule-1", + "includes": [ + { + "id": "123", + "position": 1, + }, + ], + "remove_matched_tokens": False, + "rule": { + "match": "contains", + "query": "shoe", + }, + "stop_processing": True, + }, + ] + + +def test_actual_get_item( + actual_curation_sets: CurationSets, + delete_all_curation_sets: None, + create_curation_set: None, +) -> None: + """Test that the CurationSet object can get a specific item from Typesense Server.""" + response = actual_curation_sets["products"].get_item("rule-1") + + assert response == { + "excludes": [ + { + "id": "999", + }, + ], + "filter_curated_hits": False, + "id": "rule-1", + "includes": [ + { + "id": "123", + "position": 1, + }, + ], + "remove_matched_tokens": False, + "rule": { + "match": "contains", + "query": "shoe", + }, + "stop_processing": True, + } + + +def test_actual_upsert_item( + 
actual_curation_sets: CurationSets, + delete_all_curation_sets: None, + create_curation_set: None, +) -> None: + """Test that the CurationSet object can upsert an item in Typesense Server.""" + payload: CurationItemSchema = { + "id": "rule-2", + "rule": {"query": "boot", "match": "exact"}, + "includes": [{"id": "456", "position": 2}], + "excludes": [{"id": "888"}], + } + response = actual_curation_sets["products"].upsert_item("rule-2", payload) + + assert response == { + "excludes": [ + { + "id": "888", + }, + ], + "id": "rule-2", + "includes": [ + { + "id": "456", + "position": 2, + }, + ], + "rule": { + "match": "exact", + "query": "boot", + }, + } + + +def test_actual_delete_item( + actual_curation_sets: CurationSets, + delete_all_curation_sets: None, + create_curation_set: None, +) -> None: + """Test that the CurationSet object can delete an item from Typesense Server.""" + response = actual_curation_sets["products"].delete_item("rule-1") + + assert response == {"id": "rule-1"} + + +async def test_actual_retrieve_async( + actual_async_curation_sets: AsyncCurationSets, + delete_all_curation_sets: None, + create_curation_set: None, +) -> None: + """Test that the AsyncCurationSet object can retrieve a curation set from Typesense Server.""" + response = await actual_async_curation_sets["products"].retrieve() + + assert response == { + "items": [ + { + "excludes": [ + { + "id": "999", + }, + ], + "filter_curated_hits": False, + "id": "rule-1", + "includes": [ + { + "id": "123", + "position": 1, + }, + ], + "remove_matched_tokens": False, + "rule": { + "match": "contains", + "query": "shoe", + }, + "stop_processing": True, + }, + ], + "name": "products", + } + + +async def test_actual_delete_async( + actual_async_curation_sets: AsyncCurationSets, + create_curation_set: None, +) -> None: + """Test that the AsyncCurationSet object can delete a curation set from Typesense Server.""" + response = await actual_async_curation_sets["products"].delete() + + assert response 
== {"name": "products"} + + +async def test_actual_list_items_async( + actual_async_curation_sets: AsyncCurationSets, + delete_all_curation_sets: None, + create_curation_set: None, +) -> None: + """Test that the AsyncCurationSet object can list items from Typesense Server.""" + response = await actual_async_curation_sets["products"].list_items() + + assert response == [ + { + "excludes": [ + { + "id": "999", + }, + ], + "filter_curated_hits": False, + "id": "rule-1", + "includes": [ + { + "id": "123", + "position": 1, + }, + ], + "remove_matched_tokens": False, + "rule": { + "match": "contains", + "query": "shoe", + }, + "stop_processing": True, + }, + ] + + +async def test_actual_get_item_async( + actual_async_curation_sets: AsyncCurationSets, + delete_all_curation_sets: None, + create_curation_set: None, +) -> None: + """Test that the AsyncCurationSet object can get a specific item from Typesense Server.""" + response = await actual_async_curation_sets["products"].get_item("rule-1") + + assert response == { + "excludes": [ + { + "id": "999", + }, + ], + "filter_curated_hits": False, + "id": "rule-1", + "includes": [ + { + "id": "123", + "position": 1, + }, + ], + "remove_matched_tokens": False, + "rule": { + "match": "contains", + "query": "shoe", + }, + "stop_processing": True, + } + + +async def test_actual_upsert_item_async( + actual_async_curation_sets: AsyncCurationSets, + delete_all_curation_sets: None, + create_curation_set: None, +) -> None: + """Test that the AsyncCurationSet object can upsert an item in Typesense Server.""" + payload: CurationItemSchema = { + "id": "rule-2", + "rule": {"query": "boot", "match": "exact"}, + "includes": [{"id": "456", "position": 2}], + "excludes": [{"id": "888"}], + } + response = await actual_async_curation_sets["products"].upsert_item( + "rule-2", payload + ) + + assert response == { + "excludes": [ + { + "id": "888", + }, + ], + "id": "rule-2", + "includes": [ + { + "id": "456", + "position": 2, + }, + ], + "rule": { 
+ "match": "exact", + "query": "boot", + }, + } + + +async def test_actual_delete_item_async( + actual_async_curation_sets: AsyncCurationSets, + delete_all_curation_sets: None, + create_curation_set: None, +) -> None: + """Test that the AsyncCurationSet object can delete an item from Typesense Server.""" + response = await actual_async_curation_sets["products"].delete_item("rule-1") + + assert response == {"id": "rule-1"} diff --git a/tests/curation_sets_test.py b/tests/curation_sets_test.py new file mode 100644 index 0000000..0a06a05 --- /dev/null +++ b/tests/curation_sets_test.py @@ -0,0 +1,175 @@ +"""Tests for the CurationSets class.""" + +import pytest + +from tests.utils.object_assertions import ( + assert_match_object, + assert_object_lists_match, + assert_to_contain_object, +) +from tests.utils.version import is_v30_or_above +from typesense.sync.api_call import ApiCall +from typesense.async_.api_call import AsyncApiCall +from typesense.async_.curation_sets import AsyncCurationSets +from typesense.sync.client import Client +from typesense.sync.curation_sets import CurationSets + +pytestmark = pytest.mark.skipif( + not is_v30_or_above( + Client( + { + "api_key": "xyz", + "nodes": [{"host": "localhost", "port": 8108, "protocol": "http"}], + } + ) + ), + reason="Run curation sets tests only on v30+", +) + + +def test_init(fake_api_call: ApiCall) -> None: + """Test that the CurationSets object is initialized correctly.""" + cur_sets = CurationSets(fake_api_call) + + assert_match_object(cur_sets.api_call, fake_api_call) + assert_object_lists_match( + cur_sets.api_call.node_manager.nodes, + fake_api_call.node_manager.nodes, + ) + + +def test_init_async(fake_async_api_call: AsyncApiCall) -> None: + """Test that the AsyncCurationSets object is initialized correctly.""" + cur_sets = AsyncCurationSets(fake_async_api_call) + + assert_match_object(cur_sets.api_call, fake_async_api_call) + assert_object_lists_match( + cur_sets.api_call.node_manager.nodes, + 
fake_async_api_call.node_manager.nodes, + ) + + +def test_actual_upsert( + actual_curation_sets: CurationSets, + delete_all_curation_sets: None, +) -> None: + """Test that the CurationSets object can upsert a curation set on Typesense Server.""" + response = actual_curation_sets["products"].upsert( + { + "items": [ + { + "id": "rule-1", + "rule": {"query": "shoe", "match": "contains"}, + "includes": [{"id": "123", "position": 1}], + "excludes": [{"id": "999"}], + } + ] + }, + ) + + assert response == { + "items": [ + { + "excludes": [ + { + "id": "999", + }, + ], + "filter_curated_hits": False, + "id": "rule-1", + "includes": [ + { + "id": "123", + "position": 1, + }, + ], + "remove_matched_tokens": False, + "rule": { + "match": "contains", + "query": "shoe", + }, + "stop_processing": True, + }, + ], + "name": "products", + } + + +def test_actual_retrieve( + actual_curation_sets: CurationSets, + delete_all_curation_sets: None, + create_curation_set: None, +) -> None: + """Test that the CurationSets object can retrieve curation sets from Typesense Server.""" + response = actual_curation_sets.retrieve() + + assert isinstance(response, list) + assert_to_contain_object( + response[0], + { + "name": "products", + }, + ) + + +async def test_actual_upsert_async( + actual_async_curation_sets: AsyncCurationSets, + delete_all_curation_sets: None, +) -> None: + """Test that the AsyncCurationSets object can upsert a curation set on Typesense Server.""" + response = await actual_async_curation_sets["products"].upsert( + { + "items": [ + { + "id": "rule-1", + "rule": {"query": "shoe", "match": "contains"}, + "includes": [{"id": "123", "position": 1}], + "excludes": [{"id": "999"}], + } + ] + }, + ) + + assert response == { + "items": [ + { + "excludes": [ + { + "id": "999", + }, + ], + "filter_curated_hits": False, + "id": "rule-1", + "includes": [ + { + "id": "123", + "position": 1, + }, + ], + "remove_matched_tokens": False, + "rule": { + "match": "contains", + "query": 
"shoe", + }, + "stop_processing": True, + }, + ], + "name": "products", + } + + +async def test_actual_retrieve_async( + actual_async_curation_sets: AsyncCurationSets, + delete_all_curation_sets: None, + create_curation_set: None, +) -> None: + """Test that the AsyncCurationSets object can retrieve curation sets from Typesense Server.""" + response = await actual_async_curation_sets.retrieve() + + assert isinstance(response, list) + assert_to_contain_object( + response[0], + { + "name": "products", + }, + ) diff --git a/tests/debug_test.py b/tests/debug_test.py new file mode 100644 index 0000000..d96fccb --- /dev/null +++ b/tests/debug_test.py @@ -0,0 +1,63 @@ +"""Tests for the Debug class.""" + +from tests.utils.object_assertions import assert_match_object, assert_object_lists_match +from typesense.sync.api_call import ApiCall +from typesense.async_.api_call import AsyncApiCall +from typesense.async_.debug import AsyncDebug +from typesense.sync.debug import Debug + + +def test_init(fake_api_call: ApiCall) -> None: + """Test that the Debug object is initialized correctly.""" + debug = Debug( + fake_api_call, + ) + + assert_match_object(debug.api_call, fake_api_call) + assert_object_lists_match( + debug.api_call.node_manager.nodes, + fake_api_call.node_manager.nodes, + ) + assert_match_object( + debug.api_call.config.nearest_node, + fake_api_call.config.nearest_node, + ) + assert debug.resource_path == "/debug" # noqa: WPS437 + + +def test_init_async(fake_async_api_call: AsyncApiCall) -> None: + """Test that the AsyncDebug object is initialized correctly.""" + debug = AsyncDebug(fake_async_api_call) + + assert_match_object(debug.api_call, fake_async_api_call) + assert_object_lists_match( + debug.api_call.node_manager.nodes, + fake_async_api_call.node_manager.nodes, + ) + assert_match_object( + debug.api_call.config.nearest_node, + fake_async_api_call.config.nearest_node, + ) + assert debug.resource_path == "/debug" # noqa: WPS437 + + +def 
test_actual_retrieve(actual_debug: Debug) -> None: + """Test that the Debug object can retrieve a debug on Typesense server and verify response structure.""" + response = actual_debug.retrieve() + + assert "state" in response + assert "version" in response + + assert isinstance(response["state"], int) + assert isinstance(response["version"], str) + + +async def test_actual_retrieve_async(actual_async_debug: AsyncDebug) -> None: + """Test that the AsyncDebug object can retrieve a debug on Typesense server and verify response structure.""" + response = await actual_async_debug.retrieve() + + assert "state" in response + assert "version" in response + + assert isinstance(response["state"], int) + assert isinstance(response["version"], str) diff --git a/tests/document_test.py b/tests/document_test.py new file mode 100644 index 0000000..1bb6de1 --- /dev/null +++ b/tests/document_test.py @@ -0,0 +1,184 @@ +"""Tests for the Document class.""" + + +import pytest + +from tests.utils.object_assertions import ( + assert_match_object, + assert_object_lists_match, + assert_to_contain_object, +) +from typesense.sync.api_call import ApiCall +from typesense.async_.api_call import AsyncApiCall +from typesense.async_.document import AsyncDocument +from typesense.async_.documents import AsyncDocuments +from typesense.sync.document import Document +from typesense.sync.documents import Documents +from typesense.exceptions import ObjectNotFound + + +def test_init(fake_api_call: ApiCall) -> None: + """Test that the Document object is initialized correctly.""" + document = Document(fake_api_call, "companies", "0") + + assert document.document_id == "0" + assert document.collection_name == "companies" + assert_match_object(document.api_call, fake_api_call) + assert_object_lists_match( + document.api_call.node_manager.nodes, + fake_api_call.node_manager.nodes, + ) + assert_match_object( + document.api_call.config.nearest_node, + fake_api_call.config.nearest_node, + ) + assert ( + 
document._endpoint_path == "/collections/companies/documents/0" # noqa: WPS437 + ) + + +def test_init_async(fake_async_api_call: AsyncApiCall) -> None: + """Test that the AsyncDocument object is initialized correctly.""" + document = AsyncDocument(fake_async_api_call, "companies", "0") + + assert document.document_id == "0" + assert document.collection_name == "companies" + assert_match_object(document.api_call, fake_async_api_call) + assert_object_lists_match( + document.api_call.node_manager.nodes, + fake_async_api_call.node_manager.nodes, + ) + assert_match_object( + document.api_call.config.nearest_node, + fake_async_api_call.config.nearest_node, + ) + assert ( + document._endpoint_path == "/collections/companies/documents/0" # noqa: WPS437 + ) + + +def test_actual_update( + actual_documents: Documents, + delete_all: None, + create_collection: None, + create_document: None, +) -> None: + """Test that the Document object can update an document on Typesense Server.""" + response = actual_documents["0"].update( + {"company_name": "Company", "num_employees": 20}, + { + "action": "update", + }, + ) + + assert_to_contain_object( + response, + {"id": "0", "company_name": "Company", "num_employees": 20}, + ) + + +def test_actual_retrieve( + actual_documents: Documents, + delete_all: None, + create_collection: None, + create_document: None, +) -> None: + """Test that the Document object can retrieve an document from Typesense Server.""" + response = actual_documents["0"].retrieve() + + assert_to_contain_object( + response, + {"id": "0", "company_name": "Company", "num_employees": 10}, + ) + + +def test_actual_delete( + actual_documents: Documents, + delete_all: None, + create_collection: None, + create_document: None, +) -> None: + """Test that the Document object can delete an document from Typesense Server.""" + response = actual_documents["0"].delete() + + assert response == { + "id": "0", + "company_name": "Company", + "num_employees": 10, + } + + +def 
test_actual_delete_non_existent( + actual_documents: Documents, + delete_all: None, + create_collection: None, + create_document: None, +) -> None: + """Test that the Document object can delete an document from Typesense Server.""" + with pytest.raises(ObjectNotFound): + actual_documents["1"].delete() + + +def test_actual_delete_non_existent_ignore_not_found( + actual_documents: Documents, + delete_all: None, + create_collection: None, + create_document: None, +) -> None: + """Test that the Document object can delete an document from Typesense Server.""" + response = actual_documents["1"].delete( + delete_parameters={"ignore_not_found": True}, + ) + + assert response == {"id": "1"} + + +async def test_actual_update_async( + actual_async_documents: AsyncDocuments, + delete_all: None, + create_collection: None, + create_document: None, +) -> None: + """Test that the AsyncDocument object can update an document on Typesense Server.""" + response = await actual_async_documents["0"].update( + {"company_name": "Company", "num_employees": 20}, + { + "action": "update", + }, + ) + + assert_to_contain_object( + response, + {"id": "0", "company_name": "Company", "num_employees": 20}, + ) + + +async def test_actual_retrieve_async( + actual_async_documents: AsyncDocuments, + delete_all: None, + create_collection: None, + create_document: None, +) -> None: + """Test that the AsyncDocument object can retrieve an document from Typesense Server.""" + response = await actual_async_documents["0"].retrieve() + + assert_to_contain_object( + response, + {"id": "0", "company_name": "Company", "num_employees": 10}, + ) + + +async def test_actual_delete_async( + actual_async_documents: AsyncDocuments, + delete_all: None, + create_collection: None, + create_document: None, +) -> None: + """Test that the AsyncDocument object can delete an document from Typesense Server.""" + response = await actual_async_documents["0"].delete() + + assert response == { + "id": "0", + "company_name": 
"Company", + "num_employees": 10, + } diff --git a/tests/documents_test.py b/tests/documents_test.py new file mode 100644 index 0000000..058d1ed --- /dev/null +++ b/tests/documents_test.py @@ -0,0 +1,580 @@ +"""Tests for the Documents class.""" + +import json +import logging +import sys + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + +import pytest +from pytest_mock import MockFixture + +from tests.fixtures.document_fixtures import Companies +from tests.utils.object_assertions import ( + assert_match_object, + assert_object_lists_match, + assert_to_contain_keys, +) +from typesense.sync.api_call import ApiCall +from typesense.async_.api_call import AsyncApiCall +from typesense.async_.documents import AsyncDocuments +from typesense.sync.documents import Documents +from typesense.exceptions import InvalidParameter, TypesenseClientError + + +def test_init(fake_api_call: ApiCall) -> None: + """Test that the Documents object is initialized correctly.""" + documents = Documents(fake_api_call, "companies") + + assert_match_object(documents.api_call, fake_api_call) + assert_object_lists_match( + documents.api_call.node_manager.nodes, + fake_api_call.node_manager.nodes, + ) + assert_match_object( + documents.api_call.config.nearest_node, + fake_api_call.config.nearest_node, + ) + + assert not documents.documents + + +def test_init_async(fake_async_api_call: AsyncApiCall) -> None: + """Test that the AsyncDocuments object is initialized correctly.""" + documents = AsyncDocuments(fake_async_api_call, "companies") + + assert_match_object(documents.api_call, fake_async_api_call) + assert_object_lists_match( + documents.api_call.node_manager.nodes, + fake_async_api_call.node_manager.nodes, + ) + assert_match_object( + documents.api_call.config.nearest_node, + fake_async_api_call.config.nearest_node, + ) + + assert not documents.documents + + +def test_get_missing_document(fake_documents: Documents) -> None: + """Test that the 
Documents object can get a missing document.""" + document = fake_documents["1"] + + assert_match_object(document.api_call, fake_documents.api_call) + assert_object_lists_match( + document.api_call.node_manager.nodes, fake_documents.api_call.node_manager.nodes + ) + assert_match_object( + document.api_call.config.nearest_node, + fake_documents.api_call.config.nearest_node, + ) + assert ( + document._endpoint_path == "/collections/companies/documents/1" # noqa: WPS437 + ) + + +def test_get_missing_document_async(fake_async_documents: AsyncDocuments) -> None: + """Test that the AsyncDocuments object can get a missing document.""" + document = fake_async_documents["1"] + + assert_match_object(document.api_call, fake_async_documents.api_call) + assert_object_lists_match( + document.api_call.node_manager.nodes, + fake_async_documents.api_call.node_manager.nodes, + ) + assert_match_object( + document.api_call.config.nearest_node, + fake_async_documents.api_call.config.nearest_node, + ) + assert ( + document._endpoint_path == "/collections/companies/documents/1" # noqa: WPS437 + ) + + +def test_get_existing_document(fake_documents: Documents) -> None: + """Test that the Documents object can get an existing document.""" + document = fake_documents["1"] + fetched_document = fake_documents["1"] + + assert len(fake_documents.documents) == 1 + + assert document is fetched_document + + +def test_upsert( + actual_documents: Documents[Companies], + actual_api_call: ApiCall, + delete_all: None, + create_collection: None, + mocker: MockFixture, +) -> None: + """Test that the Documents object can upsert a document on Typesense server.""" + company: Companies = { + "company_name": "company", + "id": "0", + "num_employees": 10, + } + spy = mocker.spy(actual_api_call, "post") + response = actual_documents.upsert(company) + + assert response == company + spy.assert_called_once_with( + "/collections/companies/documents/", + body=company, + params={"action": "upsert"}, + as_json=True, + 
entity_type=typing.Dict[str, str], + ) + + updated_company: Companies = { + "company_name": "company_updated", + "id": "0", + "num_employees": 10, + } + + response_update = actual_documents.upsert( + updated_company, + {"action": "update"}, + ) + + assert response_update == updated_company + assert spy.call_count == 2 + spy.assert_called_with( + "/collections/companies/documents/", + body=updated_company, + params={"action": "upsert"}, + as_json=True, + entity_type=typing.Dict[str, str], + ) + + +def test_update( + actual_documents: Documents[Companies], + actual_api_call: ApiCall, + delete_all: None, + create_collection: None, + create_document: None, +) -> None: + """Test that the Documents object can update a document on Typesense server.""" + response = actual_documents.update( + {"company_name": "company_updated", "num_employees": 10}, + {"filter_by": "company_name:company"}, + ) + + assert response == {"num_updated": 1} + + +def test_create_many( + actual_documents: Documents[Companies], + actual_api_call: ApiCall, + delete_all: None, + create_collection: None, + mocker: MockFixture, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test that the Documents object can create many documents on Typesense server.""" + companies: typing.List[Companies] = [ + { + "company_name": "Typesense", + "id": "1", + "num_employees": 25, + }, + { + "company_name": "Typesense", + "id": "2", + "num_employees": 25, + }, + ] + with caplog.at_level(logging.WARNING): + response = actual_documents.create_many(companies) + expected = [{"success": True} for _ in companies] + assert response == expected + assert "`create_many` is deprecated: please use `import_`." 
in caplog.text + + +def test_export( + actual_documents: Documents[Companies], + delete_all: None, + create_collection: None, + create_document: None, +) -> None: + """Test that the Documents object can export a document from Typesense server.""" + response = actual_documents.export() + assert response == '{"company_name":"Company","id":"0","num_employees":10}' + + +def test_delete( + actual_documents: Documents[Companies], + delete_all: None, + create_collection: None, + create_document: None, +) -> None: + """Test that the Documents object can delete a document from Typesense server.""" + response = actual_documents.delete({"filter_by": "company_name:Company"}) + assert response == {"num_deleted": 1} + + +def test_truncate( + actual_documents: Documents[Companies], + delete_all: None, + create_collection: None, + create_document: None, +) -> None: + """Test that the Documents object can delete a document from Typesense server.""" + response = actual_documents.delete({"truncate": True}) + assert response == {"num_deleted": 1} + + +def test_delete_ignore_missing( + actual_documents: Documents[Companies], + delete_all: None, + create_collection: None, +) -> None: + """Test that the Documents object can ignore a missing document from Typesense server.""" + response = actual_documents.delete( + {"filter_by": "company_name:missing", "ignore_not_found": True}, + ) + assert response == {"num_deleted": 0} + + +def test_import_fail( + generate_companies: typing.List[Companies], + actual_documents: Documents[Companies], + delete_all: None, + create_collection: None, + mocker: MockFixture, +) -> None: + """Test that the Documents object doesn't throw an error when importing documents.""" + wrong_company: Companies = {"company_name": "Wrong", "id": "0", "num_employees": 0} + companies = generate_companies + [wrong_company] + request_spy = mocker.spy(actual_documents, "_bulk_import") + response = actual_documents.import_(companies) + + expected: typing.List[typing.Dict[str, 
typing.Union[str, bool, int]]] = [ + {"success": True} for _ in generate_companies + ] + expected.append( + { + "code": 409, + "error": "A document with id 0 already exists.", + "success": False, + }, + ) + assert request_spy.call_count == 1 + assert response == expected + + +def test_import_empty( + actual_documents: Documents[Companies], + actual_api_call: ApiCall, + delete_all: None, + create_collection: None, +) -> None: + """Test that the Documents object throws when importing an empty list of documents.""" + with pytest.raises(TypesenseClientError): + actual_documents.import_(documents=[]) + + +def test_import_json_fail( + actual_documents: Documents[Companies], + generate_companies: typing.List[Companies], + actual_api_call: ApiCall, + delete_all: None, + create_collection: None, + mocker: MockFixture, +) -> None: + """Test that the Documents object throws when importing invalid JSON.""" + mocker.patch( + "json.loads", + side_effect=json.JSONDecodeError("Expecting value", "doc", 0), + ) + + with pytest.raises(TypesenseClientError): + actual_documents.import_(generate_companies) + + +def test_import_batch_size( + generate_companies: typing.List[Companies], + actual_documents: Documents[Companies], + actual_api_call: ApiCall, + delete_all: None, + create_collection: None, + mocker: MockFixture, +) -> None: + """Test that the Documents object can import documents in batches.""" + batch_size = 5 + import_spy = mocker.spy(actual_documents, "import_") + batch_import_spy = mocker.spy(actual_documents, "_bulk_import") + request_spy = mocker.spy(actual_api_call, "post") + response = actual_documents.import_(generate_companies, batch_size=batch_size) + + expected = [{"success": True} for _ in generate_companies] + assert import_spy.call_count == 1 + assert batch_import_spy.call_count == len(generate_companies) // batch_size + assert request_spy.call_count == len(generate_companies) // batch_size + assert response == expected + + +def test_import_return_docs( + 
generate_companies: typing.List[Companies], + actual_documents: Documents[Companies], + mocker: MockFixture, + delete_all: None, + create_collection: None, +) -> None: + """Test that the Documents object can return documents when importing.""" + request_spy = mocker.spy(actual_documents, "_bulk_import") + response = actual_documents.import_(generate_companies, {"return_doc": True}) + expected = [ + {"success": True, "document": company} for company in generate_companies + ] + + assert request_spy.call_count == 1 + assert response == expected + + +def test_import_return_ids( + generate_companies: typing.List[Companies], + actual_documents: Documents[Companies], + mocker: MockFixture, + delete_all: None, + create_collection: None, +) -> None: + """Test that the Documents object can return document IDs when importing.""" + request_spy = mocker.spy(actual_documents, "_bulk_import") + response = actual_documents.import_(generate_companies, {"return_id": True}) + expected = [ + {"success": True, "id": company.get("id")} for company in generate_companies + ] + assert request_spy.call_count == 1 + assert response == expected + + +def test_import_return_ids_and_docs( + generate_companies: typing.List[Companies], + actual_documents: Documents[Companies], + mocker: MockFixture, + delete_all: None, + create_collection: None, +) -> None: + """Test that the Documents object can return document IDs and documents when importing.""" + request_spy = mocker.spy(actual_documents, "_bulk_import") + response = actual_documents.import_( + generate_companies, + {"return_id": True, "return_doc": True}, + ) + expected = [ + {"success": True, "document": company, "id": company.get("id")} + for company in generate_companies + ] + assert request_spy.call_count == 1 + assert response == expected + + +def test_import_jsonl( + generate_companies: typing.List[Companies], + actual_documents: Documents[Companies], + delete_all: None, + create_collection: None, + caplog: pytest.LogCaptureFixture, +) 
-> None: + """Test that the Documents object can import documents in JSONL format.""" + companies_in_jsonl_format = "\n".join( + [ + "".join( + [ + '{"id": "', + company["id"], + '", ', + '"company_name": "', + company["company_name"], + '", ', + '"num_employees": ', + str(company["num_employees"]), + "}", + ], + ) + for company in generate_companies + ], + ) + + expected = "\n".join(['{"success":true}' for _ in generate_companies]) + + with caplog.at_level(logging.WARNING): + response = actual_documents.import_jsonl(companies_in_jsonl_format) + assert response == expected + assert "`import_jsonl` is deprecated: please use `import_`." in caplog.text + + +def test_search( + actual_documents: Documents[Companies], + actual_api_call: ApiCall, + delete_all: None, + create_collection: None, + create_document: None, +) -> None: + """Test that the Documents object can search for documents on Typesense server.""" + response = actual_documents.search( + { + "q": "com", + "query_by": "company_name", + }, + ) + + assert_to_contain_keys( + response, + [ + "facet_counts", + "found", + "hits", + "page", + "out_of", + "request_params", + "search_time_ms", + "search_cutoff", + ], + ) + + assert_to_contain_keys( + response.get("hits")[0], + ["document", "highlights", "highlight", "text_match", "text_match_info"], + ) + + +def test_search_array( + actual_documents: Documents[Companies], + actual_api_call: ApiCall, + delete_all: None, + create_collection: None, + create_document: None, +) -> None: + """Test that the SearchParameters can have arrays that are concatenated before request.""" + response = actual_documents.search( + { + "q": "com", + "query_by": ["company_name"], + }, + ) + + assert_to_contain_keys( + response, + [ + "facet_counts", + "found", + "hits", + "page", + "out_of", + "request_params", + "search_time_ms", + "search_cutoff", + ], + ) + + assert_to_contain_keys( + response.get("hits")[0], + ["document", "highlights", "highlight", "text_match", "text_match_info"], + 
) + + +def test_search_invalid_parameters( + actual_documents: Documents[Companies], + actual_api_call: ApiCall, + delete_all: None, + create_collection: None, + create_document: None, +) -> None: + """Test that the Documents object throws when invalid parameters are passed to search.""" + with pytest.raises(InvalidParameter): + actual_documents.search( + { + "q": "com", + "query_by": "company_name", + "invalid": [ + Companies(company_name="", id="", num_employees=0), + ], + }, + ) + + with pytest.raises(InvalidParameter): + actual_documents.search( + { + "q": "com", + "query_by": "company_name", + "invalid": Companies(company_name="", id="", num_employees=0), + }, + ) + + +async def test_upsert_async( + actual_async_documents: AsyncDocuments[Companies], + delete_all: None, + create_collection: None, +) -> None: + """Test that the AsyncDocuments object can upsert a document on Typesense server.""" + company: Companies = { + "company_name": "company", + "id": "0", + "num_employees": 10, + } + response = await actual_async_documents.upsert(company) + + assert response == company + + +async def test_export_async( + actual_async_documents: AsyncDocuments[Companies], + delete_all: None, + create_collection: None, + create_document: None, +) -> None: + """Test that the AsyncDocuments object can export a document from Typesense server.""" + response = await actual_async_documents.export() + assert response == '{"company_name":"Company","id":"0","num_employees":10}' + + +async def test_delete_async( + actual_async_documents: AsyncDocuments[Companies], + delete_all: None, + create_collection: None, + create_document: None, +) -> None: + """Test that the AsyncDocuments object can delete a document from Typesense server.""" + response = await actual_async_documents.delete({"filter_by": "company_name:Company"}) + assert response == {"num_deleted": 1} + + +async def test_search_async( + actual_async_documents: AsyncDocuments[Companies], + delete_all: None, + create_collection: 
None, + create_document: None, +) -> None: + """Test that the AsyncDocuments object can search for documents on Typesense server.""" + response = await actual_async_documents.search( + { + "q": "com", + "query_by": "company_name", + }, + ) + + assert_to_contain_keys( + response, + [ + "facet_counts", + "found", + "hits", + "page", + "out_of", + "request_params", + "search_time_ms", + "search_cutoff", + ], + ) + + assert_to_contain_keys( + response.get("hits")[0], + ["document", "highlights", "highlight", "text_match", "text_match_info"], + ) diff --git a/tests/fixtures/alias_fixtures.py b/tests/fixtures/alias_fixtures.py new file mode 100644 index 0000000..1c14a1e --- /dev/null +++ b/tests/fixtures/alias_fixtures.py @@ -0,0 +1,89 @@ +"""Fixtures for alias tests.""" + +import pytest +import requests + +from typesense.sync.alias import Alias +from typesense.sync.aliases import Aliases +from typesense.sync.api_call import ApiCall +from typesense.async_.api_call import AsyncApiCall +from typesense.async_.alias import AsyncAlias +from typesense.async_.aliases import AsyncAliases + + +@pytest.fixture(scope="function", name="delete_all_aliases") +def clear_typesense_aliases() -> None: + """Remove all aliases from the Typesense server.""" + url = "http://localhost:8108/aliases" + headers = {"X-TYPESENSE-API-KEY": "xyz"} + + # Get the list of collections + response = requests.get(url, headers=headers, timeout=3) + response.raise_for_status() + + aliases = response.json() + + # Delete each alias + for alias in aliases["aliases"]: + alias_name = alias.get("name") + delete_url = f"{url}/{alias_name}" + delete_response = requests.delete(delete_url, headers=headers, timeout=3) + delete_response.raise_for_status() + + +@pytest.fixture(scope="function", name="create_alias") +def create_alias_fixture(create_collection: None) -> None: + """Create an alias in the Typesense server.""" + url = "http://localhost:8108/aliases/company_alias" + headers = {"X-TYPESENSE-API-KEY": "xyz"} + 
alias_data = { + "collection_name": "companies", + } + + alias_creation_response = requests.put( + url, + headers=headers, + json=alias_data, + timeout=3, + ) + alias_creation_response.raise_for_status() + + +@pytest.fixture(scope="function", name="actual_aliases") +def actual_aliases_fixture(actual_api_call: ApiCall) -> Aliases: + """Return a Aliases object using a real API.""" + return Aliases(actual_api_call) + + +@pytest.fixture(scope="function", name="fake_aliases") +def fake_aliases_fixture(fake_api_call: ApiCall) -> Aliases: + """Return a Aliases object with test values.""" + return Aliases(fake_api_call) + + +@pytest.fixture(scope="function", name="fake_alias") +def fake_alias_fixture(fake_api_call: ApiCall) -> Alias: + """Return a Alias object with test values.""" + return Alias(fake_api_call, "company_alias") + + +@pytest.fixture(scope="function", name="actual_async_aliases") +def actual_async_aliases_fixture( + actual_async_api_call: AsyncApiCall, +) -> AsyncAliases: + """Return a AsyncAliases object using a real API.""" + return AsyncAliases(actual_async_api_call) + + +@pytest.fixture(scope="function", name="fake_async_aliases") +def fake_async_aliases_fixture( + fake_async_api_call: AsyncApiCall, +) -> AsyncAliases: + """Return a AsyncAliases object with test values.""" + return AsyncAliases(fake_async_api_call) + + +@pytest.fixture(scope="function", name="fake_async_alias") +def fake_async_alias_fixture(fake_async_api_call: AsyncApiCall) -> AsyncAlias: + """Return a AsyncAlias object with test values.""" + return AsyncAlias(fake_async_api_call, "company_alias") diff --git a/tests/fixtures/analytics_fixtures.py b/tests/fixtures/analytics_fixtures.py new file mode 100644 index 0000000..784ea2d --- /dev/null +++ b/tests/fixtures/analytics_fixtures.py @@ -0,0 +1,123 @@ +"""Fixtures for Analytics (current) tests.""" + +import pytest +import requests + +from typesense.sync.analytics_rule import AnalyticsRule +from typesense.sync.analytics_rules import 
AnalyticsRules +from typesense.sync.api_call import ApiCall +from typesense.async_.api_call import AsyncApiCall +from typesense.async_.analytics_events import AsyncAnalyticsEvents +from typesense.async_.analytics_rule import AsyncAnalyticsRule +from typesense.async_.analytics_rules import AsyncAnalyticsRules + + +@pytest.fixture(scope="function", name="delete_all_analytics_rules") +def clear_typesense_analytics_rules() -> None: + """Remove all analytics rules from the Typesense server.""" + url = "http://localhost:8108/analytics/rules" + headers = {"X-TYPESENSE-API-KEY": "xyz"} + + response = requests.get(url, headers=headers, timeout=3) + response.raise_for_status() + rules = response.json() + + # v30 returns a list of rule objects + for rule in rules: + rule_name = rule.get("name") + delete_url = f"{url}/{rule_name}" + delete_response = requests.delete(delete_url, headers=headers, timeout=3) + delete_response.raise_for_status() + + +@pytest.fixture(scope="function", name="create_analytics_rule") +def create_analytics_rule_fixture( + create_collection: None, + create_query_collection: None, +) -> None: + """Create an analytics rule in the Typesense server.""" + url = "http://localhost:8108/analytics/rules" + headers = {"X-TYPESENSE-API-KEY": "xyz"} + analytics_rule_data = { + "name": "company_analytics_rule", + "type": "nohits_queries", + "collection": "companies", + "event_type": "search", + "params": { + "destination_collection": "companies_queries", + "limit": 1000, + }, + } + + response = requests.post(url, headers=headers, json=analytics_rule_data, timeout=3) + response.raise_for_status() + + +@pytest.fixture(scope="function", name="fake_analytics_rules") +def fake_analytics_rules_fixture(fake_api_call: ApiCall) -> AnalyticsRules: + """Return an AnalyticsRules object with test values.""" + return AnalyticsRules(fake_api_call) + + +@pytest.fixture(scope="function", name="actual_analytics_rules") +def actual_analytics_rules_fixture(actual_api_call: ApiCall) -> 
AnalyticsRules: + """Return an AnalyticsRules object using a real API.""" + return AnalyticsRules(actual_api_call) + + +@pytest.fixture(scope="function", name="fake_analytics_rule") +def fake_analytics_rule_fixture(fake_api_call: ApiCall) -> AnalyticsRule: + """Return an AnalyticsRule object with test values.""" + return AnalyticsRule(fake_api_call, "company_analytics_rule") + + +@pytest.fixture(scope="function", name="create_query_collection") +def create_query_collection_fixture() -> None: + """Create a query collection for analytics rules in the Typesense server.""" + url = "http://localhost:8108/collections" + headers = {"X-TYPESENSE-API-KEY": "xyz"} + query_collection_data = { + "name": "companies_queries", + "fields": [ + { + "name": "q", + "type": "string", + }, + { + "name": "count", + "type": "int32", + }, + ], + } + + response = requests.post( + url, + headers=headers, + json=query_collection_data, + timeout=3, + ) + response.raise_for_status() + + +@pytest.fixture(scope="function", name="fake_async_analytics_rules") +def fake_async_analytics_rules_fixture(fake_async_api_call: AsyncApiCall) -> AsyncAnalyticsRules: + """Return an AsyncAnalyticsRules object with test values.""" + return AsyncAnalyticsRules(fake_async_api_call) + + +@pytest.fixture(scope="function", name="actual_async_analytics_rules") +def actual_async_analytics_rules_fixture(actual_async_api_call: AsyncApiCall) -> AsyncAnalyticsRules: + """Return an AsyncAnalyticsRules object using a real API.""" + return AsyncAnalyticsRules(actual_async_api_call) + + +@pytest.fixture(scope="function", name="fake_async_analytics_rule") +def fake_async_analytics_rule_fixture(fake_async_api_call: AsyncApiCall) -> AsyncAnalyticsRule: + """Return an AsyncAnalyticsRule object with test values.""" + return AsyncAnalyticsRule(fake_async_api_call, "company_analytics_rule") + + +@pytest.fixture(scope="function", name="actual_async_analytics_events") +def actual_async_analytics_events_fixture(actual_async_api_call: 
AsyncApiCall) -> AsyncAnalyticsEvents: + """Return an AsyncAnalyticsEvents object using a real API.""" + return AsyncAnalyticsEvents(actual_async_api_call) diff --git a/tests/fixtures/analytics_rule_v1_fixtures.py b/tests/fixtures/analytics_rule_v1_fixtures.py new file mode 100644 index 0000000..099caeb --- /dev/null +++ b/tests/fixtures/analytics_rule_v1_fixtures.py @@ -0,0 +1,89 @@ +"""Fixtures for the Analytics Rules V1 tests.""" + +import pytest +import requests + +from typesense.sync.analytics_rule_v1 import AnalyticsRuleV1 +from typesense.sync.analytics_rules_v1 import AnalyticsRulesV1 +from typesense.sync.api_call import ApiCall +from typesense.async_.api_call import AsyncApiCall +from typesense.async_.analytics_rule_v1 import AsyncAnalyticsRuleV1 +from typesense.async_.analytics_rules_v1 import AsyncAnalyticsRulesV1 + + +@pytest.fixture(scope="function", name="delete_all_analytics_rules_v1") +def clear_typesense_analytics_rules_v1() -> None: + """Remove all analytics_rules from the Typesense server.""" + url = "http://localhost:8108/analytics/rules" + headers = {"X-TYPESENSE-API-KEY": "xyz"} + + # Get the list of rules + response = requests.get(url, headers=headers, timeout=3) + response.raise_for_status() + analytics_rules = response.json() + + # Delete each analytics_rule + for analytics_rule_set in analytics_rules["rules"]: + analytics_rule_id = analytics_rule_set.get("name") + delete_url = f"{url}/{analytics_rule_id}" + delete_response = requests.delete(delete_url, headers=headers, timeout=3) + delete_response.raise_for_status() + + +@pytest.fixture(scope="function", name="create_analytics_rule_v1") +def create_analytics_rule_v1_fixture( + create_collection: None, + create_query_collection: None, +) -> None: + """Create a collection in the Typesense server.""" + url = "http://localhost:8108/analytics/rules" + headers = {"X-TYPESENSE-API-KEY": "xyz"} + analytics_rule_data = { + "name": "company_analytics_rule", + "type": "nohits_queries", + "params": { + 
"source": { + "collections": ["companies"], + }, + "destination": {"collection": "companies_queries"}, + }, + } + + response = requests.post(url, headers=headers, json=analytics_rule_data, timeout=3) + response.raise_for_status() + + +@pytest.fixture(scope="function", name="fake_analytics_rules_v1") +def fake_analytics_rules_v1_fixture(fake_api_call: ApiCall) -> AnalyticsRulesV1: + """Return a AnalyticsRule object with test values.""" + return AnalyticsRulesV1(fake_api_call) + + +@pytest.fixture(scope="function", name="actual_analytics_rules_v1") +def actual_analytics_rules_v1_fixture(actual_api_call: ApiCall) -> AnalyticsRulesV1: + """Return a AnalyticsRules object using a real API.""" + return AnalyticsRulesV1(actual_api_call) + + +@pytest.fixture(scope="function", name="fake_analytics_rule_v1") +def fake_analytics_rule_v1_fixture(fake_api_call: ApiCall) -> AnalyticsRuleV1: + """Return a AnalyticsRule object with test values.""" + return AnalyticsRuleV1(fake_api_call, "company_analytics_rule") + + +@pytest.fixture(scope="function", name="fake_async_analytics_rules_v1") +def fake_async_analytics_rules_v1_fixture(fake_async_api_call: AsyncApiCall) -> AsyncAnalyticsRulesV1: + """Return a AsyncAnalyticsRulesV1 object with test values.""" + return AsyncAnalyticsRulesV1(fake_async_api_call) + + +@pytest.fixture(scope="function", name="actual_async_analytics_rules_v1") +def actual_async_analytics_rules_v1_fixture(actual_async_api_call: AsyncApiCall) -> AsyncAnalyticsRulesV1: + """Return a AsyncAnalyticsRulesV1 object using a real API.""" + return AsyncAnalyticsRulesV1(actual_async_api_call) + + +@pytest.fixture(scope="function", name="fake_async_analytics_rule_v1") +def fake_async_analytics_rule_v1_fixture(fake_async_api_call: AsyncApiCall) -> AsyncAnalyticsRuleV1: + """Return a AsyncAnalyticsRuleV1 object with test values.""" + return AsyncAnalyticsRuleV1(fake_async_api_call, "company_analytics_rule") diff --git a/tests/fixtures/api_call_fixtures.py 
b/tests/fixtures/api_call_fixtures.py new file mode 100644 index 0000000..69cabdf --- /dev/null +++ b/tests/fixtures/api_call_fixtures.py @@ -0,0 +1,36 @@ +"""Fixtures for ApiCall tests.""" + +import pytest + +from typesense.sync.api_call import ApiCall +from typesense.async_.api_call import AsyncApiCall +from typesense.configuration import Configuration + + +@pytest.fixture(scope="function", name="fake_api_call") +def fake_api_call_fixture( + fake_config: Configuration, +) -> ApiCall: + """Return an ApiCall object with test values.""" + return ApiCall(fake_config) + + +@pytest.fixture(scope="function", name="actual_api_call") +def actual_api_call_fixture(actual_config: Configuration) -> ApiCall: + """Return an ApiCall object using a real API.""" + return ApiCall(actual_config) + + +@pytest.fixture(scope="function", name="actual_async_api_call") +def actual_async_api_call_fixture(actual_config: Configuration) -> AsyncApiCall: + """Return an AsyncApiCall object using a real API.""" + return AsyncApiCall(actual_config) + + +@pytest.fixture(scope="function", name="fake_async_api_call") +def fake_api_call_async_fixture( + fake_config: Configuration, +) -> AsyncApiCall: + """Return an ApiCall object with test values.""" + return AsyncApiCall(fake_config) + diff --git a/tests/fixtures/client_fixtures.py b/tests/fixtures/client_fixtures.py new file mode 100644 index 0000000..6c9771e --- /dev/null +++ b/tests/fixtures/client_fixtures.py @@ -0,0 +1,20 @@ +"""Fixtures for the client tests.""" + +import pytest + +from typesense.sync.client import Client +from typesense.configuration import ConfigDict + + +@pytest.fixture(scope="function", name="fake_client") +def fake_client_fixture( + fake_config_dict: ConfigDict, +) -> Client: + """Return a client object with test values.""" + return Client(fake_config_dict) + + +@pytest.fixture(scope="function", name="actual_client") +def actual_client_fixture(actual_config_dict: ConfigDict) -> Client: + """Return a client object using a 
real API.""" + return Client(actual_config_dict) diff --git a/tests/fixtures/collections_fixtures.py b/tests/fixtures/collections_fixtures.py new file mode 100644 index 0000000..383ec98 --- /dev/null +++ b/tests/fixtures/collections_fixtures.py @@ -0,0 +1,118 @@ +"""Fixtures for Collections tests.""" + +import pytest +import requests + +from typesense.sync.api_call import ApiCall +from typesense.async_.api_call import AsyncApiCall +from typesense.async_.collection import AsyncCollection +from typesense.async_.collections import AsyncCollections +from typesense.sync.collection import Collection +from typesense.sync.collections import Collections + + +@pytest.fixture(scope="function", name="delete_all") +def clear_typesense_collections() -> None: + """Remove all collections from the Typesense server.""" + url = "http://localhost:8108/collections" + headers = {"X-TYPESENSE-API-KEY": "xyz"} + + # Get the list of collections + response = requests.get(url, headers=headers, timeout=3) + response.raise_for_status() + collections = response.json() + + # Delete each collection + for collection in collections: + collection_name = collection["name"] + delete_url = f"{url}/{collection_name}" + delete_response = requests.delete(delete_url, headers=headers, timeout=3) + delete_response.raise_for_status() + + +@pytest.fixture(scope="function", name="create_collection") +def create_collection_fixture() -> None: + """Create a collection in the Typesense server.""" + url = "http://localhost:8108/collections" + headers = {"X-TYPESENSE-API-KEY": "xyz"} + collection_data = { + "name": "companies", + "fields": [ + { + "name": "company_name", + "type": "string", + }, + { + "name": "num_employees", + "type": "int32", + }, + ], + "default_sorting_field": "num_employees", + } + + response = requests.post(url, headers=headers, json=collection_data, timeout=3) + response.raise_for_status() + + +@pytest.fixture(scope="function", name="create_another_collection") +def 
create_another_collection_fixture() -> None: + """Create a collection in the Typesense server.""" + url = "http://localhost:8108/collections" + headers = {"X-TYPESENSE-API-KEY": "xyz"} + collection_data = { + "name": "companies_2", + "fields": [ + { + "name": "company_name", + "type": "string", + }, + { + "name": "num_employees", + "type": "int32", + }, + ], + "default_sorting_field": "num_employees", + } + + response = requests.post(url, headers=headers, json=collection_data, timeout=3) + response.raise_for_status() + + +@pytest.fixture(scope="function", name="actual_collections") +def actual_collections_fixture(actual_api_call: ApiCall) -> Collections: + """Return a Collections object using a real API.""" + return Collections(actual_api_call) + + +@pytest.fixture(scope="function", name="actual_async_collections") +def actual_async_collections_fixture( + actual_async_api_call: AsyncApiCall, +) -> AsyncCollections: + """Return a Collections object using a real API.""" + return AsyncCollections(actual_async_api_call) + + +@pytest.fixture(scope="function", name="fake_collections") +def fake_collections_fixture(fake_api_call: ApiCall) -> Collections: + """Return a Collections object with test values.""" + return Collections(fake_api_call) + + +@pytest.fixture(scope="function", name="fake_async_collections") +def fake_collections_async_fixture( + fake_async_api_call: AsyncApiCall, +) -> AsyncCollections: + """Return a Collections object with test values.""" + return AsyncCollections(fake_async_api_call) + + +@pytest.fixture(scope="function", name="fake_collection") +def fake_collection_fixture(fake_api_call: ApiCall) -> Collection: + """Return a Collection object with test values.""" + return Collection(fake_api_call, "companies") + + +@pytest.fixture(scope="function", name="fake_async_collection") +def fake_async_collection_fixture(fake_async_api_call: AsyncApiCall) -> AsyncCollection: + """Return a Collection object with test values.""" + return 
AsyncCollection(fake_async_api_call, "companies") diff --git a/tests/fixtures/configuration_fixtures.py b/tests/fixtures/configuration_fixtures.py new file mode 100644 index 0000000..713f403 --- /dev/null +++ b/tests/fixtures/configuration_fixtures.py @@ -0,0 +1,71 @@ +"""Fixtures for Configuration tests.""" + +import pytest + +from typesense.configuration import ConfigDict, Configuration + + +@pytest.fixture(scope="function", name="fake_config_dict") +def fake_config_dict_fixture() -> ConfigDict: + """Return a dictionary with test values.""" + return { + "api_key": "test-api-key", + "nodes": [ + { + "host": "node0", + "port": 8108, + "protocol": "http", + }, + { + "host": "node1", + "port": 8108, + "protocol": "http", + }, + { + "host": "node2", + "port": 8108, + "protocol": "http", + }, + ], + "nearest_node": { + "host": "nearest", + "port": 8108, + "protocol": "http", + }, + "num_retries": 3, + "healthcheck_interval_seconds": 60, + "retry_interval_seconds": 0.001, + "connection_timeout_seconds": 0.001, + "verify": True, + } + + +@pytest.fixture(scope="function", name="actual_config_dict") +def actual_config_dict_fixture() -> ConfigDict: + """Return a dictionary with test values.""" + return { + "api_key": "xyz", + "nodes": [ + { + "host": "localhost", + "port": 8108, + "protocol": "http", + }, + ], + } + + +@pytest.fixture(scope="function", name="fake_config") +def fake_config_fixture(fake_config_dict: ConfigDict) -> Configuration: + """Return a Configuration object with test values.""" + return Configuration( + config_dict=fake_config_dict, + ) + + +@pytest.fixture(scope="function", name="actual_config") +def actual_config_fixture(actual_config_dict: ConfigDict) -> Configuration: + """Return a Configuration object using a real API.""" + return Configuration( + config_dict=actual_config_dict, + ) diff --git a/tests/fixtures/conversation_model_fixtures.py b/tests/fixtures/conversation_model_fixtures.py new file mode 100644 index 0000000..eaa277d --- /dev/null +++ 
b/tests/fixtures/conversation_model_fixtures.py @@ -0,0 +1,134 @@ +"""Fixtures for the conversation model tests.""" + +import os + +import pytest +import requests +from dotenv import load_dotenv + +from typesense.sync.api_call import ApiCall +from typesense.async_.api_call import AsyncApiCall +from typesense.async_.conversation_model import AsyncConversationModel +from typesense.async_.conversations_models import AsyncConversationsModels +from typesense.sync.conversation_model import ConversationModel +from typesense.sync.conversations_models import ConversationsModels + +load_dotenv() + + +@pytest.fixture(scope="function", name="delete_all_conversations_models") +def clear_typesense_conversations_models() -> None: + """Remove all conversations_models from the Typesense server.""" + url = "http://localhost:8108/conversations/models" + headers = {"X-TYPESENSE-API-KEY": "xyz"} + + # Get the list of collections + response = requests.get(url, headers=headers, timeout=3) + response.raise_for_status() + + conversations_models = response.json() + + # Delete each conversation model + for conversation_model in conversations_models: + conversation_model_id = conversation_model.get("id") + delete_url = f"{url}/{conversation_model_id}" + delete_response = requests.delete(delete_url, headers=headers, timeout=3) + delete_response.raise_for_status() + + +@pytest.fixture(scope="function", name="create_conversations_model") +def create_conversations_model_fixture( + create_conversation_history_collection: None, +) -> str: + """Create a conversations model in the Typesense server.""" + url = "http://localhost:8108/conversations/models" + headers = {"X-TYPESENSE-API-KEY": "xyz"} + conversations_model_data = { + "api_key": os.environ["OPEN_AI_KEY"], + "max_bytes": 16384, + "model_name": "openai/gpt-3.5-turbo", + "history_collection": "conversation_store", + "system_prompt": "This is a system prompt", + } + + response = requests.post( + url, + headers=headers, + 
json=conversations_model_data, + timeout=3, + ) + + response.raise_for_status() + + conversation_model_id: str = response.json()["id"] + return conversation_model_id + + +@pytest.fixture(scope="function", name="fake_conversations_models") +def fake_conversations_models_fixture(fake_api_call: ApiCall) -> ConversationsModels: + """Return a ConversationsModels object with test values.""" + return ConversationsModels(fake_api_call) + + +@pytest.fixture(scope="function", name="fake_conversation_model") +def fake_conversation_model_fixture(fake_api_call: ApiCall) -> ConversationModel: + """Return a ConversationModel object with test values.""" + return ConversationModel(fake_api_call, "conversation_model_id") + + +@pytest.fixture(scope="function", name="actual_conversations_models") +def actual_conversations_models_fixture( + actual_api_call: ApiCall, +) -> ConversationsModels: + """Return a ConversationsModels object using a real API.""" + return ConversationsModels(actual_api_call) + + +@pytest.fixture(scope="function", name="actual_async_conversations_models") +def actual_async_conversations_models_fixture( + actual_async_api_call: AsyncApiCall, +) -> AsyncConversationsModels: + """Return an AsyncConversationsModels object using a real API.""" + return AsyncConversationsModels(actual_async_api_call) + + +@pytest.fixture(scope="function", name="fake_async_conversations_models") +def fake_async_conversations_models_fixture( + fake_async_api_call: AsyncApiCall, +) -> AsyncConversationsModels: + """Return an AsyncConversationsModels object with test values.""" + return AsyncConversationsModels(fake_async_api_call) + + +@pytest.fixture(scope="function", name="fake_async_conversation_model") +def fake_async_conversation_model_fixture( + fake_async_api_call: AsyncApiCall, +) -> AsyncConversationModel: + """Return an AsyncConversationModel object with test values.""" + return AsyncConversationModel(fake_async_api_call, "conversation_model_id") + + +@pytest.fixture(scope="function", 
name="create_conversation_history_collection") +def create_conversation_history_collection_fixture() -> None: + """Create a collection for conversation history in the Typesense server.""" + url = "http://localhost:8108/collections" + delete_url = "http://localhost:8108/collections/conversation_store" + + headers = {"X-TYPESENSE-API-KEY": "xyz"} + collection_data = { + "name": "conversation_store", + "fields": [ + {"name": "conversation_id", "type": "string"}, + {"name": "model_id", "type": "string"}, + {"name": "timestamp", "type": "int32"}, + {"name": "role", "type": "string", "index": False}, + {"name": "message", "type": "string", "index": False}, + ], + } + + delete_response = requests.delete(delete_url, headers=headers, timeout=3) + if delete_response.status_code not in {200, 404}: + delete_response.raise_for_status() + + response = requests.post(url, headers=headers, json=collection_data, timeout=3) + response.raise_for_status() diff --git a/tests/fixtures/curation_set_fixtures.py b/tests/fixtures/curation_set_fixtures.py new file mode 100644 index 0000000..e91be1a --- /dev/null +++ b/tests/fixtures/curation_set_fixtures.py @@ -0,0 +1,98 @@ +"""Fixtures for the curation set tests.""" + +import pytest +import requests + +from typesense.sync.api_call import ApiCall +from typesense.async_.api_call import AsyncApiCall +from typesense.async_.curation_set import AsyncCurationSet +from typesense.async_.curation_sets import AsyncCurationSets +from typesense.sync.curation_set import CurationSet +from typesense.sync.curation_sets import CurationSets + + +@pytest.fixture(scope="function", name="create_curation_set") +def create_curation_set_fixture() -> None: + """Create a curation set in the Typesense server.""" + url = "http://localhost:8108/curation_sets/products" + headers = {"X-TYPESENSE-API-KEY": "xyz"} + data = { + "items": [ + { + "id": "rule-1", + "rule": {"query": "shoe", "match": "contains"}, + "includes": [{"id": "123", "position": 1}], + "excludes": [{"id": 
"999"}], + } + ] + } + + resp = requests.put(url, headers=headers, json=data, timeout=3) + resp.raise_for_status() + + +@pytest.fixture(scope="function", name="delete_all_curation_sets") +def clear_typesense_curation_sets() -> None: + """Remove all curation sets from the Typesense server.""" + url = "http://localhost:8108/curation_sets" + headers = {"X-TYPESENSE-API-KEY": "xyz"} + + response = requests.get(url, headers=headers, timeout=3) + response.raise_for_status() + data = response.json() + + for cur in data: + name = cur.get("name") + if not name: + continue + delete_url = f"{url}/{name}" + delete_response = requests.delete(delete_url, headers=headers, timeout=3) + delete_response.raise_for_status() + + +@pytest.fixture(scope="function", name="actual_curation_sets") +def actual_curation_sets_fixture(actual_api_call: ApiCall) -> CurationSets: + """Return a CurationSets object using a real API.""" + return CurationSets(actual_api_call) + + +@pytest.fixture(scope="function", name="actual_curation_set") +def actual_curation_set_fixture(actual_api_call: ApiCall) -> CurationSet: + """Return a CurationSet object using a real API.""" + return CurationSet(actual_api_call, "products") + + +@pytest.fixture(scope="function", name="fake_curation_sets") +def fake_curation_sets_fixture(fake_api_call: ApiCall) -> CurationSets: + """Return a CurationSets object with test values.""" + return CurationSets(fake_api_call) + + +@pytest.fixture(scope="function", name="fake_curation_set") +def fake_curation_set_fixture(fake_api_call: ApiCall) -> CurationSet: + """Return a CurationSet object with test values.""" + return CurationSet(fake_api_call, "products") + + +@pytest.fixture(scope="function", name="actual_async_curation_sets") +def actual_async_curation_sets_fixture( + actual_async_api_call: AsyncApiCall, +) -> AsyncCurationSets: + """Return a AsyncCurationSets object using a real API.""" + return AsyncCurationSets(actual_async_api_call) + + +@pytest.fixture(scope="function", 
name="fake_async_curation_sets") +def fake_async_curation_sets_fixture( + fake_async_api_call: AsyncApiCall, +) -> AsyncCurationSets: + """Return a AsyncCurationSets object with test values.""" + return AsyncCurationSets(fake_async_api_call) + + +@pytest.fixture(scope="function", name="fake_async_curation_set") +def fake_async_curation_set_fixture( + fake_async_api_call: AsyncApiCall, +) -> AsyncCurationSet: + """Return a AsyncCurationSet object with test values.""" + return AsyncCurationSet(fake_async_api_call, "products") diff --git a/tests/fixtures/debug_fixtures.py b/tests/fixtures/debug_fixtures.py new file mode 100644 index 0000000..a192be6 --- /dev/null +++ b/tests/fixtures/debug_fixtures.py @@ -0,0 +1,32 @@ +"""Fixtures for the Debug class tests.""" + +import pytest + +from typesense.sync.api_call import ApiCall +from typesense.async_.api_call import AsyncApiCall +from typesense.async_.debug import AsyncDebug +from typesense.sync.debug import Debug + + +@pytest.fixture(scope="function", name="actual_debug") +def actual_debug_fixture(actual_api_call: ApiCall) -> Debug: + """Return a Debug object using a real API.""" + return Debug(actual_api_call) + + +@pytest.fixture(scope="function", name="fake_debug") +def fake_debug_fixture(fake_api_call: ApiCall) -> Debug: + """Return a debug object with test values.""" + return Debug(fake_api_call) + + +@pytest.fixture(scope="function", name="actual_async_debug") +def actual_async_debug_fixture(actual_async_api_call: AsyncApiCall) -> AsyncDebug: + """Return a AsyncDebug object using a real API.""" + return AsyncDebug(actual_async_api_call) + + +@pytest.fixture(scope="function", name="fake_async_debug") +def fake_async_debug_fixture(fake_async_api_call: AsyncApiCall) -> AsyncDebug: + """Return a AsyncDebug object with test values.""" + return AsyncDebug(fake_async_api_call) diff --git a/tests/fixtures/document_fixtures.py b/tests/fixtures/document_fixtures.py new file mode 100644 index 0000000..13f504c --- /dev/null +++ 
b/tests/fixtures/document_fixtures.py @@ -0,0 +1,102 @@ +"""Fixtures for creating documents in the Typesense server.""" + +import sys + +import pytest +import requests +from faker import Faker +from faker.providers import company + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + +from typesense.sync.api_call import ApiCall +from typesense.async_.api_call import AsyncApiCall +from typesense.async_.document import AsyncDocument +from typesense.async_.documents import AsyncDocuments +from typesense.sync.document import Document +from typesense.sync.documents import Documents + +fake = Faker() +fake.add_provider(company) + + +@pytest.fixture(scope="function", name="create_document") +def create_document_fixture() -> None: + """Create a document in the Typesense server.""" + url = "http://localhost:8108/collections/companies/documents" + headers = {"X-TYPESENSE-API-KEY": "xyz"} + document_data = { + "id": "0", + "company_name": "Company", + "num_employees": 10, + } + + response = requests.post(url, headers=headers, json=document_data, timeout=3) + response.raise_for_status() + + +@pytest.fixture(scope="function", name="actual_documents") +def actual_documents_fixture(actual_api_call: ApiCall) -> Documents: + """Return a Documents object using a real API.""" + return Documents(actual_api_call, "companies") + + +@pytest.fixture(scope="function", name="fake_documents") +def fake_documents_fixture(fake_api_call: ApiCall) -> Documents: + """Return a Documents object with test values.""" + return Documents(fake_api_call, "companies") + + +@pytest.fixture(scope="function", name="fake_document") +def fake_document_fixture(fake_api_call: ApiCall) -> Document: + """Return a Document object with test values.""" + return Document(fake_api_call, "companies", "0") + + +@pytest.fixture(scope="function", name="actual_async_documents") +def actual_async_documents_fixture( + actual_async_api_call: AsyncApiCall, +) -> AsyncDocuments: + 
"""Return a AsyncDocuments object using a real API.""" + return AsyncDocuments(actual_async_api_call, "companies") + + +@pytest.fixture(scope="function", name="fake_async_documents") +def fake_async_documents_fixture( + fake_async_api_call: AsyncApiCall, +) -> AsyncDocuments: + """Return a AsyncDocuments object with test values.""" + return AsyncDocuments(fake_async_api_call, "companies") + + +@pytest.fixture(scope="function", name="fake_async_document") +def fake_async_document_fixture(fake_async_api_call: AsyncApiCall) -> AsyncDocument: + """Return a AsyncDocument object with test values.""" + return AsyncDocument(fake_async_api_call, "companies", "0") + + +class Companies(typing.TypedDict): + """Company data type.""" + + id: str + company_name: str + num_employees: int + + +@pytest.fixture(scope="function", name="generate_companies") +def generate_companies_fixture() -> typing.List[Companies]: + """Generate a list of companies using fake data.""" + companies: typing.List[Companies] = [] + for company_index in range(50): + companies.append( + { + "id": str(company_index), + "company_name": fake.company(), + "num_employees": fake.random_int(1, 1000), + }, + ) + + return companies diff --git a/tests/fixtures/key_fixtures.py b/tests/fixtures/key_fixtures.py new file mode 100644 index 0000000..3c252af --- /dev/null +++ b/tests/fixtures/key_fixtures.py @@ -0,0 +1,84 @@ +"""Fixtures for the key tests.""" + +import pytest +import requests + +from typesense.sync.api_call import ApiCall +from typesense.async_.api_call import AsyncApiCall +from typesense.async_.key import AsyncKey +from typesense.async_.keys import AsyncKeys +from typesense.sync.key import Key +from typesense.sync.keys import Keys + + +@pytest.fixture(scope="function", name="delete_all_keys") +def clear_typesense_keys() -> None: + """Remove all keys from the Typesense server.""" + url = "http://localhost:8108/keys" + headers = {"X-TYPESENSE-API-KEY": "xyz"} + + # Get the list of keys + response = 
requests.get(url, headers=headers, timeout=3) + response.raise_for_status() + + keys = response.json() + + # Delete each key + for key in keys["keys"]: + key_name = key.get("id") + delete_url = f"{url}/{key_name}" + delete_response = requests.delete(delete_url, headers=headers, timeout=3) + delete_response.raise_for_status() + + +@pytest.fixture(scope="function", name="create_key_id") +def create_key_fixture() -> int: + """Create a key set in the Typesense server.""" + url = "http://localhost:8108/keys" + headers = {"X-TYPESENSE-API-KEY": "xyz"} + api_key_data = { + "actions": ["documents:search"], + "collections": ["companies"], + "description": "Search-only key", + } + + response = requests.post(url, headers=headers, json=api_key_data, timeout=3) + response.raise_for_status() + key_id: int = response.json()["id"] + return key_id + + +@pytest.fixture(scope="function", name="actual_keys") +def actual_keys_fixture(actual_api_call: ApiCall) -> Keys: + """Return a Keys object using a real API.""" + return Keys(actual_api_call) + + +@pytest.fixture(scope="function", name="fake_keys") +def fake_keys_fixture(fake_api_call: ApiCall) -> Keys: + """Return a Keys object with test values.""" + return Keys(fake_api_call) + + +@pytest.fixture(scope="function", name="fake_key") +def fake_key_fixture(fake_api_call: ApiCall) -> Key: + """Return a Key object with test values.""" + return Key(fake_api_call, 1) + + +@pytest.fixture(scope="function", name="actual_async_keys") +def actual_async_keys_fixture(actual_async_api_call: AsyncApiCall) -> AsyncKeys: + """Return an AsyncKeys object using a real API.""" + return AsyncKeys(actual_async_api_call) + + +@pytest.fixture(scope="function", name="fake_async_keys") +def fake_async_keys_fixture(fake_async_api_call: AsyncApiCall) -> AsyncKeys: + """Return an AsyncKeys object with test values.""" + return AsyncKeys(fake_async_api_call) + + +@pytest.fixture(scope="function", name="fake_async_key") +def 
fake_async_key_fixture(fake_async_api_call: AsyncApiCall) -> AsyncKey: + """Return an AsyncKey object with test values.""" + return AsyncKey(fake_async_api_call, 1) diff --git a/tests/fixtures/metrics_fixtures.py b/tests/fixtures/metrics_fixtures.py new file mode 100644 index 0000000..6165de6 --- /dev/null +++ b/tests/fixtures/metrics_fixtures.py @@ -0,0 +1,26 @@ +"""Fixtures for the Metrics class tests.""" + +import pytest + +from typesense.sync.api_call import ApiCall +from typesense.async_.api_call import AsyncApiCall +from typesense.async_.metrics import AsyncMetrics +from typesense.sync.metrics import Metrics + + +@pytest.fixture(scope="function", name="actual_metrics") +def actual_debug_fixture(actual_api_call: ApiCall) -> Metrics: + """Return a Metrics object using a real API.""" + return Metrics(actual_api_call) + + +@pytest.fixture(scope="function", name="actual_async_metrics") +def actual_async_metrics_fixture(actual_async_api_call: AsyncApiCall) -> AsyncMetrics: + """Return an AsyncMetrics object using a real API.""" + return AsyncMetrics(actual_async_api_call) + + +@pytest.fixture(scope="function", name="fake_async_metrics") +def fake_async_metrics_fixture(fake_async_api_call: AsyncApiCall) -> AsyncMetrics: + """Return an AsyncMetrics object with test values.""" + return AsyncMetrics(fake_async_api_call) diff --git a/tests/fixtures/multi_search_fixtures.py b/tests/fixtures/multi_search_fixtures.py new file mode 100644 index 0000000..70ba251 --- /dev/null +++ b/tests/fixtures/multi_search_fixtures.py @@ -0,0 +1,30 @@ +"""Fixtures for the MultiSearch class.""" + +import pytest + +from typesense.sync.api_call import ApiCall +from typesense.async_.api_call import AsyncApiCall +from typesense.async_.multi_search import AsyncMultiSearch +from typesense.sync.multi_search import MultiSearch + + +@pytest.fixture(scope="function", name="actual_multi_search") +def actual_multi_search_fixture(actual_api_call: ApiCall) -> MultiSearch: + """Return a MultiSearch object 
using a real API.""" + return MultiSearch(actual_api_call) + + +@pytest.fixture(scope="function", name="actual_async_multi_search") +def actual_async_multi_search_fixture( + actual_async_api_call: AsyncApiCall, +) -> AsyncMultiSearch: + """Return a AsyncMultiSearch object using a real API.""" + return AsyncMultiSearch(actual_async_api_call) + + +@pytest.fixture(scope="function", name="fake_async_multi_search") +def fake_async_multi_search_fixture( + fake_async_api_call: AsyncApiCall, +) -> AsyncMultiSearch: + """Return a AsyncMultiSearch object with test values.""" + return AsyncMultiSearch(fake_async_api_call) diff --git a/tests/fixtures/nl_search_model_fixtures.py b/tests/fixtures/nl_search_model_fixtures.py new file mode 100644 index 0000000..3078040 --- /dev/null +++ b/tests/fixtures/nl_search_model_fixtures.py @@ -0,0 +1,105 @@ +"""Fixtures for the NL search model tests.""" + +import os + +import pytest +import requests +from dotenv import load_dotenv + +from typesense.sync.api_call import ApiCall +from typesense.async_.api_call import AsyncApiCall +from typesense.async_.nl_search_model import AsyncNLSearchModel +from typesense.async_.nl_search_models import AsyncNLSearchModels +from typesense.sync.nl_search_model import NLSearchModel +from typesense.sync.nl_search_models import NLSearchModels + +load_dotenv() + + +@pytest.fixture(scope="function", name="delete_all_nl_search_models") +def clear_typesense_nl_search_models() -> None: + """Remove all nl_search_models from the Typesense server.""" + url = "http://localhost:8108/nl_search_models" + headers = {"X-TYPESENSE-API-KEY": "xyz"} + + # Get the list of models + response = requests.get(url, headers=headers, timeout=3) + response.raise_for_status() + + nl_search_models = response.json() + + # Delete each NL search model + for nl_search_model in nl_search_models: + model_id = nl_search_model.get("id") + delete_url = f"{url}/{model_id}" + delete_response = requests.delete(delete_url, headers=headers, timeout=3) 
+ delete_response.raise_for_status() + + +@pytest.fixture(scope="function", name="create_nl_search_model") +def create_nl_search_model_fixture() -> str: + """Create an NL search model in the Typesense server.""" + url = "http://localhost:8108/nl_search_models" + headers = {"X-TYPESENSE-API-KEY": "xyz"} + nl_search_model_data = { + "api_key": os.environ.get("OPEN_AI_KEY", "test-api-key"), + "max_bytes": 16384, + "model_name": "openai/gpt-3.5-turbo", + "system_prompt": "This is a system prompt for NL search", + } + + response = requests.post( + url, + headers=headers, + json=nl_search_model_data, + timeout=3, + ) + + response.raise_for_status() + + model_id: str = response.json()["id"] + return model_id + + +@pytest.fixture(scope="function", name="fake_nl_search_models") +def fake_nl_search_models_fixture(fake_api_call: ApiCall) -> NLSearchModels: + """Return an NLSearchModels object with test values.""" + return NLSearchModels(fake_api_call) + + +@pytest.fixture(scope="function", name="fake_nl_search_model") +def fake_nl_search_model_fixture(fake_api_call: ApiCall) -> NLSearchModel: + """Return an NLSearchModel object with test values.""" + return NLSearchModel(fake_api_call, "nl_search_model_id") + + +@pytest.fixture(scope="function", name="actual_nl_search_models") +def actual_nl_search_models_fixture( + actual_api_call: ApiCall, +) -> NLSearchModels: + """Return an NLSearchModels object using a real API.""" + return NLSearchModels(actual_api_call) + + +@pytest.fixture(scope="function", name="actual_async_nl_search_models") +def actual_async_nl_search_models_fixture( + actual_async_api_call: AsyncApiCall, +) -> AsyncNLSearchModels: + """Return a AsyncNLSearchModels object using a real API.""" + return AsyncNLSearchModels(actual_async_api_call) + + +@pytest.fixture(scope="function", name="fake_async_nl_search_models") +def fake_async_nl_search_models_fixture( + fake_async_api_call: AsyncApiCall, +) -> AsyncNLSearchModels: + """Return a AsyncNLSearchModels object 
with test values.""" + return AsyncNLSearchModels(fake_async_api_call) + + +@pytest.fixture(scope="function", name="fake_async_nl_search_model") +def fake_async_nl_search_model_fixture( + fake_async_api_call: AsyncApiCall, +) -> AsyncNLSearchModel: + """Return an AsyncNLSearchModel object with test values.""" + return AsyncNLSearchModel(fake_async_api_call, "nl_search_model_id") diff --git a/tests/fixtures/operation_fixtures.py b/tests/fixtures/operation_fixtures.py new file mode 100644 index 0000000..a6fade9 --- /dev/null +++ b/tests/fixtures/operation_fixtures.py @@ -0,0 +1,36 @@ +"""Fixtures for the Operations tests.""" + +import pytest + +from typesense.sync.api_call import ApiCall +from typesense.async_.api_call import AsyncApiCall +from typesense.async_.operations import AsyncOperations +from typesense.sync.operations import Operations + + +@pytest.fixture(scope="function", name="actual_operations") +def actual_operations_fixture(actual_api_call: ApiCall) -> Operations: + """Return an Operations object using a real API.""" + return Operations(actual_api_call) + + +@pytest.fixture(scope="function", name="fake_operations") +def fake_operations_fixture(fake_api_call: ApiCall) -> Operations: + """Return an Operations object with test values.""" + return Operations(fake_api_call) + + +@pytest.fixture(scope="function", name="actual_async_operations") +def actual_async_operations_fixture( + actual_async_api_call: AsyncApiCall, +) -> AsyncOperations: + """Return an AsyncOperations object using a real API.""" + return AsyncOperations(actual_async_api_call) + + +@pytest.fixture(scope="function", name="fake_async_operations") +def fake_async_operations_fixture( + fake_async_api_call: AsyncApiCall, +) -> AsyncOperations: + """Return an AsyncOperations object with test values.""" + return AsyncOperations(fake_async_api_call) diff --git a/tests/fixtures/override_fixtures.py b/tests/fixtures/override_fixtures.py new file mode 100644 index 0000000..6631f35 --- /dev/null +++ 
b/tests/fixtures/override_fixtures.py @@ -0,0 +1,65 @@ +"""Fixtures for the Overrides tests.""" + +import pytest +import requests + +from typesense.sync.api_call import ApiCall +from typesense.async_.api_call import AsyncApiCall +from typesense.async_.override import AsyncOverride +from typesense.async_.overrides import AsyncOverrides +from typesense.sync.override import Override +from typesense.sync.overrides import Overrides + + +@pytest.fixture(scope="function", name="create_override") +def create_override_fixture(create_collection: None) -> None: + """Create an override in the Typesense server.""" + url = "http://localhost:8108/collections/companies/overrides/company_override" + headers = {"X-TYPESENSE-API-KEY": "xyz"} + override_data = { + "rule": {"match": "exact", "query": "companies"}, + "filter_by": "num_employees>10", + } + + response = requests.put(url, headers=headers, json=override_data, timeout=3) + response.raise_for_status() + + +@pytest.fixture(scope="function", name="actual_overrides") +def actual_overrides_fixture(actual_api_call: ApiCall) -> Overrides: + """Return a Overrides object using a real API.""" + return Overrides(actual_api_call, "companies") + + +@pytest.fixture(scope="function", name="fake_overrides") +def fake_overrides_fixture(fake_api_call: ApiCall) -> Overrides: + """Return a Override object with test values.""" + return Overrides(fake_api_call, "companies") + + +@pytest.fixture(scope="function", name="fake_override") +def fake_override_fixture(fake_api_call: ApiCall) -> Override: + """Return a Override object with test values.""" + return Override(fake_api_call, "companies", "company_override") + + +@pytest.fixture(scope="function", name="actual_async_overrides") +def actual_async_overrides_fixture( + actual_async_api_call: AsyncApiCall, +) -> AsyncOverrides: + """Return a AsyncOverrides object using a real API.""" + return AsyncOverrides(actual_async_api_call, "companies") + + +@pytest.fixture(scope="function", 
name="fake_async_overrides") +def fake_async_overrides_fixture( + fake_async_api_call: AsyncApiCall, +) -> AsyncOverrides: + """Return an AsyncOverrides object with test values.""" + return AsyncOverrides(fake_async_api_call, "companies") + + +@pytest.fixture(scope="function", name="fake_async_override") +def fake_async_override_fixture(fake_async_api_call: AsyncApiCall) -> AsyncOverride: + """Return an AsyncOverride object with test values.""" + return AsyncOverride(fake_async_api_call, "companies", "company_override") diff --git a/tests/fixtures/stemming_fixtures.py b/tests/fixtures/stemming_fixtures.py new file mode 100644 index 0000000..f40430e --- /dev/null +++ b/tests/fixtures/stemming_fixtures.py @@ -0,0 +1,32 @@ +"""Fixtures for the Stemming tests.""" + +import pytest + +from typesense.sync.api_call import ApiCall +from typesense.async_.api_call import AsyncApiCall +from typesense.async_.stemming import AsyncStemming +from typesense.sync.stemming import Stemming + + +@pytest.fixture(scope="function", name="actual_stemming") +def actual_stemming_fixture( + actual_api_call: ApiCall, +) -> Stemming: + """Return a Stemming object using a real API.""" + return Stemming(actual_api_call) + + +@pytest.fixture(scope="function", name="actual_async_stemming") +def actual_async_stemming_fixture( + actual_async_api_call: AsyncApiCall, +) -> AsyncStemming: + """Return an AsyncStemming object using a real API.""" + return AsyncStemming(actual_async_api_call) + + +@pytest.fixture(scope="function", name="fake_async_stemming") +def fake_async_stemming_fixture( + fake_async_api_call: AsyncApiCall, +) -> AsyncStemming: + """Return an AsyncStemming object with test values.""" + return AsyncStemming(fake_async_api_call) diff --git a/tests/fixtures/stopword_fixtures.py b/tests/fixtures/stopword_fixtures.py new file mode 100644 index 0000000..74a6dbd --- /dev/null +++ b/tests/fixtures/stopword_fixtures.py @@ -0,0 +1,96 @@ +"""Fixtures for the stopword tests.""" + +import pytest 
+import requests + +from typesense.sync.api_call import ApiCall +from typesense.async_.api_call import AsyncApiCall +from typesense.async_.stopwords import AsyncStopwords +from typesense.async_.stopwords_set import AsyncStopwordsSet +from typesense.sync.stopwords import Stopwords +from typesense.sync.stopwords_set import StopwordsSet + + +@pytest.fixture(scope="function", name="create_stopword") +def create_stopword_fixture() -> None: + """Create a stopword set in the Typesense server.""" + url = "http://localhost:8108/stopwords/company_stopwords" + headers = {"X-TYPESENSE-API-KEY": "xyz"} + stopword_data = { + "stopwords": ["and", "is", "the"], + } + + create_stopword_response = requests.put( + url, + headers=headers, + json=stopword_data, + timeout=3, + ) + create_stopword_response.raise_for_status() + + +@pytest.fixture(scope="function", name="delete_all_stopwords") +def clear_typesense_stopwords() -> None: + """Remove all stopwords from the Typesense server.""" + url = "http://localhost:8108/stopwords" + headers = {"X-TYPESENSE-API-KEY": "xyz"} + + # Get the list of stopwords + response = requests.get(url, headers=headers, timeout=3) + response.raise_for_status() + stopwords = response.json() + + # Delete each stopword + for stopword_set in stopwords["stopwords"]: + stopword_id = stopword_set.get("id") + delete_url = f"{url}/{stopword_id}" + delete_response = requests.delete(delete_url, headers=headers, timeout=3) + delete_response.raise_for_status() + + +@pytest.fixture(scope="function", name="actual_stopwords") +def actual_stopwords_fixture(actual_api_call: ApiCall) -> Stopwords: + """Return a Stopwords object using a real API.""" + return Stopwords(actual_api_call) + + +@pytest.fixture(scope="function", name="actual_stopwords_set") +def actual_stopwords_set_fixture(actual_api_call: ApiCall) -> StopwordsSet: + """Return a Stopwords object using a real API.""" + return StopwordsSet(actual_api_call, "company_stopwords") + + +@pytest.fixture(scope="function", 
name="fake_stopwords") +def fake_stopwords_fixture(fake_api_call: ApiCall) -> Stopwords: + """Return a Stopwords object with test values.""" + return Stopwords(fake_api_call) + + +@pytest.fixture(scope="function", name="fake_stopwords_set") +def fake_stopwords_set_fixture(fake_api_call: ApiCall) -> StopwordsSet: + """Return a StopwordsSet object with test values.""" + return StopwordsSet(fake_api_call, "company_stopwords") + + +@pytest.fixture(scope="function", name="actual_async_stopwords") +def actual_async_stopwords_fixture(actual_async_api_call: AsyncApiCall) -> AsyncStopwords: + """Return an AsyncStopwords object using a real API.""" + return AsyncStopwords(actual_async_api_call) + + +@pytest.fixture(scope="function", name="actual_async_stopwords_set") +def actual_async_stopwords_set_fixture(actual_async_api_call: AsyncApiCall) -> AsyncStopwordsSet: + """Return an AsyncStopwordsSet object using a real API.""" + return AsyncStopwordsSet(actual_async_api_call, "company_stopwords") + + +@pytest.fixture(scope="function", name="fake_async_stopwords") +def fake_async_stopwords_fixture(fake_async_api_call: AsyncApiCall) -> AsyncStopwords: + """Return an AsyncStopwords object with test values.""" + return AsyncStopwords(fake_async_api_call) + + +@pytest.fixture(scope="function", name="fake_async_stopwords_set") +def fake_async_stopwords_set_fixture(fake_async_api_call: AsyncApiCall) -> AsyncStopwordsSet: + """Return an AsyncStopwordsSet object with test values.""" + return AsyncStopwordsSet(fake_async_api_call, "company_stopwords") diff --git a/tests/fixtures/synonym_fixtures.py b/tests/fixtures/synonym_fixtures.py new file mode 100644 index 0000000..ab2f8a9 --- /dev/null +++ b/tests/fixtures/synonym_fixtures.py @@ -0,0 +1,69 @@ +"""Fixtures for the synonym tests.""" + +import pytest +import requests + +from typesense.sync.api_call import ApiCall +from typesense.async_.api_call import AsyncApiCall +from typesense.async_.synonym import AsyncSynonym +from 
"""Fixtures for the synonym tests."""

import pytest
import requests

from typesense.sync.api_call import ApiCall
from typesense.async_.api_call import AsyncApiCall
from typesense.async_.synonym import AsyncSynonym
from typesense.async_.synonyms import AsyncSynonyms
from typesense.sync.synonym import Synonym
from typesense.sync.synonyms import Synonyms


@pytest.fixture(scope="function", name="create_synonym")
def create_synonym_fixture(create_collection: None) -> None:
    """Create a synonym in the Typesense server."""
    url = "http://localhost:8108/collections/companies/synonyms/company_synonym"
    headers = {"X-TYPESENSE-API-KEY": "xyz"}
    synonym_data = {
        "synonyms": ["companies", "corporations", "firms"],
    }

    create_synonym_response = requests.put(
        url,
        headers=headers,
        json=synonym_data,
        timeout=3,
    )
    create_synonym_response.raise_for_status()


@pytest.fixture(scope="function", name="fake_synonyms")
def fake_synonyms_fixture(fake_api_call: ApiCall) -> Synonyms:
    """Return a Synonyms object with test values."""
    return Synonyms(fake_api_call, "companies")


@pytest.fixture(scope="function", name="actual_synonyms")
def actual_synonyms_fixture(actual_api_call: ApiCall) -> Synonyms:
    """Return a Synonyms object using a real API."""
    return Synonyms(actual_api_call, "companies")


@pytest.fixture(scope="function", name="fake_synonym")
def fake_synonym_fixture(fake_api_call: ApiCall) -> Synonym:
    """Return a Synonym object with test values."""
    return Synonym(fake_api_call, "companies", "company_synonym")


@pytest.fixture(scope="function", name="actual_async_synonyms")
def actual_async_synonyms_fixture(
    actual_async_api_call: AsyncApiCall,
) -> AsyncSynonyms:
    """Return an AsyncSynonyms object using a real API."""
    return AsyncSynonyms(actual_async_api_call, "companies")


@pytest.fixture(scope="function", name="fake_async_synonyms")
def fake_async_synonyms_fixture(
    fake_async_api_call: AsyncApiCall,
) -> AsyncSynonyms:
    """Return an AsyncSynonyms object with test values."""
    return AsyncSynonyms(fake_async_api_call, "companies")


@pytest.fixture(scope="function", name="fake_async_synonym")
def fake_async_synonym_fixture(fake_async_api_call: AsyncApiCall) -> AsyncSynonym:
    """Return an AsyncSynonym object with test values."""
    return AsyncSynonym(fake_async_api_call, "companies", "company_synonym")


# --- tests/fixtures/synonym_set_fixtures.py ---
"""Fixtures for the synonym set tests."""


@pytest.fixture(scope="function", name="create_synonym_set")
def create_synonym_set_fixture() -> None:
    """Create a synonym set in the Typesense server."""
    url = "http://localhost:8108/synonym_sets/test-set"
    headers = {"X-TYPESENSE-API-KEY": "xyz"}
    data = {
        "items": [
            {
                "id": "company_synonym",
                "synonyms": ["companies", "corporations", "firms"],
            }
        ]
    }

    resp = requests.put(url, headers=headers, json=data, timeout=3)
    resp.raise_for_status()


@pytest.fixture(scope="function", name="delete_all_synonym_sets")
def clear_typesense_synonym_sets() -> None:
    """Remove all synonym sets from the Typesense server."""
    url = "http://localhost:8108/synonym_sets"
    headers = {"X-TYPESENSE-API-KEY": "xyz"}

    # Get the list of synonym sets currently on the server.
    response = requests.get(url, headers=headers, timeout=3)
    response.raise_for_status()
    data = response.json()

    # Delete each synonym set by its name; skip entries without one.
    for synset in data:
        name = synset.get("name")
        if not name:
            continue
        delete_url = f"{url}/{name}"
        delete_response = requests.delete(delete_url, headers=headers, timeout=3)
        delete_response.raise_for_status()
@pytest.fixture(scope="function", name="actual_synonym_sets")
def actual_synonym_sets_fixture(actual_api_call: ApiCall) -> SynonymSets:
    """Return a SynonymSets object using a real API."""
    return SynonymSets(actual_api_call)


@pytest.fixture(scope="function", name="actual_synonym_set")
def actual_synonym_set_fixture(actual_api_call: ApiCall) -> SynonymSet:
    """Return a SynonymSet object using a real API."""
    return SynonymSet(actual_api_call, "test-set")


@pytest.fixture(scope="function", name="fake_synonym_sets")
def fake_synonym_sets_fixture(fake_api_call: ApiCall) -> SynonymSets:
    """Return a SynonymSets object with test values."""
    return SynonymSets(fake_api_call)


@pytest.fixture(scope="function", name="fake_synonym_set")
def fake_synonym_set_fixture(fake_api_call: ApiCall) -> SynonymSet:
    """Return a SynonymSet object with test values."""
    return SynonymSet(fake_api_call, "test-set")


@pytest.fixture(scope="function", name="actual_async_synonym_sets")
def actual_async_synonym_sets_fixture(
    actual_async_api_call: AsyncApiCall,
) -> AsyncSynonymSets:
    """Return an AsyncSynonymSets object using a real API."""
    return AsyncSynonymSets(actual_async_api_call)


@pytest.fixture(scope="function", name="actual_async_synonym_set")
def actual_async_synonym_set_fixture(
    actual_async_api_call: AsyncApiCall,
) -> AsyncSynonymSet:
    """Return an AsyncSynonymSet object using a real API."""
    return AsyncSynonymSet(actual_async_api_call, "test-set")


@pytest.fixture(scope="function", name="fake_async_synonym_sets")
def fake_async_synonym_sets_fixture(
    fake_async_api_call: AsyncApiCall,
) -> AsyncSynonymSets:
    """Return an AsyncSynonymSets object with test values."""
    return AsyncSynonymSets(fake_async_api_call)


@pytest.fixture(scope="function", name="fake_async_synonym_set")
def fake_async_synonym_set_fixture(
    fake_async_api_call: AsyncApiCall,
) -> AsyncSynonymSet:
    """Return an AsyncSynonymSet object with test values."""
    return AsyncSynonymSet(fake_async_api_call, "test-set")


# --- tests/import_test.py ---
# mypy: disable-error-code="unreachable"
"""Test that the typing_extensions module is imported when Python version < 3.11."""

import importlib
import sys
from collections import namedtuple

import pytest
from pytest_mock import MockFixture

typing_module_names = [
    "alias",
    "analytics_rule_v1",
    "collection",
    "conversations_model",
    "debug",
    "document",
    "key",
    "multi_search",
    "operations",
    "override",
    "stopword",
    "synonym_set",
    "synonym",
]

module_names = [
    "sync.aliases",
    "sync.analytics_rule_v1",
    "sync.analytics_rules_v1",
    "sync.api_call",
    "sync.client",
    "sync.collection",
    "sync.collections",
    "configuration",
    "request_handler",
    "sync.conversations_models",
    "sync.document",
    "sync.documents",
    "sync.keys",
    "sync.multi_search",
    "sync.overrides",
    "sync.operations",
    "sync.synonyms",
    "sync.synonym_set",
    "sync.synonym_sets",
    "preprocess",
    "sync.stopwords",
]

# Create a namedtuple to mock sys.version_info
VersionInfo = namedtuple(
    "VersionInfo",
    ["major", "minor", "micro", "releaselevel", "serial"],
)


@pytest.mark.skipif(
    sys.version_info < (3, 11),
    # The condition skips on < 3.11, so this test runs only on >= 3.11;
    # the previous reason string ("only for Python < 3.11") said the opposite.
    reason="Test is only for Python >= 3.11",
)
def test_import_typing(mocker: MockFixture) -> None:
    """Test that the typing module is imported when Python version is 3.11 or higher."""
    mock_version_info = VersionInfo(3, 11, 0, "final", 0)
    mocker.patch.object(sys, "version_info", mock_version_info)

    # Import modules dynamically and assign them to a list
    modules = [importlib.import_module(f"typesense.{name}") for name in module_names]
    typing_modules = [
        importlib.import_module(f"typesense.types.{name}")
        for name in typing_module_names
    ]

    for module in modules:
        assert "typing" in module.__dict__
        assert module.typing == importlib.import_module("typing")

    for module in typing_modules:
        assert "typing" in module.__dict__
        assert module.typing == importlib.import_module("typing")
def test_import_typing_extensions(mocker: MockFixture) -> None:
    """Test that the typing_extensions module is imported when Python version < 3.11."""
    mock_version_info = VersionInfo(3, 10, 0, "final", 0)
    mocker.patch.object(sys, "version_info", mock_version_info)

    # Import, then reload so the version check re-runs under the patched version.
    init_imports = [
        importlib.import_module(f"typesense.{name}") for name in module_names
    ]
    modules = [importlib.reload(import_module) for import_module in init_imports]

    init_typing_imports = [
        importlib.import_module(f"typesense.types.{name}")
        for name in typing_module_names
    ]

    typing_modules = [
        importlib.reload(import_module) for import_module in init_typing_imports
    ]

    for module in modules:
        assert "typing" in module.__dict__
        assert module.typing == importlib.import_module("typing_extensions")

    for module in typing_modules:
        assert "typing" in module.__dict__
        assert module.typing == importlib.import_module("typing_extensions")


# --- tests/key_test.py ---
"""Tests for the Key class."""

from tests.utils.object_assertions import (
    assert_match_object,
    assert_object_lists_match,
    assert_to_contain_object,
)
from typesense.sync.api_call import ApiCall
from typesense.async_.api_call import AsyncApiCall
from typesense.async_.key import AsyncKey
from typesense.async_.keys import AsyncKeys
from typesense.sync.key import Key
from typesense.sync.keys import Keys


def test_init(fake_api_call: ApiCall) -> None:
    """Test that the Key object is initialized correctly."""
    key = Key(fake_api_call, 3)

    assert key.key_id == 3
    assert_match_object(key.api_call, fake_api_call)
    assert_object_lists_match(
        key.api_call.node_manager.nodes,
        fake_api_call.node_manager.nodes,
    )
    assert_match_object(
        key.api_call.config.nearest_node,
        fake_api_call.config.nearest_node,
    )
    assert key._endpoint_path == "/keys/3"  # noqa: WPS437


def test_init_async(fake_async_api_call: AsyncApiCall) -> None:
    """Test that the AsyncKey object is initialized correctly."""
    key = AsyncKey(fake_async_api_call, 3)

    assert key.key_id == 3
    assert_match_object(key.api_call, fake_async_api_call)
    assert_object_lists_match(
        key.api_call.node_manager.nodes,
        fake_async_api_call.node_manager.nodes,
    )
    assert_match_object(
        key.api_call.config.nearest_node,
        fake_async_api_call.config.nearest_node,
    )
    assert key._endpoint_path == "/keys/3"  # noqa: WPS437


def test_actual_retrieve(
    actual_keys: Keys,
    delete_all_keys: None,
    delete_all: None,
    create_key_id: int,
) -> None:
    """Test that the Key object can retrieve a key from Typesense Server."""
    response = actual_keys[create_key_id].retrieve()

    assert_to_contain_object(
        response,
        {
            "actions": ["documents:search"],
            "collections": ["companies"],
            "description": "Search-only key",
            "id": create_key_id,
        },
    )


def test_actual_delete(
    actual_keys: Keys,
    delete_all_keys: None,
    delete_all: None,
    create_key_id: int,
) -> None:
    """Test that the Key object can delete a key from Typesense Server."""
    response = actual_keys[create_key_id].delete()

    assert response == {"id": create_key_id}


async def test_actual_retrieve_async(
    actual_async_keys: AsyncKeys,
    delete_all_keys: None,
    delete_all: None,
    create_key_id: int,
) -> None:
    """Test that the AsyncKey object can retrieve a key from Typesense Server."""
    response = await actual_async_keys[create_key_id].retrieve()

    assert_to_contain_object(
        response,
        {
            "actions": ["documents:search"],
            "collections": ["companies"],
            "description": "Search-only key",
            "id": create_key_id,
        },
    )


async def test_actual_delete_async(
    actual_async_keys: AsyncKeys,
    delete_all_keys: None,
    delete_all: None,
    create_key_id: int,
) -> None:
    """Test that the AsyncKey object can delete a key from Typesense Server."""
    response = await actual_async_keys[create_key_id].delete()

    assert response == {"id": create_key_id}


# --- tests/keys_test.py ---
"""Tests for the Keys class."""

import base64
import hashlib
import hmac
import json

from typesense.async_.keys import AsyncKeys
from typesense.sync.keys import Keys


def test_init(fake_api_call: ApiCall) -> None:
    """Test that the Keys object is initialized correctly."""
    keys = Keys(fake_api_call)

    assert_match_object(keys.api_call, fake_api_call)
    assert_object_lists_match(
        keys.api_call.node_manager.nodes,
        fake_api_call.node_manager.nodes,
    )
    assert_match_object(
        keys.api_call.config.nearest_node,
        fake_api_call.config.nearest_node,
    )

    assert not keys.keys


def test_init_async(fake_async_api_call: AsyncApiCall) -> None:
    """Test that the AsyncKeys object is initialized correctly."""
    keys = AsyncKeys(fake_async_api_call)

    assert_match_object(keys.api_call, fake_async_api_call)
    assert_object_lists_match(
        keys.api_call.node_manager.nodes,
        fake_async_api_call.node_manager.nodes,
    )
    assert_match_object(
        keys.api_call.config.nearest_node,
        fake_async_api_call.config.nearest_node,
    )

    assert not keys.keys


def test_get_missing_key(fake_keys: Keys) -> None:
    """Test that the Keys object can get a missing key."""
    key = fake_keys[1]

    assert_match_object(key.api_call, fake_keys.api_call)
    assert_object_lists_match(
        key.api_call.node_manager.nodes, fake_keys.api_call.node_manager.nodes
    )
    assert_match_object(
        key.api_call.config.nearest_node,
        fake_keys.api_call.config.nearest_node,
    )
    assert key._endpoint_path == "/keys/1"  # noqa: WPS437
def test_get_missing_key_async(fake_async_keys: AsyncKeys) -> None:
    """Test that the AsyncKeys object can get a missing key."""
    key = fake_async_keys[1]

    assert_match_object(key.api_call, fake_async_keys.api_call)
    assert_object_lists_match(
        key.api_call.node_manager.nodes,
        fake_async_keys.api_call.node_manager.nodes,
    )
    assert_match_object(
        key.api_call.config.nearest_node,
        fake_async_keys.api_call.config.nearest_node,
    )
    assert key._endpoint_path == "/keys/1"  # noqa: WPS437


def test_get_existing_key(fake_keys: Keys) -> None:
    """Test that the Keys object can get an existing key."""
    key = fake_keys[1]
    fetched_key = fake_keys[1]

    assert len(fake_keys.keys) == 1

    assert key is fetched_key


def test_get_existing_key_async(fake_async_keys: AsyncKeys) -> None:
    """Test that the AsyncKeys object can get an existing key."""
    key = fake_async_keys[1]
    fetched_key = fake_async_keys[1]

    assert len(fake_async_keys.keys) == 1

    assert key is fetched_key


def test_actual_create(
    actual_keys: Keys,
) -> None:
    """Test that the Keys object can create a key on Typesense Server."""
    response = actual_keys.create(
        {
            "actions": ["documents:search"],
            "collections": ["companies"],
            "description": "Search-only key",
        },
    )

    assert_to_contain_object(
        response,
        {
            "actions": ["documents:search"],
            "collections": ["companies"],
            "description": "Search-only key",
            "autodelete": False,
        },
    )


def test_actual_retrieve(
    actual_keys: Keys,
    delete_all: None,
    delete_all_keys: None,
    create_key_id: int,
) -> None:
    """Test that the Keys object can retrieve a key from Typesense Server."""
    response = actual_keys.retrieve()
    assert len(response["keys"]) == 1
    assert_to_contain_object(
        response["keys"][0],
        {
            "actions": ["documents:search"],
            "collections": ["companies"],
            "description": "Search-only key",
            "autodelete": False,
            "id": create_key_id,
        },
    )


def _assert_scoped_search_key(key: str, search_key: str, search_parameters: dict) -> None:
    """Decode a scoped search key and verify its digest, prefix and embedded params.

    Shared by the sync and async scoped-key tests; key generation itself is a
    pure computation, so both client flavors must produce the same layout:
    44-char base64 HMAC digest, 4-char key prefix, then the JSON params.
    """
    decoded_key = base64.b64decode(key).decode("utf-8")

    extracted_key = {
        "digest": decoded_key[:44],
        "key_prefix": decoded_key[44:48],
        "params_str": decoded_key[48:],
    }
    assert extracted_key["key_prefix"] == search_key[:4]

    expected_params_str = json.dumps(search_parameters)
    assert extracted_key["params_str"] == expected_params_str

    recomputed_digest = base64.b64encode(
        hmac.new(
            search_key.encode("utf-8"),
            expected_params_str.encode("utf-8"),
            digestmod=hashlib.sha256,
        ).digest(),
    ).decode("utf-8")

    assert extracted_key["digest"] == recomputed_digest


def test_generate_scoped_search_key(
    fake_keys: Keys,
) -> None:
    """Test that the Keys object can generate a scoped search key."""
    # Use a real key that works on Typesense server
    search_key = "KmacipDKNqAM3YiigXfw5pZvNOrPQUba"
    search_parameters = {
        "q": "search query",
        "collection": "companies",
        "filter_by": "num_employees:>10",
    }

    key = fake_keys.generate_scoped_search_key(search_key, search_parameters)

    _assert_scoped_search_key(key, search_key, search_parameters)


async def test_actual_create_async(
    actual_async_keys: AsyncKeys,
) -> None:
    """Test that the AsyncKeys object can create a key on Typesense Server."""
    response = await actual_async_keys.create(
        {
            "actions": ["documents:search"],
            "collections": ["companies"],
            "description": "Search-only key",
        },
    )

    assert_to_contain_object(
        response,
        {
            "actions": ["documents:search"],
            "collections": ["companies"],
            "description": "Search-only key",
            "autodelete": False,
        },
    )


async def test_actual_retrieve_async(
    actual_async_keys: AsyncKeys,
    delete_all: None,
    delete_all_keys: None,
    create_key_id: int,
) -> None:
    """Test that the AsyncKeys object can retrieve a key from Typesense Server."""
    response = await actual_async_keys.retrieve()
    assert len(response["keys"]) == 1
    assert_to_contain_object(
        response["keys"][0],
        {
            "actions": ["documents:search"],
            "collections": ["companies"],
            "description": "Search-only key",
            "autodelete": False,
            "id": create_key_id,
        },
    )


def test_generate_scoped_search_key_async(
    fake_async_keys: AsyncKeys,
) -> None:
    """Test that the AsyncKeys object can generate a scoped search key."""
    # Use a real key that works on Typesense server
    search_key = "KmacipDKNqAM3YiigXfw5pZvNOrPQUba"
    search_parameters = {
        "q": "search query",
        "collection": "companies",
        "filter_by": "num_employees:>10",
    }

    key = fake_async_keys.generate_scoped_search_key(search_key, search_parameters)

    _assert_scoped_search_key(key, search_key, search_parameters)


# --- tests/metrics_test.py ---
"""Tests for the Metrics class."""

from tests.utils.object_assertions import (
    assert_match_object,
    assert_object_lists_match,
)
from typesense.sync.api_call import ApiCall
from typesense.async_.api_call import AsyncApiCall
from typesense.async_.metrics import AsyncMetrics
from typesense.sync.metrics import Metrics


def test_init(fake_api_call: ApiCall) -> None:
    """Test that the Metrics object is initialized correctly."""
    metrics = Metrics(fake_api_call)

    assert_match_object(metrics.api_call, fake_api_call)
    assert_object_lists_match(
        metrics.api_call.node_manager.nodes,
        fake_api_call.node_manager.nodes,
    )
    assert_match_object(
        metrics.api_call.config.nearest_node,
        fake_api_call.config.nearest_node,
    )
    assert metrics.resource_path == "/metrics.json"  # noqa: WPS437
# Every key the /metrics.json endpoint is expected to report.
_EXPECTED_METRIC_KEYS = (
    "system_cpu_active_percentage",
    "system_disk_total_bytes",
    "system_disk_used_bytes",
    "system_memory_total_bytes",
    "system_memory_used_bytes",
    "system_network_received_bytes",
    "system_network_sent_bytes",
    "typesense_memory_active_bytes",
    "typesense_memory_allocated_bytes",
    "typesense_memory_fragmentation_ratio",
    "typesense_memory_mapped_bytes",
    "typesense_memory_metadata_bytes",
    "typesense_memory_resident_bytes",
    "typesense_memory_retained_bytes",
)


def test_init_async(fake_async_api_call: AsyncApiCall) -> None:
    """Test that the AsyncMetrics object is initialized correctly."""
    async_metrics = AsyncMetrics(fake_async_api_call)

    assert_match_object(async_metrics.api_call, fake_async_api_call)
    assert_object_lists_match(
        async_metrics.api_call.node_manager.nodes,
        fake_async_api_call.node_manager.nodes,
    )
    assert_match_object(
        async_metrics.api_call.config.nearest_node,
        fake_async_api_call.config.nearest_node,
    )
    assert async_metrics.resource_path == "/metrics.json"  # noqa: WPS437


def test_actual_retrieve(actual_metrics: Metrics) -> None:
    """Test that the Metrics object can retrieve metrics on Typesense server and verify response structure."""
    response = actual_metrics.retrieve()

    for metric_key in _EXPECTED_METRIC_KEYS:
        assert metric_key in response


async def test_actual_retrieve_async(actual_async_metrics: AsyncMetrics) -> None:
    """Test that the AsyncMetrics object can retrieve metrics on Typesense server and verify response structure."""
    response = await actual_async_metrics.retrieve()

    for metric_key in _EXPECTED_METRIC_KEYS:
        assert metric_key in response


# --- tests/multi_search_test.py ---
"""Tests for the MultiSearch class."""

import pytest

from tests.fixtures.document_fixtures import Companies
from tests.utils.object_assertions import (
    assert_match_object,
    assert_object_lists_match,
    assert_to_contain_keys,
)
from typesense import exceptions
from typesense.sync.api_call import ApiCall
from typesense.async_.api_call import AsyncApiCall
from typesense.async_.multi_search import AsyncMultiSearch
from typesense.sync.multi_search import MultiSearch
from typesense.types.multi_search import MultiSearchRequestSchema


def test_init(fake_api_call: ApiCall) -> None:
    """Test that the MultiSearch object is initialized correctly."""
    searcher = MultiSearch(fake_api_call)

    assert_match_object(searcher.api_call, fake_api_call)
    assert_object_lists_match(
        searcher.api_call.node_manager.nodes,
        fake_api_call.node_manager.nodes,
    )
    assert_match_object(
        searcher.api_call.config.nearest_node,
        fake_api_call.config.nearest_node,
    )
# Keys every non-union multi-search result must expose.
_RESULT_KEYS = [
    "facet_counts",
    "found",
    "hits",
    "page",
    "out_of",
    "request_params",
    "search_time_ms",
    "search_cutoff",
]

# Keys every hit of a non-union result must expose.
_HIT_KEYS = ["document", "highlights", "highlight", "text_match", "text_match_info"]

# Keys a union multi-search response must expose.
_UNION_KEYS = [
    "found",
    "hits",
    "page",
    "out_of",
    "union_request_params",
    "search_time_ms",
    "search_cutoff",
]

# Keys every hit of a union response must expose.
_UNION_HIT_KEYS = [
    "collection",
    "document",
    "highlights",
    "highlight",
    "text_match",
    "text_match_info",
    "search_index",
]


def _assert_standard_response(response, expected_results: int) -> None:
    """Assert a non-union multi-search response has the expected shape."""
    assert len(response.get("results")) == expected_results
    for search_results in response.get("results"):
        assert_to_contain_keys(search_results, _RESULT_KEYS)
        assert_to_contain_keys(
            search_results.get("hits")[0],
            _HIT_KEYS,
        )


def _assert_union_response(response) -> None:
    """Assert a union multi-search response has the expected shape."""
    assert_to_contain_keys(response, _UNION_KEYS)
    assert_to_contain_keys(response.get("hits")[0], _UNION_HIT_KEYS)


def test_init_async(fake_async_api_call: AsyncApiCall) -> None:
    """Test that the AsyncMultiSearch object is initialized correctly."""
    multi_search = AsyncMultiSearch(fake_async_api_call)

    assert_match_object(multi_search.api_call, fake_async_api_call)
    assert_object_lists_match(
        multi_search.api_call.node_manager.nodes,
        fake_async_api_call.node_manager.nodes,
    )
    assert_match_object(
        multi_search.api_call.config.nearest_node,
        fake_async_api_call.config.nearest_node,
    )


def test_multi_search_single_search(
    actual_multi_search: MultiSearch,
    actual_api_call: ApiCall,
    delete_all: None,
    create_collection: None,
    create_document: None,
) -> None:
    """Test that the MultiSearch object can perform a single search."""
    request_params: MultiSearchRequestSchema = {
        "searches": [
            {"q": "com", "query_by": "company_name", "collection": "companies"},
        ],
    }
    response = actual_multi_search.perform(
        search_queries=request_params,
    )

    _assert_standard_response(response, 1)


def test_multi_search_multiple_searches(
    actual_multi_search: MultiSearch,
    actual_api_call: ApiCall,
    delete_all: None,
    create_collection: None,
    create_document: None,
) -> None:
    """Test that the MultiSearch object can perform multiple searches."""
    request_params: MultiSearchRequestSchema = {
        "searches": [
            {"q": "com", "query_by": "company_name", "collection": "companies"},
            {"q": "company", "query_by": "company_name", "collection": "companies"},
        ],
    }

    response = actual_multi_search.perform(search_queries=request_params)

    _assert_standard_response(response, len(request_params.get("searches")))


def test_multi_search_union(
    actual_multi_search: MultiSearch,
    actual_api_call: ApiCall,
    delete_all: None,
    create_collection: None,
    create_document: None,
) -> None:
    """Test that the MultiSearch object can perform a union multi-search."""
    request_params: MultiSearchRequestSchema = {
        "union": True,
        "searches": [
            {"q": "com", "query_by": "company_name", "collection": "companies"},
            {"q": "company", "query_by": "company_name", "collection": "companies"},
        ],
    }

    response = actual_multi_search.perform(search_queries=request_params)

    _assert_union_response(response)


def test_multi_search_array(
    actual_multi_search: MultiSearch,
    actual_api_call: ApiCall,
    delete_all: None,
    create_collection: None,
    create_document: None,
) -> None:
    """Test that the MultiSearch object can perform a search with an array query_by."""
    request_params: MultiSearchRequestSchema = {
        "searches": [
            {"q": "com", "query_by": ["company_name"], "collection": "companies"},
        ],
    }
    response = actual_multi_search.perform(search_queries=request_params)

    _assert_standard_response(response, 1)


def test_search_invalid_parameters(
    actual_multi_search: MultiSearch,
    actual_api_call: ApiCall,
    delete_all: None,
    create_collection: None,
    create_document: None,
) -> None:
    """Test that the MultiSearch object raises an error when invalid parameters are passed."""
    with pytest.raises(exceptions.InvalidParameter):
        actual_multi_search.perform(
            {
                "searches": [
                    {
                        "q": "com",
                        "query_by": "company_name",
                        "invalid": [Companies(company_name="", id="", num_employees=0)],
                    },
                ],
            },
        )

    with pytest.raises(exceptions.InvalidParameter):
        actual_multi_search.perform(
            {
                "searches": [
                    {
                        "q": "com",
                        "query_by": "company_name",
                        "invalid": Companies(company_name="", id="", num_employees=0),
                    },
                ],
            },
        )


async def test_multi_search_single_search_async(
    actual_async_multi_search: AsyncMultiSearch,
    delete_all: None,
    create_collection: None,
    create_document: None,
) -> None:
    """Test that the AsyncMultiSearch object can perform a single search."""
    request_params: MultiSearchRequestSchema = {
        "searches": [
            {"q": "com", "query_by": "company_name", "collection": "companies"},
        ],
    }
    response = await actual_async_multi_search.perform(
        search_queries=request_params,
    )

    _assert_standard_response(response, 1)


async def test_multi_search_multiple_searches_async(
    actual_async_multi_search: AsyncMultiSearch,
    delete_all: None,
    create_collection: None,
    create_document: None,
) -> None:
    """Test that the AsyncMultiSearch object can perform multiple searches."""
    request_params: MultiSearchRequestSchema = {
        "searches": [
            {"q": "com", "query_by": "company_name", "collection": "companies"},
            {"q": "company", "query_by": "company_name", "collection": "companies"},
        ],
    }

    response = await actual_async_multi_search.perform(search_queries=request_params)

    _assert_standard_response(response, len(request_params.get("searches")))


async def test_multi_search_union_async(
    actual_async_multi_search: AsyncMultiSearch,
    delete_all: None,
    create_collection: None,
    create_document: None,
) -> None:
    """Test that the AsyncMultiSearch object can perform multiple searches with union."""
    request_params: MultiSearchRequestSchema = {
        "union": True,
        "searches": [
            {"q": "com", "query_by": "company_name", "collection": "companies"},
            {"q": "company", "query_by": "company_name", "collection": "companies"},
        ],
    }

    response = await actual_async_multi_search.perform(search_queries=request_params)

    _assert_union_response(response)


async def test_multi_search_array_async(
    actual_async_multi_search: AsyncMultiSearch,
    delete_all: None,
    create_collection: None,
    create_document: None,
) -> None:
    """Test that the AsyncMultiSearch object can perform a search with an array query_by."""
    request_params: MultiSearchRequestSchema = {
        "searches": [
            {"q": "com", "query_by": ["company_name"], "collection": "companies"},
        ],
    }
    response = await actual_async_multi_search.perform(search_queries=request_params)

    _assert_standard_response(response, 1)


# --- tests/nl_search_model_test.py ---
"""Tests for the NLSearchModel class."""

from dotenv import load_dotenv

from typesense.async_.nl_search_model import AsyncNLSearchModel
from typesense.async_.nl_search_models import AsyncNLSearchModels
from typesense.sync.nl_search_model import NLSearchModel
from typesense.sync.nl_search_models import NLSearchModels

load_dotenv()


def test_init(fake_api_call: ApiCall) -> None:
    """Test that the NLSearchModel object is initialized correctly."""
    nl_search_model = NLSearchModel(
        fake_api_call,
        "nl_search_model_id",
    )

    assert nl_search_model.model_id == "nl_search_model_id"
    assert_match_object(nl_search_model.api_call, fake_api_call)
    assert_object_lists_match(
        nl_search_model.api_call.node_manager.nodes,
        fake_api_call.node_manager.nodes,
    )
    assert_match_object(
        nl_search_model.api_call.config.nearest_node,
        fake_api_call.config.nearest_node,
    )
    assert (
        nl_search_model._endpoint_path  # noqa: WPS437
        == "/nl_search_models/nl_search_model_id"
    )
fake_api_call.config.nearest_node, + ) + assert ( + nl_search_model._endpoint_path # noqa: WPS437 + == "/nl_search_models/nl_search_model_id" + ) + + +def test_init_async(fake_async_api_call: AsyncApiCall) -> None: + """Test that the AsyncNLSearchModel object is initialized correctly.""" + nl_search_model = AsyncNLSearchModel( + fake_async_api_call, + "nl_search_model_id", + ) + + assert nl_search_model.model_id == "nl_search_model_id" + assert_match_object(nl_search_model.api_call, fake_async_api_call) + assert_object_lists_match( + nl_search_model.api_call.node_manager.nodes, + fake_async_api_call.node_manager.nodes, + ) + assert_match_object( + nl_search_model.api_call.config.nearest_node, + fake_async_api_call.config.nearest_node, + ) + assert ( + nl_search_model._endpoint_path # noqa: WPS437 + == "/nl_search_models/nl_search_model_id" + ) + + +@pytest.mark.open_ai +def test_actual_retrieve( + actual_nl_search_models: NLSearchModels, + delete_all_nl_search_models: None, + create_nl_search_model: str, +) -> None: + """Test it can retrieve an NL search model from Typesense Server.""" + response = actual_nl_search_models[create_nl_search_model].retrieve() + + assert_to_contain_keys( + response, + ["id", "model_name", "system_prompt", "max_bytes", "api_key"], + ) + assert response.get("id") == create_nl_search_model + + +@pytest.mark.open_ai +def test_actual_update( + actual_nl_search_models: NLSearchModels, + delete_all_nl_search_models: None, + create_nl_search_model: str, +) -> None: + """Test that it can update an NL search model from Typesense Server.""" + response = actual_nl_search_models[create_nl_search_model].update( + {"system_prompt": "This is a new system prompt for NL search"}, + ) + + assert_to_contain_keys( + response, + [ + "id", + "model_name", + "system_prompt", + "max_bytes", + "api_key", + ], + ) + + assert response.get("system_prompt") == "This is a new system prompt for NL search" + assert response.get("id") == create_nl_search_model + + 
+@pytest.mark.open_ai +def test_actual_delete( + actual_nl_search_models: NLSearchModels, + delete_all_nl_search_models: None, + create_nl_search_model: str, +) -> None: + """Test that it can delete an NL search model from Typesense Server.""" + response = actual_nl_search_models[create_nl_search_model].delete() + + assert_to_contain_keys( + response, + ["id"], + ) + + assert response.get("id") == create_nl_search_model + + +@pytest.mark.open_ai +async def test_actual_retrieve_async( + actual_async_nl_search_models: AsyncNLSearchModels, + delete_all_nl_search_models: None, + create_nl_search_model: str, +) -> None: + """Test it can retrieve an NL search model from Typesense Server.""" + response = await actual_async_nl_search_models[create_nl_search_model].retrieve() + + assert_to_contain_keys( + response, + ["id", "model_name", "system_prompt", "max_bytes", "api_key"], + ) + assert response.get("id") == create_nl_search_model + + +@pytest.mark.open_ai +async def test_actual_update_async( + actual_async_nl_search_models: AsyncNLSearchModels, + delete_all_nl_search_models: None, + create_nl_search_model: str, +) -> None: + """Test that it can update an NL search model from Typesense Server.""" + response = await actual_async_nl_search_models[create_nl_search_model].update( + {"system_prompt": "This is a new system prompt for NL search"}, + ) + + assert_to_contain_keys( + response, + [ + "id", + "model_name", + "system_prompt", + "max_bytes", + "api_key", + ], + ) + + assert response.get("system_prompt") == "This is a new system prompt for NL search" + assert response.get("id") == create_nl_search_model + + +@pytest.mark.open_ai +async def test_actual_delete_async( + actual_async_nl_search_models: AsyncNLSearchModels, + delete_all_nl_search_models: None, + create_nl_search_model: str, +) -> None: + """Test that it can delete an NL search model from Typesense Server.""" + response = await actual_async_nl_search_models[create_nl_search_model].delete() + + 
assert_to_contain_keys( + response, + ["id"], + ) + + assert response.get("id") == create_nl_search_model diff --git a/tests/nl_search_models_test.py b/tests/nl_search_models_test.py new file mode 100644 index 0000000..bf741a3 --- /dev/null +++ b/tests/nl_search_models_test.py @@ -0,0 +1,211 @@ +"""Tests for the NLSearchModels class.""" + + +import os +import sys + +import pytest + +if sys.version_info >= (3, 11): + pass +else: + pass + +from tests.utils.object_assertions import ( + assert_match_object, + assert_object_lists_match, + assert_to_contain_keys, + assert_to_contain_object, +) +from typesense.sync.api_call import ApiCall +from typesense.async_.api_call import AsyncApiCall +from typesense.async_.nl_search_models import AsyncNLSearchModels +from typesense.sync.nl_search_models import NLSearchModels + + +def test_init(fake_api_call: ApiCall) -> None: + """Test that the NLSearchModels object is initialized correctly.""" + nl_search_models = NLSearchModels(fake_api_call) + + assert_match_object(nl_search_models.api_call, fake_api_call) + assert_object_lists_match( + nl_search_models.api_call.node_manager.nodes, + fake_api_call.node_manager.nodes, + ) + assert_match_object( + nl_search_models.api_call.config.nearest_node, + fake_api_call.config.nearest_node, + ) + + assert not nl_search_models.nl_search_models + + +def test_init_async(fake_async_api_call: AsyncApiCall) -> None: + """Test that the AsyncNLSearchModels object is initialized correctly.""" + nl_search_models = AsyncNLSearchModels(fake_async_api_call) + + assert_match_object(nl_search_models.api_call, fake_async_api_call) + assert_object_lists_match( + nl_search_models.api_call.node_manager.nodes, + fake_async_api_call.node_manager.nodes, + ) + assert_match_object( + nl_search_models.api_call.config.nearest_node, + fake_async_api_call.config.nearest_node, + ) + + assert not nl_search_models.nl_search_models + + +def test_get_missing_nl_search_model( + fake_nl_search_models: NLSearchModels, +) -> 
None: + """Test that the NLSearchModels object can get a missing nl_search_model.""" + nl_search_model = fake_nl_search_models["nl_search_model_id"] + + assert_match_object( + nl_search_model.api_call, + fake_nl_search_models.api_call, + ) + assert_object_lists_match( + nl_search_model.api_call.node_manager.nodes, + fake_nl_search_models.api_call.node_manager.nodes, + ) + assert_match_object( + nl_search_model.api_call.config.nearest_node, + fake_nl_search_models.api_call.config.nearest_node, + ) + assert ( + nl_search_model._endpoint_path # noqa: WPS437 + == "/nl_search_models/nl_search_model_id" + ) + + +def test_get_missing_nl_search_model_async( + fake_async_nl_search_models: AsyncNLSearchModels, +) -> None: + """Test that the AsyncNLSearchModels object can get a missing nl_search_model.""" + nl_search_model = fake_async_nl_search_models["nl_search_model_id"] + + assert_match_object( + nl_search_model.api_call, + fake_async_nl_search_models.api_call, + ) + assert_object_lists_match( + nl_search_model.api_call.node_manager.nodes, + fake_async_nl_search_models.api_call.node_manager.nodes, + ) + assert_match_object( + nl_search_model.api_call.config.nearest_node, + fake_async_nl_search_models.api_call.config.nearest_node, + ) + assert ( + nl_search_model._endpoint_path # noqa: WPS437 + == "/nl_search_models/nl_search_model_id" + ) + + +def test_get_existing_nl_search_model( + fake_nl_search_models: NLSearchModels, +) -> None: + """Test that the NLSearchModels object can get an existing nl_search_model.""" + nl_search_model = fake_nl_search_models["nl_search_model_id"] + fetched_nl_search_model = fake_nl_search_models["nl_search_model_id"] + + assert len(fake_nl_search_models.nl_search_models) == 1 + + assert nl_search_model is fetched_nl_search_model + + +def test_get_existing_nl_search_model_async( + fake_async_nl_search_models: AsyncNLSearchModels, +) -> None: + """Test that the AsyncNLSearchModels object can get an existing nl_search_model.""" + nl_search_model 
= fake_async_nl_search_models["nl_search_model_id"] + fetched_nl_search_model = fake_async_nl_search_models["nl_search_model_id"] + + assert len(fake_async_nl_search_models.nl_search_models) == 1 + + assert nl_search_model is fetched_nl_search_model + + +@pytest.mark.open_ai +def test_actual_create( + actual_nl_search_models: NLSearchModels, +) -> None: + """Test that it can create an NL search model on Typesense Server.""" + response = actual_nl_search_models.create( + { + "api_key": os.environ.get("OPEN_AI_KEY", "test-api-key"), + "max_bytes": 16384, + "model_name": "openai/gpt-3.5-turbo", + "system_prompt": "This is meant for testing purposes", + }, + ) + + assert_to_contain_keys( + response, + ["id", "api_key", "max_bytes", "model_name", "system_prompt"], + ) + + +@pytest.mark.open_ai +def test_actual_retrieve( + actual_nl_search_models: NLSearchModels, + delete_all_nl_search_models: None, + create_nl_search_model: str, +) -> None: + """Test that it can retrieve NL search models from Typesense Server.""" + response = actual_nl_search_models.retrieve() + assert len(response) == 1 + assert_to_contain_object( + response[0], + { + "id": create_nl_search_model, + }, + ) + assert_to_contain_keys( + response[0], + ["id", "api_key", "max_bytes", "model_name", "system_prompt"], + ) + + +@pytest.mark.open_ai +async def test_actual_create_async( + actual_async_nl_search_models: AsyncNLSearchModels, +) -> None: + """Test that it can create an NL search model on Typesense Server.""" + response = await actual_async_nl_search_models.create( + { + "api_key": os.environ.get("OPEN_AI_KEY", "test-api-key"), + "max_bytes": 16384, + "model_name": "openai/gpt-3.5-turbo", + "system_prompt": "This is meant for testing purposes", + }, + ) + + assert_to_contain_keys( + response, + ["id", "api_key", "max_bytes", "model_name", "system_prompt"], + ) + + +@pytest.mark.open_ai +async def test_actual_retrieve_async( + actual_async_nl_search_models: AsyncNLSearchModels, + 
delete_all_nl_search_models: None, + create_nl_search_model: str, +) -> None: + """Test that it can retrieve NL search models from Typesense Server.""" + response = await actual_async_nl_search_models.retrieve() + assert len(response) == 1 + assert_to_contain_object( + response[0], + { + "id": create_nl_search_model, + }, + ) + assert_to_contain_keys( + response[0], + ["id", "api_key", "max_bytes", "model_name", "system_prompt"], + ) diff --git a/tests/node_test.py b/tests/node_test.py new file mode 100644 index 0000000..23ab20d --- /dev/null +++ b/tests/node_test.py @@ -0,0 +1,65 @@ +"""Tests for the Node class.""" + +import time + +import pytest + +from tests.utils.object_assertions import assert_match_object +from typesense.configuration import Node +from typesense.exceptions import ConfigError + + +def test_node_initialization() -> None: + """Test the initialization of the Node class using an object.""" + node = Node(host="localhost", port=8108, path="/path", protocol="http") + + current_time = int(time.time()) + expected = { + "host": "localhost", + "port": 8108, + "path": "/path", + "protocol": "http", + "healthy": True, + "last_access_ts": current_time, + } + assert_match_object(node, expected) + + +def test_node_from_url() -> None: + """Test the initialization of the Node class using a URL.""" + node = Node.from_url("http://localhost:8108/path") + + current_time = int(time.time()) + expected = { + "host": "localhost", + "port": 8108, + "path": "/path", + "protocol": "http", + "healthy": True, + "last_access_ts": current_time, + } + assert_match_object(node, expected) + + +def test_node_from_url_missing_hostname() -> None: + """Test the initialization of the Node class using a URL without a host name.""" + with pytest.raises(ConfigError, match="Node URL does not contain the host name."): + Node.from_url("http://:8108/path") + + +def test_node_from_url_missing_port() -> None: + """Test the initialization of the Node class using a URL without a port.""" + with 
pytest.raises(ConfigError, match="Node URL does not contain the port."): + Node.from_url("http://localhost:/path") + + +def test_node_from_url_missing_scheme() -> None: + """Test the initialization of the Node class using a URL without a scheme.""" + with pytest.raises(ConfigError, match="Node URL does not contain the protocol."): + Node.from_url("//localhost:8108/path") + + +def test_node_url() -> None: + """Test the URL method of the Node class.""" + node = Node(host="localhost", port=8108, path="/path", protocol="http") + assert node.url() == "http://localhost:8108/path" diff --git a/tests/operations_test.py b/tests/operations_test.py new file mode 100644 index 0000000..b48b946 --- /dev/null +++ b/tests/operations_test.py @@ -0,0 +1,157 @@ +"""Tests for the Operations class.""" + + +import pytest + +from tests.utils.object_assertions import assert_match_object, assert_object_lists_match +from typesense.sync.api_call import ApiCall +from typesense.async_.api_call import AsyncApiCall +from typesense.async_.operations import AsyncOperations +from typesense.exceptions import ObjectNotFound +from typesense.sync.operations import Operations + + +def test_init(fake_api_call: ApiCall) -> None: + """Test that the Operations object is initialized correctly.""" + operations = Operations(fake_api_call) + + assert_match_object(operations.api_call, fake_api_call) + assert_object_lists_match( + operations.api_call.node_manager.nodes, + fake_api_call.node_manager.nodes, + ) + assert_match_object( + operations.api_call.config.nearest_node, + fake_api_call.config.nearest_node, + ) + assert ( + operations._endpoint_path("resource") == "/operations/resource" # noqa: WPS437 + ) + + +def test_init_async(fake_async_api_call: AsyncApiCall) -> None: + """Test that the AsyncOperations object is initialized correctly.""" + operations = AsyncOperations(fake_async_api_call) + + assert_match_object(operations.api_call, fake_async_api_call) + assert_object_lists_match( + 
operations.api_call.node_manager.nodes, + fake_async_api_call.node_manager.nodes, + ) + assert_match_object( + operations.api_call.config.nearest_node, + fake_async_api_call.config.nearest_node, + ) + assert ( + operations._endpoint_path("resource") == "/operations/resource" # noqa: WPS437 + ) + + +def test_vote(actual_operations: Operations) -> None: + """Test that the Operations object can perform the vote operation.""" + response = actual_operations.perform("vote") + + # It will error on single node clusters if asserted to True + assert response["success"] is not None + + +def test_db_compact(actual_operations: Operations) -> None: + """Test that the Operations object can perform the db/compact operation.""" + response = actual_operations.perform("db/compact") + + assert response["success"] + + +def test_cache_clear(actual_operations: Operations) -> None: + """Test that the Operations object can perform the cache/clear operation.""" + response = actual_operations.perform("cache/clear") + + assert response["success"] + + +def test_snapshot(actual_operations: Operations) -> None: + """Test that the Operations object can perform the snapshot operation.""" + response = actual_operations.perform( + "snapshot", + {"snapshot_path": "/tmp"}, # noqa: S108 + ) + + assert response["success"] + + +def test_health(actual_operations: Operations) -> None: + """Test that the Operations object can perform the health operation.""" + response = actual_operations.is_healthy() + + assert response + + +def test_log_slow_requests_time_ms(actual_operations: Operations) -> None: + """Test that the Operations object can perform the log_slow_requests_time_ms operation.""" + response = actual_operations.toggle_slow_request_log( + {"log_slow_requests_time_ms": 100}, + ) + + assert response["success"] + + +def test_invalid_operation(actual_operations: Operations) -> None: + """Test that the Operations object throws an error for an invalid operation.""" + with pytest.raises(ObjectNotFound): + 
actual_operations.perform("invalid") + + +async def test_vote_async(actual_async_operations: AsyncOperations) -> None: + """Test that the AsyncOperations object can perform the vote operation.""" + response = await actual_async_operations.perform("vote") + + # It will error on single node clusters if asserted to True + assert response["success"] is not None + + +async def test_db_compact_async(actual_async_operations: AsyncOperations) -> None: + """Test that the AsyncOperations object can perform the db/compact operation.""" + response = await actual_async_operations.perform("db/compact") + + assert response["success"] + + +async def test_cache_clear_async(actual_async_operations: AsyncOperations) -> None: + """Test that the AsyncOperations object can perform the cache/clear operation.""" + response = await actual_async_operations.perform("cache/clear") + + assert response["success"] + + +async def test_snapshot_async(actual_async_operations: AsyncOperations) -> None: + """Test that the AsyncOperations object can perform the snapshot operation.""" + response = await actual_async_operations.perform( + "snapshot", + {"snapshot_path": "/tmp"}, # noqa: S108 + ) + + assert response["success"] + + +async def test_health_async(actual_async_operations: AsyncOperations) -> None: + """Test that the AsyncOperations object can perform the health operation.""" + response = await actual_async_operations.is_healthy() + + assert response + + +async def test_log_slow_requests_time_ms_async( + actual_async_operations: AsyncOperations, +) -> None: + """Test that the AsyncOperations object can perform the log_slow_requests_time_ms operation.""" + response = await actual_async_operations.toggle_slow_request_log( + {"log_slow_requests_time_ms": 100}, + ) + + assert response["success"] + + +async def test_invalid_operation_async(actual_async_operations: AsyncOperations) -> None: + """Test that the AsyncOperations object throws an error for an invalid operation.""" + with 
pytest.raises(ObjectNotFound): + await actual_async_operations.perform("invalid") diff --git a/tests/override_test.py b/tests/override_test.py new file mode 100644 index 0000000..89526c7 --- /dev/null +++ b/tests/override_test.py @@ -0,0 +1,145 @@ +"""Tests for the Override class.""" + + +import pytest + +from tests.utils.object_assertions import ( + assert_match_object, + assert_object_lists_match, + assert_to_contain_object, +) +from typesense.sync.api_call import ApiCall +from typesense.async_.api_call import AsyncApiCall +from typesense.async_.collections import AsyncCollections +from typesense.async_.override import AsyncOverride +from typesense.sync.collections import Collections +from typesense.sync.override import Override +from tests.utils.version import is_v30_or_above +from typesense.sync.client import Client + + +pytestmark = pytest.mark.skipif( + is_v30_or_above( + Client( + { + "api_key": "xyz", + "nodes": [{"host": "localhost", "port": 8108, "protocol": "http"}], + } + ) + ), + reason="Run override tests only on less than v30", +) + + +def test_init(fake_api_call: ApiCall) -> None: + """Test that the Override object is initialized correctly.""" + override = Override(fake_api_call, "companies", "company_override") + + assert override.collection_name == "companies" + assert override.override_id == "company_override" + assert_match_object(override.api_call, fake_api_call) + assert_object_lists_match( + override.api_call.node_manager.nodes, + fake_api_call.node_manager.nodes, + ) + assert_match_object( + override.api_call.config.nearest_node, + fake_api_call.config.nearest_node, + ) + assert ( + override._endpoint_path() # noqa: WPS437 + == "/collections/companies/overrides/company_override" + ) + + +def test_actual_retrieve( + actual_collections: Collections, + delete_all: None, + create_override: None, +) -> None: + """Test that the Override object can retrieve an override from Typesense Server.""" + response = 
actual_collections["companies"].overrides["company_override"].retrieve() + + assert response["rule"] == { + "match": "exact", + "query": "companies", + } + assert response["filter_by"] == "num_employees>10" + assert_to_contain_object( + response, + { + "rule": { + "match": "exact", + "query": "companies", + }, + "filter_by": "num_employees>10", + }, + ) + + +def test_actual_delete( + actual_collections: Collections, + delete_all: None, + create_override: None, +) -> None: + """Test that the Override object can delete an override from Typesense Server.""" + response = actual_collections["companies"].overrides["company_override"].delete() + + assert response == {"id": "company_override"} + + +def test_init_async(fake_async_api_call: AsyncApiCall) -> None: + """Test that the AsyncOverride object is initialized correctly.""" + override = AsyncOverride(fake_async_api_call, "companies", "company_override") + + assert override.collection_name == "companies" + assert override.override_id == "company_override" + assert_match_object(override.api_call, fake_async_api_call) + assert_object_lists_match( + override.api_call.node_manager.nodes, + fake_async_api_call.node_manager.nodes, + ) + assert_match_object( + override.api_call.config.nearest_node, + fake_async_api_call.config.nearest_node, + ) + assert ( + override._endpoint_path() # noqa: WPS437 + == "/collections/companies/overrides/company_override" + ) + + +async def test_actual_retrieve_async( + actual_async_collections: AsyncCollections, + delete_all: None, + create_override: None, +) -> None: + """Test that the AsyncOverride object can retrieve an override from Typesense Server.""" + response = await actual_async_collections["companies"].overrides["company_override"].retrieve() + + assert response["rule"] == { + "match": "exact", + "query": "companies", + } + assert response["filter_by"] == "num_employees>10" + assert_to_contain_object( + response, + { + "rule": { + "match": "exact", + "query": "companies", + }, + 
"filter_by": "num_employees>10", + }, + ) + + +async def test_actual_delete_async( + actual_async_collections: AsyncCollections, + delete_all: None, + create_override: None, +) -> None: + """Test that the AsyncOverride object can delete an override from Typesense Server.""" + response = await actual_async_collections["companies"].overrides["company_override"].delete() + + assert response == {"id": "company_override"} diff --git a/tests/overrides_test.py b/tests/overrides_test.py new file mode 100644 index 0000000..a9376b5 --- /dev/null +++ b/tests/overrides_test.py @@ -0,0 +1,276 @@ +"""Tests for the Overrides class.""" + + +import pytest + +from tests.utils.object_assertions import ( + assert_match_object, + assert_object_lists_match, + assert_to_contain_object, +) +from typesense.sync.api_call import ApiCall +from typesense.async_.api_call import AsyncApiCall +from typesense.async_.collections import AsyncCollections +from typesense.sync.collections import Collections +from typesense.sync.overrides import Overrides +from tests.utils.version import is_v30_or_above +from typesense.sync.client import Client + +pytestmark = pytest.mark.skipif( + is_v30_or_above( + Client( + { + "api_key": "xyz", + "nodes": [{"host": "localhost", "port": 8108, "protocol": "http"}], + } + ) + ), + reason="Run override tests only on less than v30", +) + + +def test_init(fake_api_call: ApiCall) -> None: + """Test that the Overrides object is initialized correctly.""" + overrides = Overrides(fake_api_call, "companies") + + assert_match_object(overrides.api_call, fake_api_call) + assert_object_lists_match( + overrides.api_call.node_manager.nodes, + fake_api_call.node_manager.nodes, + ) + assert_match_object( + overrides.api_call.config.nearest_node, + fake_api_call.config.nearest_node, + ) + + assert not overrides.overrides + + +def test_get_missing_override(fake_overrides: Overrides) -> None: + """Test that the Overrides object can get a missing override.""" + override = 
fake_overrides["company_override"] + + assert override.override_id == "company_override" + assert_match_object(override.api_call, fake_overrides.api_call) + assert_object_lists_match( + override.api_call.node_manager.nodes, fake_overrides.api_call.node_manager.nodes + ) + assert_match_object( + override.api_call.config.nearest_node, + fake_overrides.api_call.config.nearest_node, + ) + assert override.collection_name == "companies" + assert ( + override._endpoint_path() # noqa: WPS437 + == "/collections/companies/overrides/company_override" + ) + + +def test_get_existing_override(fake_overrides: Overrides) -> None: + """Test that the Overrides object can get an existing override.""" + override = fake_overrides["companies"] + fetched_override = fake_overrides["companies"] + + assert len(fake_overrides.overrides) == 1 + + assert override is fetched_override + + +def test_actual_create( + actual_overrides: Overrides, + delete_all: None, + create_collection: None, +) -> None: + """Test that the Overrides object can create an override on Typesense Server.""" + response = actual_overrides.upsert( + "company_override", + { + "rule": {"match": "exact", "query": "companies"}, + "filter_by": "num_employees>10", + }, + ) + + assert response == { + "id": "company_override", + "rule": {"match": "exact", "query": "companies"}, + "filter_by": "num_employees>10", + } + + +def test_actual_update( + actual_overrides: Overrides, + delete_all: None, + create_collection: None, +) -> None: + """Test that the Overrides object can update an override on Typesense Server.""" + create_response = actual_overrides.upsert( + "company_override", + { + "rule": {"match": "exact", "query": "companies"}, + "filter_by": "num_employees>10", + }, + ) + + assert create_response == { + "id": "company_override", + "rule": {"match": "exact", "query": "companies"}, + "filter_by": "num_employees>10", + } + + update_response = actual_overrides.upsert( + "company_override", + { + "rule": {"match": "contains", 
"query": "companies"}, + "filter_by": "num_employees>20", + }, + ) + + assert update_response == { + "id": "company_override", + "rule": {"match": "contains", "query": "companies"}, + "filter_by": "num_employees>20", + } + + +def test_actual_retrieve( + delete_all: None, + create_override: None, + actual_collections: Collections, +) -> None: + """Test that the Overrides object can retrieve an override from Typesense Server.""" + response = actual_collections["companies"].overrides.retrieve() + + assert len(response["overrides"]) == 1 + assert_to_contain_object( + response["overrides"][0], + { + "id": "company_override", + "rule": {"match": "exact", "query": "companies"}, + "filter_by": "num_employees>10", + }, + ) + + +def test_init_async(fake_async_api_call: AsyncApiCall) -> None: + """Test that the AsyncOverrides object is initialized correctly.""" + from typesense.async_.overrides import AsyncOverrides + + overrides = AsyncOverrides(fake_async_api_call, "companies") + + assert_match_object(overrides.api_call, fake_async_api_call) + assert_object_lists_match( + overrides.api_call.node_manager.nodes, + fake_async_api_call.node_manager.nodes, + ) + assert_match_object( + overrides.api_call.config.nearest_node, + fake_async_api_call.config.nearest_node, + ) + + assert not overrides.overrides + + +def test_get_missing_override_async(fake_async_overrides) -> None: + """Test that the AsyncOverrides object can get a missing override.""" + + override = fake_async_overrides["company_override"] + + assert override.override_id == "company_override" + assert_match_object(override.api_call, fake_async_overrides.api_call) + assert_object_lists_match( + override.api_call.node_manager.nodes, fake_async_overrides.api_call.node_manager.nodes + ) + assert_match_object( + override.api_call.config.nearest_node, + fake_async_overrides.api_call.config.nearest_node, + ) + assert override.collection_name == "companies" + assert ( + override._endpoint_path() # noqa: WPS437 + == 
"/collections/companies/overrides/company_override" + ) + + +def test_get_existing_override_async(fake_async_overrides) -> None: + """Test that the AsyncOverrides object can get an existing override.""" + override = fake_async_overrides["companies"] + fetched_override = fake_async_overrides["companies"] + + assert len(fake_async_overrides.overrides) == 1 + + assert override is fetched_override + + +async def test_actual_create_async( + actual_async_overrides, + delete_all: None, + create_collection: None, +) -> None: + """Test that the AsyncOverrides object can create an override on Typesense Server.""" + response = await actual_async_overrides.upsert( + "company_override", + { + "rule": {"match": "exact", "query": "companies"}, + "filter_by": "num_employees>10", + }, + ) + + assert response == { + "id": "company_override", + "rule": {"match": "exact", "query": "companies"}, + "filter_by": "num_employees>10", + } + + +async def test_actual_update_async( + actual_async_overrides, + delete_all: None, + create_collection: None, +) -> None: + """Test that the AsyncOverrides object can update an override on Typesense Server.""" + create_response = await actual_async_overrides.upsert( + "company_override", + { + "rule": {"match": "exact", "query": "companies"}, + "filter_by": "num_employees>10", + }, + ) + + assert create_response == { + "id": "company_override", + "rule": {"match": "exact", "query": "companies"}, + "filter_by": "num_employees>10", + } + + update_response = await actual_async_overrides.upsert( + "company_override", + { + "rule": {"match": "contains", "query": "companies"}, + "filter_by": "num_employees>20", + }, + ) + + assert update_response == { + "id": "company_override", + "rule": {"match": "contains", "query": "companies"}, + "filter_by": "num_employees>20", + } + + +async def test_actual_retrieve_async( + delete_all: None, + create_override: None, + actual_async_collections: AsyncCollections, +) -> None: + """Test that the AsyncOverrides object can 
retrieve an override from Typesense Server.""" + response = await actual_async_collections["companies"].overrides.retrieve() + + assert len(response["overrides"]) == 1 + assert_to_contain_object( + response["overrides"][0], + { + "id": "company_override", + "rule": {"match": "exact", "query": "companies"}, + "filter_by": "num_employees>10", + }, + ) diff --git a/tests/preprocess_test.py b/tests/preprocess_test.py new file mode 100644 index 0000000..4e081ad --- /dev/null +++ b/tests/preprocess_test.py @@ -0,0 +1,83 @@ +"""Tests for the preprocess module.""" + +import pytest + +from typesense import exceptions +from typesense.preprocess import ( + ParamSchema, + process_param_list, + stringify, + stringify_search_params, +) + + +def test_stringify_str() -> None: + """Test that the function can stringify a string.""" + assert stringify("string") == "string" + + +def test_stringify_bool() -> None: + """Test that the function can stringify a boolean.""" + assert stringify(True) == "true" + + +def test_stringify_int() -> None: + """Test that the function can stringify an integer.""" + assert stringify(42) == "42" + + +def test_stringify_float() -> None: + """Test that the function can stringify a float.""" + with pytest.raises(exceptions.InvalidParameter): + stringify(3.15) + + +def test_stringify_list() -> None: + """Test that the function can stringify a list.""" + with pytest.raises(exceptions.InvalidParameter): + stringify([1, 2, 3]) + + +def test_concat_string_list() -> None: + """Test that the function can concatenate a list of strings.""" + assert process_param_list(["a", "b", "c"]) == "a,b,c" + + +def test_concat_bool_list() -> None: + """Test that the function can concatenate a list of booleans.""" + assert process_param_list([True, False, True]) == "true,false,true" + + +def test_concat_int_list() -> None: + """Test that the function can concatenate a list of integers.""" + assert process_param_list([1, 2, 3]) == "1,2,3" + + +def test_concat_list_list() -> 
None: + """Test that the function can concatenate a list of lists.""" + with pytest.raises(exceptions.InvalidParameter): + process_param_list([[1, 2], [3, 4], [5, 6]]) + + +def test_concat_params() -> None: + """Test that the function can concatenate a dictionary of parameters.""" + test_params: ParamSchema = { + "one": "one", + "two": 2, + "three": True, + "four": [1, 2, 3], + "five": ["one", "two", "three"], + "six": [True, False], + "seven": ["one", 2, True], + } + + processed_params = stringify_search_params(test_params) + assert processed_params == { + "one": "one", + "two": "2", + "three": "true", + "four": "1,2,3", + "five": "one,two,three", + "six": "true,false", + "seven": "one,2,true", + } diff --git a/tests/stemming_test.py b/tests/stemming_test.py new file mode 100644 index 0000000..9b53da9 --- /dev/null +++ b/tests/stemming_test.py @@ -0,0 +1,78 @@ +"""Tests for stemming.""" + +from typesense.async_.stemming import AsyncStemming +from typesense.sync.stemming import Stemming + + +def test_actual_upsert( + actual_stemming: Stemming, +) -> None: + """Test that it can upsert a stemming dictionary to Typesense Server.""" + response = actual_stemming.dictionaries.upsert( + "set_1", + [{"word": "running", "root": "run"}, {"word": "fishing", "root": "fish"}], + ) + + assert response == [ + {"word": "running", "root": "run"}, + {"word": "fishing", "root": "fish"}, + ] + + +def test_actual_retrieve_many( + actual_stemming: Stemming, +) -> None: + """Test that it can retrieve all stemming dictionaries from Typesense Server.""" + response = actual_stemming.dictionaries.retrieve() + assert response == {"dictionaries": ["set_1"]} + + +def test_actual_retrieve( + actual_stemming: Stemming, +) -> None: + """Test that it can retrieve a single stemming dictionary from Typesense Server.""" + response = actual_stemming.dictionaries["set_1"].retrieve() + assert response == { + "id": "set_1", + "words": [ + {"word": "running", "root": "run"}, + {"word": "fishing", "root": 
"fish"}, + ], + } + + +async def test_actual_upsert_async( + actual_async_stemming: AsyncStemming, +) -> None: + """Test that it can upsert a stemming dictionary to Typesense Server.""" + response = await actual_async_stemming.dictionaries.upsert( + "set_1", + [{"word": "running", "root": "run"}, {"word": "fishing", "root": "fish"}], + ) + + assert response == [ + {"word": "running", "root": "run"}, + {"word": "fishing", "root": "fish"}, + ] + + +async def test_actual_retrieve_many_async( + actual_async_stemming: AsyncStemming, +) -> None: + """Test that it can retrieve all stemming dictionaries from Typesense Server.""" + response = await actual_async_stemming.dictionaries.retrieve() + assert response == {"dictionaries": ["set_1"]} + + +async def test_actual_retrieve_async( + actual_async_stemming: AsyncStemming, +) -> None: + """Test that it can retrieve a single stemming dictionary from Typesense Server.""" + response = await actual_async_stemming.dictionaries["set_1"].retrieve() + assert response == { + "id": "set_1", + "words": [ + {"word": "running", "root": "run"}, + {"word": "fishing", "root": "fish"}, + ], + } diff --git a/tests/stopwords_set_test.py b/tests/stopwords_set_test.py new file mode 100644 index 0000000..e330319 --- /dev/null +++ b/tests/stopwords_set_test.py @@ -0,0 +1,98 @@ +"""Tests for the StopwordsSet class.""" + +from tests.utils.object_assertions import assert_match_object, assert_object_lists_match +from typesense.sync.api_call import ApiCall +from typesense.async_.api_call import AsyncApiCall +from typesense.async_.stopwords import AsyncStopwords +from typesense.sync.stopwords import Stopwords +from typesense.sync.stopwords_set import StopwordsSet + + +def test_init(fake_api_call: ApiCall) -> None: + """Test that the StopwordsSet object is initialized correctly.""" + stopword_set = StopwordsSet(fake_api_call, "company_stopwords") + + assert stopword_set.stopwords_set_id == "company_stopwords" + assert_match_object(stopword_set.api_call, 
fake_api_call) + assert_object_lists_match( + stopword_set.api_call.node_manager.nodes, + fake_api_call.node_manager.nodes, + ) + assert_match_object( + stopword_set.api_call.config.nearest_node, + fake_api_call.config.nearest_node, + ) + assert stopword_set._endpoint_path == "/stopwords/company_stopwords" # noqa: WPS437 + + +def test_actual_retrieve( + actual_stopwords: Stopwords, + delete_all_stopwords: None, + delete_all: None, + create_stopword: None, +) -> None: + """Test that the StopwordsSet object can retrieve an stopword_set from Typesense Server.""" + response = actual_stopwords["company_stopwords"].retrieve() + + assert response == { + "stopwords": { + "id": "company_stopwords", + "stopwords": ["and", "is", "the"], + }, + } + + +def test_actual_delete( + actual_stopwords: Stopwords, + create_stopword: None, +) -> None: + """Test that the StopwordsSet object can delete an stopword_set from Typesense Server.""" + response = actual_stopwords["company_stopwords"].delete() + + assert response == {"id": "company_stopwords"} + + +def test_init_async(fake_async_api_call: AsyncApiCall) -> None: + """Test that the AsyncStopwordsSet object is initialized correctly.""" + from typesense.async_.stopwords_set import AsyncStopwordsSet + + stopword_set = AsyncStopwordsSet(fake_async_api_call, "company_stopwords") + + assert stopword_set.stopwords_set_id == "company_stopwords" + assert_match_object(stopword_set.api_call, fake_async_api_call) + assert_object_lists_match( + stopword_set.api_call.node_manager.nodes, + fake_async_api_call.node_manager.nodes, + ) + assert_match_object( + stopword_set.api_call.config.nearest_node, + fake_async_api_call.config.nearest_node, + ) + assert stopword_set._endpoint_path == "/stopwords/company_stopwords" # noqa: WPS437 + + +async def test_actual_retrieve_async( + actual_async_stopwords: AsyncStopwords, + delete_all_stopwords: None, + delete_all: None, + create_stopword: None, +) -> None: + """Test that the AsyncStopwordsSet object can 
retrieve an stopword_set from Typesense Server.""" + response = await actual_async_stopwords["company_stopwords"].retrieve() + + assert response == { + "stopwords": { + "id": "company_stopwords", + "stopwords": ["and", "is", "the"], + }, + } + + +async def test_actual_delete_async( + actual_async_stopwords: AsyncStopwords, + create_stopword: None, +) -> None: + """Test that the AsyncStopwordsSet object can delete an stopword_set from Typesense Server.""" + response = await actual_async_stopwords["company_stopwords"].delete() + + assert response == {"id": "company_stopwords"} diff --git a/tests/stopwords_test.py b/tests/stopwords_test.py new file mode 100644 index 0000000..6577777 --- /dev/null +++ b/tests/stopwords_test.py @@ -0,0 +1,211 @@ +"""Tests for the Stopwords class.""" + +from tests.utils.object_assertions import ( + assert_match_object, + assert_object_lists_match, + assert_to_contain_object, +) +from typesense.sync.api_call import ApiCall +from typesense.async_.api_call import AsyncApiCall +from typesense.async_.stopwords import AsyncStopwords +from typesense.sync.stopwords import Stopwords + + +def test_init(fake_api_call: ApiCall) -> None: + """Test that the Stopwords object is initialized correctly.""" + stopwords = Stopwords(fake_api_call) + + assert_match_object(stopwords.api_call, fake_api_call) + assert_object_lists_match( + stopwords.api_call.node_manager.nodes, + fake_api_call.node_manager.nodes, + ) + assert_match_object( + stopwords.api_call.config.nearest_node, + fake_api_call.config.nearest_node, + ) + + assert not stopwords.stopwords_sets + + +def test_get_missing_stopword(fake_stopwords: Stopwords) -> None: + """Test that the Stopwords object can get a missing stopword.""" + stopword = fake_stopwords["company_stopwords"] + + assert stopword.stopwords_set_id == "company_stopwords" + assert_match_object(stopword.api_call, fake_stopwords.api_call) + assert_object_lists_match( + stopword.api_call.node_manager.nodes, 
fake_stopwords.api_call.node_manager.nodes + ) + assert_match_object( + stopword.api_call.config.nearest_node, + fake_stopwords.api_call.config.nearest_node, + ) + assert stopword._endpoint_path == "/stopwords/company_stopwords" # noqa: WPS437 + + +def test_get_existing_stopword(fake_stopwords: Stopwords) -> None: + """Test that the Stopwords object can get an existing stopword.""" + stopword = fake_stopwords["company_stopwords"] + fetched_stopword = fake_stopwords["company_stopwords"] + + assert len(fake_stopwords.stopwords_sets) == 1 + + assert stopword is fetched_stopword + + +def test_actual_create(actual_stopwords: Stopwords, delete_all_stopwords: None) -> None: + """Test that the Stopwords object can create an stopword on Typesense Server.""" + response = actual_stopwords.upsert( + "company_stopwords", + {"stopwords": ["and", "is", "the"]}, + ) + + assert response == { + "id": "company_stopwords", + "stopwords": ["and", "is", "the"], + } + + +def test_actual_update( + actual_stopwords: Stopwords, + delete_all_stopwords: None, +) -> None: + """Test that the Stopwords object can update an stopword on Typesense Server.""" + create_response = actual_stopwords.upsert( + "company_stopwords", + {"stopwords": ["and", "is", "the"]}, + ) + + assert create_response == { + "id": "company_stopwords", + "stopwords": ["and", "is", "the"], + } + + update_response = actual_stopwords.upsert( + "company_stopwords", + {"stopwords": ["and", "is", "other"]}, + ) + + assert update_response == { + "id": "company_stopwords", + "stopwords": ["and", "is", "other"], + } + + +def test_actual_retrieve( + delete_all_stopwords: None, + create_stopword: None, + actual_stopwords: Stopwords, +) -> None: + """Test that the Stopwords object can retrieve an stopword from Typesense Server.""" + response = actual_stopwords.retrieve() + + assert len(response["stopwords"]) == 1 + assert_to_contain_object( + response["stopwords"][0], + { + "id": "company_stopwords", + "stopwords": ["and", "is", 
"the"], + }, + ) + + +def test_init_async(fake_async_api_call: AsyncApiCall) -> None: + """Test that the AsyncStopwords object is initialized correctly.""" + stopwords = AsyncStopwords(fake_async_api_call) + + assert_match_object(stopwords.api_call, fake_async_api_call) + assert_object_lists_match( + stopwords.api_call.node_manager.nodes, + fake_async_api_call.node_manager.nodes, + ) + assert_match_object( + stopwords.api_call.config.nearest_node, + fake_async_api_call.config.nearest_node, + ) + + assert not stopwords.stopwords_sets + + +def test_get_missing_stopword_async(fake_async_stopwords: AsyncStopwords) -> None: + """Test that the AsyncStopwords object can get a missing stopword.""" + stopword = fake_async_stopwords["company_stopwords"] + + assert stopword.stopwords_set_id == "company_stopwords" + assert_match_object(stopword.api_call, fake_async_stopwords.api_call) + assert_object_lists_match( + stopword.api_call.node_manager.nodes, fake_async_stopwords.api_call.node_manager.nodes + ) + assert_match_object( + stopword.api_call.config.nearest_node, + fake_async_stopwords.api_call.config.nearest_node, + ) + assert stopword._endpoint_path == "/stopwords/company_stopwords" # noqa: WPS437 + + +def test_get_existing_stopword_async(fake_async_stopwords: AsyncStopwords) -> None: + """Test that the AsyncStopwords object can get an existing stopword.""" + stopword = fake_async_stopwords["company_stopwords"] + fetched_stopword = fake_async_stopwords["company_stopwords"] + + assert len(fake_async_stopwords.stopwords_sets) == 1 + + assert stopword is fetched_stopword + + +async def test_actual_create_async(actual_async_stopwords: AsyncStopwords, delete_all_stopwords: None) -> None: + """Test that the AsyncStopwords object can create an stopword on Typesense Server.""" + response = await actual_async_stopwords.upsert( + "company_stopwords", + {"stopwords": ["and", "is", "the"]}, + ) + + assert response == { + "id": "company_stopwords", + "stopwords": ["and", "is", 
"the"], + } + + +async def test_actual_update_async( + actual_async_stopwords: AsyncStopwords, + delete_all_stopwords: None, +) -> None: + """Test that the AsyncStopwords object can update an stopword on Typesense Server.""" + create_response = await actual_async_stopwords.upsert( + "company_stopwords", + {"stopwords": ["and", "is", "the"]}, + ) + + assert create_response == { + "id": "company_stopwords", + "stopwords": ["and", "is", "the"], + } + + update_response = await actual_async_stopwords.upsert( + "company_stopwords", + {"stopwords": ["and", "is", "other"]}, + ) + + assert update_response == { + "id": "company_stopwords", + "stopwords": ["and", "is", "other"], + } + + +async def test_actual_retrieve_async( + delete_all_stopwords: None, + create_stopword: None, + actual_async_stopwords: AsyncStopwords, +) -> None: + """Test that the AsyncStopwords object can retrieve an stopword from Typesense Server.""" + response = await actual_async_stopwords.retrieve() + + assert len(response["stopwords"]) == 1 + assert_to_contain_object( + response["stopwords"][0], + { + "id": "company_stopwords", + "stopwords": ["and", "is", "the"], + }, + ) diff --git a/tests/synonym_set_items_test.py b/tests/synonym_set_items_test.py new file mode 100644 index 0000000..f871719 --- /dev/null +++ b/tests/synonym_set_items_test.py @@ -0,0 +1,150 @@ +"""Tests for SynonymSet item-level APIs.""" + + +import pytest + +from tests.utils.version import is_v30_or_above +from typesense.async_.synonym_sets import AsyncSynonymSets +from typesense.sync.client import Client +from typesense.sync.synonym_sets import SynonymSets +from typesense.types.synonym_set import ( + SynonymItemSchema, +) + +pytestmark = pytest.mark.skipif( + not is_v30_or_above( + Client( + { + "api_key": "xyz", + "nodes": [{"host": "localhost", "port": 8108, "protocol": "http"}], + } + ) + ), + reason="Run synonym set items tests only on v30+", +) + + +def test_actual_list_items( + actual_synonym_sets: SynonymSets, + 
delete_all_synonym_sets: None, + create_synonym_set: None, +) -> None: + """Test that the SynonymSet object can list items from Typesense Server.""" + response = actual_synonym_sets["test-set"].list_items() + + assert response == [ + { + "id": "company_synonym", + "root": "", + "synonyms": ["companies", "corporations", "firms"], + }, + ] + + +def test_actual_get_item( + actual_synonym_sets: SynonymSets, + delete_all_synonym_sets: None, + create_synonym_set: None, +) -> None: + """Test that the SynonymSet object can get a specific item from Typesense Server.""" + response = actual_synonym_sets["test-set"].get_item("company_synonym") + + assert response == { + "id": "company_synonym", + "root": "", + "synonyms": ["companies", "corporations", "firms"], + } + + +def test_actual_upsert_item( + actual_synonym_sets: SynonymSets, + delete_all_synonym_sets: None, + create_synonym_set: None, +) -> None: + """Test that the SynonymSet object can upsert an item in Typesense Server.""" + payload: SynonymItemSchema = { + "id": "brand_synonym", + "synonyms": ["brand", "brands", "label"], + } + response = actual_synonym_sets["test-set"].upsert_item("brand_synonym", payload) + + assert response == { + "id": "brand_synonym", + "synonyms": ["brand", "brands", "label"], + } + + +def test_actual_delete_item( + actual_synonym_sets: SynonymSets, + delete_all_synonym_sets: None, + create_synonym_set: None, +) -> None: + """Test that the SynonymSet object can delete an item from Typesense Server.""" + response = actual_synonym_sets["test-set"].delete_item("company_synonym") + + assert response == {"id": "company_synonym"} + + +async def test_actual_list_items_async( + actual_async_synonym_sets: AsyncSynonymSets, + delete_all_synonym_sets: None, + create_synonym_set: None, +) -> None: + """Test that the AsyncSynonymSet object can list items from Typesense Server.""" + response = await actual_async_synonym_sets["test-set"].list_items() + + assert response == [ + { + "id": "company_synonym", + 
"root": "", + "synonyms": ["companies", "corporations", "firms"], + }, + ] + + +async def test_actual_get_item_async( + actual_async_synonym_sets: AsyncSynonymSets, + delete_all_synonym_sets: None, + create_synonym_set: None, +) -> None: + """Test that the AsyncSynonymSet object can get a specific item from Typesense Server.""" + response = await actual_async_synonym_sets["test-set"].get_item("company_synonym") + + assert response == { + "id": "company_synonym", + "root": "", + "synonyms": ["companies", "corporations", "firms"], + } + + +async def test_actual_upsert_item_async( + actual_async_synonym_sets: AsyncSynonymSets, + delete_all_synonym_sets: None, + create_synonym_set: None, +) -> None: + """Test that the AsyncSynonymSet object can upsert an item in Typesense Server.""" + payload: SynonymItemSchema = { + "id": "brand_synonym", + "synonyms": ["brand", "brands", "label"], + } + response = await actual_async_synonym_sets["test-set"].upsert_item( + "brand_synonym", payload + ) + + assert response == { + "id": "brand_synonym", + "synonyms": ["brand", "brands", "label"], + } + + +async def test_actual_delete_item_async( + actual_async_synonym_sets: AsyncSynonymSets, + delete_all_synonym_sets: None, + create_synonym_set: None, +) -> None: + """Test that the AsyncSynonymSet object can delete an item from Typesense Server.""" + response = await actual_async_synonym_sets["test-set"].delete_item( + "company_synonym" + ) + + assert response == {"id": "company_synonym"} diff --git a/tests/synonym_set_test.py b/tests/synonym_set_test.py new file mode 100644 index 0000000..7edebb4 --- /dev/null +++ b/tests/synonym_set_test.py @@ -0,0 +1,121 @@ +"""Tests for the SynonymSet class.""" + + +import pytest + +from tests.utils.object_assertions import assert_match_object, assert_object_lists_match +from tests.utils.version import is_v30_or_above +from typesense.sync.api_call import ApiCall +from typesense.async_.api_call import AsyncApiCall +from typesense.async_.synonym_sets 
import AsyncSynonymSets +from typesense.sync.client import Client +from typesense.sync.synonym_set import SynonymSet +from typesense.sync.synonym_sets import SynonymSets + +pytestmark = pytest.mark.skipif( + not is_v30_or_above( + Client( + { + "api_key": "xyz", + "nodes": [{"host": "localhost", "port": 8108, "protocol": "http"}], + } + ) + ), + reason="Run synonym set tests only on v30+", +) + + +def test_init(fake_api_call: ApiCall) -> None: + """Test that the SynonymSet object is initialized correctly.""" + synset = SynonymSet(fake_api_call, "test-set") + + assert synset.name == "test-set" + assert_match_object(synset.api_call, fake_api_call) + assert_object_lists_match( + synset.api_call.node_manager.nodes, + fake_api_call.node_manager.nodes, + ) + assert_match_object( + synset.api_call.config.nearest_node, + fake_api_call.config.nearest_node, + ) + assert synset._endpoint_path == "/synonym_sets/test-set" # noqa: WPS437 + + +def test_actual_retrieve( + actual_synonym_sets: SynonymSets, + delete_all_synonym_sets: None, + create_synonym_set: None, +) -> None: + """Test that the SynonymSet object can retrieve a synonym set from Typesense Server.""" + response = actual_synonym_sets["test-set"].retrieve() + + assert response == { + "name": "test-set", + "items": [ + { + "id": "company_synonym", + "root": "", + "synonyms": ["companies", "corporations", "firms"], + } + ], + } + + +def test_actual_delete( + actual_synonym_sets: SynonymSets, + create_synonym_set: None, +) -> None: + """Test that the SynonymSet object can delete a synonym set from Typesense Server.""" + response = actual_synonym_sets["test-set"].delete() + + assert response == {"name": "test-set"} + + +def test_init_async(fake_async_api_call: AsyncApiCall) -> None: + """Test that the AsyncSynonymSet object is initialized correctly.""" + from typesense.async_.synonym_set import AsyncSynonymSet + + synset = AsyncSynonymSet(fake_async_api_call, "test-set") + + assert synset.name == "test-set" + 
assert_match_object(synset.api_call, fake_async_api_call) + assert_object_lists_match( + synset.api_call.node_manager.nodes, + fake_async_api_call.node_manager.nodes, + ) + assert_match_object( + synset.api_call.config.nearest_node, + fake_async_api_call.config.nearest_node, + ) + assert synset._endpoint_path == "/synonym_sets/test-set" # noqa: WPS437 + + +async def test_actual_retrieve_async( + actual_async_synonym_sets: AsyncSynonymSets, + delete_all_synonym_sets: None, + create_synonym_set: None, +) -> None: + """Test that the AsyncSynonymSet object can retrieve a synonym set from Typesense Server.""" + response = await actual_async_synonym_sets["test-set"].retrieve() + + assert response == { + "name": "test-set", + "items": [ + { + "id": "company_synonym", + "root": "", + "synonyms": ["companies", "corporations", "firms"], + } + ], + } + + +async def test_actual_delete_async( + actual_async_synonym_sets: AsyncSynonymSets, + create_synonym_set: None, +) -> None: + """Test that the AsyncSynonymSet object can delete a synonym set from Typesense Server.""" + response = await actual_async_synonym_sets["test-set"].delete() + + assert response == {"name": "test-set"} diff --git a/tests/synonym_sets_test.py b/tests/synonym_sets_test.py new file mode 100644 index 0000000..66a5519 --- /dev/null +++ b/tests/synonym_sets_test.py @@ -0,0 +1,149 @@ +"""Tests for the SynonymSets class.""" + +import pytest + +from tests.utils.object_assertions import ( + assert_match_object, + assert_object_lists_match, + assert_to_contain_object, +) +from tests.utils.version import is_v30_or_above +from typesense.sync.api_call import ApiCall +from typesense.async_.api_call import AsyncApiCall +from typesense.async_.synonym_sets import AsyncSynonymSets +from typesense.sync.client import Client +from typesense.sync.synonym_sets import SynonymSets + +pytestmark = pytest.mark.skipif( + not is_v30_or_above( + Client( + { + "api_key": "xyz", + "nodes": [{"host": "localhost", "port": 8108, 
"protocol": "http"}], + } + ) + ), + reason="Run synonym sets tests only on v30+", +) + + +def test_init(fake_api_call: ApiCall) -> None: + """Test that the SynonymSets object is initialized correctly.""" + synsets = SynonymSets(fake_api_call) + + assert_match_object(synsets.api_call, fake_api_call) + assert_object_lists_match( + synsets.api_call.node_manager.nodes, + fake_api_call.node_manager.nodes, + ) + assert_match_object( + synsets.api_call.config.nearest_node, + fake_api_call.config.nearest_node, + ) + + +def test_actual_create( + actual_synonym_sets: SynonymSets, + delete_all_synonym_sets: None, +) -> None: + """Test that the SynonymSets object can create a synonym set on Typesense Server.""" + response = actual_synonym_sets["test-set"].upsert( + { + "items": [ + { + "id": "company_synonym", + "synonyms": ["companies", "corporations", "firms"], + } + ] + }, + ) + + assert response == { + "name": "test-set", + "items": [ + { + "id": "company_synonym", + "root": "", + "synonyms": ["companies", "corporations", "firms"], + } + ], + } + + +def test_actual_retrieve( + actual_synonym_sets: SynonymSets, + delete_all_synonym_sets: None, + create_synonym_set: None, +) -> None: + """Test that the SynonymSets object can retrieve a synonym set from Typesense Server.""" + response = actual_synonym_sets.retrieve() + + assert isinstance(response, list) + assert_to_contain_object( + response[0], + { + "name": "test-set", + }, + ) + + +def test_init_async(fake_async_api_call: AsyncApiCall) -> None: + """Test that the AsyncSynonymSets object is initialized correctly.""" + from typesense.async_.synonym_sets import AsyncSynonymSets + + synsets = AsyncSynonymSets(fake_async_api_call) + + assert_match_object(synsets.api_call, fake_async_api_call) + assert_object_lists_match( + synsets.api_call.node_manager.nodes, + fake_async_api_call.node_manager.nodes, + ) + assert_match_object( + synsets.api_call.config.nearest_node, + fake_async_api_call.config.nearest_node, + ) + + +async 
def test_actual_create_async( + actual_async_synonym_sets: AsyncSynonymSets, + delete_all_synonym_sets: None, +) -> None: + """Test that the AsyncSynonymSets object can create a synonym set on Typesense Server.""" + response = await actual_async_synonym_sets["test-set"].upsert( + { + "items": [ + { + "id": "company_synonym", + "synonyms": ["companies", "corporations", "firms"], + } + ] + }, + ) + + assert response == { + "name": "test-set", + "items": [ + { + "id": "company_synonym", + "root": "", + "synonyms": ["companies", "corporations", "firms"], + } + ], + } + + +async def test_actual_retrieve_async( + actual_async_synonym_sets: AsyncSynonymSets, + delete_all_synonym_sets: None, + create_synonym_set: None, +) -> None: + """Test that the AsyncSynonymSets object can retrieve a synonym set from Typesense Server.""" + response = await actual_async_synonym_sets.retrieve() + + assert isinstance(response, list) + assert_to_contain_object( + response[0], + { + "name": "test-set", + }, + ) diff --git a/tests/synonym_test.py b/tests/synonym_test.py new file mode 100644 index 0000000..cfe6e51 --- /dev/null +++ b/tests/synonym_test.py @@ -0,0 +1,141 @@ +"""Tests for the Synonym class.""" + +import pytest + +from tests.utils.object_assertions import ( + assert_match_object, + assert_object_lists_match, + assert_to_contain_object, +) +from tests.utils.version import is_v30_or_above +from typesense.sync.api_call import ApiCall +from typesense.async_.api_call import AsyncApiCall +from typesense.async_.collections import AsyncCollections +from typesense.sync.collections import Collections +from typesense.sync.client import Client +from typesense.sync.synonym import Synonym + + +pytestmark = pytest.mark.skipif( + is_v30_or_above( + Client( + { + "api_key": "xyz", + "nodes": [{"host": "localhost", "port": 8108, "protocol": "http"}], + } + ) + ), + reason="Skip synonym tests on v30+", +) + + +def test_init(fake_api_call: ApiCall) -> None: + """Test that the Synonym object is 
initialized correctly.""" + synonym = Synonym(fake_api_call, "companies", "company_synonym") + + assert synonym.collection_name == "companies" + assert synonym.synonym_id == "company_synonym" + assert_match_object(synonym.api_call, fake_api_call) + assert_object_lists_match( + synonym.api_call.node_manager.nodes, + fake_api_call.node_manager.nodes, + ) + assert_match_object( + synonym.api_call.config.nearest_node, + fake_api_call.config.nearest_node, + ) + assert ( + synonym._endpoint_path() # noqa: WPS437 + == "/collections/companies/synonyms/company_synonym" + ) + + +def test_actual_retrieve( + actual_collections: Collections, + delete_all: None, + create_synonym: None, +) -> None: + """Test that the Synonym object can retrieve an synonym from Typesense Server.""" + response = actual_collections["companies"].synonyms["company_synonym"].retrieve() + + assert response["id"] == "company_synonym" + + assert response["synonyms"] == ["companies", "corporations", "firms"] + assert_to_contain_object( + response, + { + "id": "company_synonym", + "synonyms": ["companies", "corporations", "firms"], + }, + ) + + +def test_actual_delete( + actual_collections: Collections, + delete_all: None, + create_synonym: None, +) -> None: + """Test that the Synonym object can delete an synonym from Typesense Server.""" + response = actual_collections["companies"].synonyms["company_synonym"].delete() + + assert response == {"id": "company_synonym"} + + +def test_init_async(fake_async_api_call: AsyncApiCall) -> None: + """Test that the AsyncSynonym object is initialized correctly.""" + from typesense.async_.synonym import AsyncSynonym + + synonym = AsyncSynonym(fake_async_api_call, "companies", "company_synonym") + + assert synonym.collection_name == "companies" + assert synonym.synonym_id == "company_synonym" + assert_match_object(synonym.api_call, fake_async_api_call) + assert_object_lists_match( + synonym.api_call.node_manager.nodes, + fake_async_api_call.node_manager.nodes, + ) + 
assert_match_object( + synonym.api_call.config.nearest_node, + fake_async_api_call.config.nearest_node, + ) + assert ( + synonym._endpoint_path() # noqa: WPS437 + == "/collections/companies/synonyms/company_synonym" + ) + + +async def test_actual_retrieve_async( + actual_async_collections: AsyncCollections, + delete_all: None, + create_synonym: None, +) -> None: + """Test that the AsyncSynonym object can retrieve an synonym from Typesense Server.""" + response = ( + await actual_async_collections["companies"] + .synonyms["company_synonym"] + .retrieve() + ) + + assert response["id"] == "company_synonym" + + assert response["synonyms"] == ["companies", "corporations", "firms"] + assert_to_contain_object( + response, + { + "id": "company_synonym", + "synonyms": ["companies", "corporations", "firms"], + }, + ) + + +async def test_actual_delete_async( + actual_async_collections: AsyncCollections, + delete_all: None, + create_synonym: None, +) -> None: + """Test that the AsyncSynonym object can delete an synonym from Typesense Server.""" + response = ( + await actual_async_collections["companies"].synonyms["company_synonym"].delete() + ) + + assert response == {"id": "company_synonym"} diff --git a/tests/synonyms_test.py b/tests/synonyms_test.py new file mode 100644 index 0000000..3402493 --- /dev/null +++ b/tests/synonyms_test.py @@ -0,0 +1,251 @@ +"""Tests for the Synonyms class.""" + + +import pytest + +from tests.utils.object_assertions import ( + assert_match_object, + assert_object_lists_match, + assert_to_contain_object, +) +from typesense.sync.api_call import ApiCall +from typesense.async_.api_call import AsyncApiCall +from typesense.async_.collections import AsyncCollections +from typesense.sync.collections import Collections +from tests.utils.version import is_v30_or_above +from typesense.sync.client import Client +from typesense.sync.synonyms import Synonyms + + +pytestmark = pytest.mark.skipif( + is_v30_or_above( + Client( + { + "api_key": "xyz", + "nodes": 
[{"host": "localhost", "port": 8108, "protocol": "http"}], + } + ) + ), + reason="Skip synonyms tests on v30+", +) + + +def test_init(fake_api_call: ApiCall) -> None: + """Test that the Synonyms object is initialized correctly.""" + synonyms = Synonyms(fake_api_call, "companies") + + assert_match_object(synonyms.api_call, fake_api_call) + assert_object_lists_match( + synonyms.api_call.node_manager.nodes, + fake_api_call.node_manager.nodes, + ) + assert_match_object( + synonyms.api_call.config.nearest_node, + fake_api_call.config.nearest_node, + ) + + assert not synonyms.synonyms + + +def test_get_missing_synonym(fake_synonyms: Synonyms) -> None: + """Test that the Synonyms object can get a missing synonym.""" + synonym = fake_synonyms["company_synonym"] + + assert synonym.synonym_id == "company_synonym" + assert_match_object(synonym.api_call, fake_synonyms.api_call) + assert_object_lists_match( + synonym.api_call.node_manager.nodes, fake_synonyms.api_call.node_manager.nodes + ) + assert_match_object( + synonym.api_call.config.nearest_node, + fake_synonyms.api_call.config.nearest_node, + ) + assert synonym.collection_name == "companies" + assert ( + synonym._endpoint_path() # noqa: WPS437 + == "/collections/companies/synonyms/company_synonym" + ) + + +def test_get_existing_synonym(fake_synonyms: Synonyms) -> None: + """Test that the Synonyms object can get an existing synonym.""" + synonym = fake_synonyms["companies"] + fetched_synonym = fake_synonyms["companies"] + + assert len(fake_synonyms.synonyms) == 1 + + assert synonym is fetched_synonym + + +def test_actual_create( + actual_synonyms: Synonyms, + delete_all: None, + create_collection: None, +) -> None: + """Test that the Synonyms object can create an synonym on Typesense Server.""" + response = actual_synonyms.upsert( + "company_synonym", + {"synonyms": ["companies", "corporations", "firms"]}, + ) + + assert response == { + "id": "company_synonym", + "synonyms": ["companies", "corporations", "firms"], + } + + 
+def test_actual_update( + actual_synonyms: Synonyms, + delete_all: None, + create_collection: None, +) -> None: + """Test that the Synonyms object can update an synonym on Typesense Server.""" + create_response = actual_synonyms.upsert( + "company_synonym", + {"synonyms": ["companies", "corporations", "firms"]}, + ) + + assert create_response == { + "id": "company_synonym", + "synonyms": ["companies", "corporations", "firms"], + } + + update_response = actual_synonyms.upsert( + "company_synonym", + {"synonyms": ["companies", "corporations"]}, + ) + + assert update_response == { + "id": "company_synonym", + "synonyms": ["companies", "corporations"], + } + + +def test_actual_retrieve( + delete_all: None, + create_synonym: None, + actual_collections: Collections, +) -> None: + """Test that the Synonyms object can retrieve an synonym from Typesense Server.""" + response = actual_collections["companies"].synonyms.retrieve() + + assert len(response["synonyms"]) == 1 + assert_to_contain_object( + response["synonyms"][0], + { + "id": "company_synonym", + "synonyms": ["companies", "corporations", "firms"], + }, + ) + + +def test_init_async(fake_async_api_call: AsyncApiCall) -> None: + """Test that the AsyncSynonyms object is initialized correctly.""" + from typesense.async_.synonyms import AsyncSynonyms + + synonyms = AsyncSynonyms(fake_async_api_call, "companies") + + assert_match_object(synonyms.api_call, fake_async_api_call) + assert_object_lists_match( + synonyms.api_call.node_manager.nodes, + fake_async_api_call.node_manager.nodes, + ) + assert_match_object( + synonyms.api_call.config.nearest_node, + fake_async_api_call.config.nearest_node, + ) + + assert not synonyms.synonyms + + +def test_get_missing_synonym_async(fake_async_synonyms) -> None: + """Test that the AsyncSynonyms object can get a missing synonym.""" + + synonym = fake_async_synonyms["company_synonym"] + + assert synonym.synonym_id == "company_synonym" + assert_match_object(synonym.api_call, 
fake_async_synonyms.api_call) + assert_object_lists_match( + synonym.api_call.node_manager.nodes, fake_async_synonyms.api_call.node_manager.nodes + ) + assert_match_object( + synonym.api_call.config.nearest_node, + fake_async_synonyms.api_call.config.nearest_node, + ) + assert synonym.collection_name == "companies" + assert ( + synonym._endpoint_path() # noqa: WPS437 + == "/collections/companies/synonyms/company_synonym" + ) + + +def test_get_existing_synonym_async(fake_async_synonyms) -> None: + """Test that the AsyncSynonyms object can get an existing synonym.""" + synonym = fake_async_synonyms["companies"] + fetched_synonym = fake_async_synonyms["companies"] + + assert len(fake_async_synonyms.synonyms) == 1 + + assert synonym is fetched_synonym + + +async def test_actual_create_async( + actual_async_synonyms, + delete_all: None, + create_collection: None, +) -> None: + """Test that the AsyncSynonyms object can create an synonym on Typesense Server.""" + response = await actual_async_synonyms.upsert( + "company_synonym", + {"synonyms": ["companies", "corporations", "firms"]}, + ) + + assert response == { + "id": "company_synonym", + "synonyms": ["companies", "corporations", "firms"], + } + + +async def test_actual_update_async( + actual_async_synonyms, + delete_all: None, + create_collection: None, +) -> None: + """Test that the AsyncSynonyms object can update an synonym on Typesense Server.""" + create_response = await actual_async_synonyms.upsert( + "company_synonym", + {"synonyms": ["companies", "corporations", "firms"]}, + ) + + assert create_response == { + "id": "company_synonym", + "synonyms": ["companies", "corporations", "firms"], + } + + update_response = await actual_async_synonyms.upsert( + "company_synonym", + {"synonyms": ["companies", "corporations"]}, + ) + + assert update_response == { + "id": "company_synonym", + "synonyms": ["companies", "corporations"], + } + + +async def test_actual_retrieve_async( + delete_all: None, + create_synonym: None, 
+ actual_async_collections: AsyncCollections, +) -> None: + """Test that the AsyncSynonyms object can retrieve an synonym from Typesense Server.""" + response = await actual_async_collections["companies"].synonyms.retrieve() + + assert len(response["synonyms"]) == 1 + assert_to_contain_object( + response["synonyms"][0], + { + "id": "company_synonym", + "synonyms": ["companies", "corporations", "firms"], + }, + ) diff --git a/tests/utils/object_assertions.py b/tests/utils/object_assertions.py new file mode 100644 index 0000000..ffe95f4 --- /dev/null +++ b/tests/utils/object_assertions.py @@ -0,0 +1,121 @@ +"""Utility functions for asserting that objects have the same attribute values.""" + +import difflib +import sys + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + + +TObj = typing.TypeVar("TObj", bound=object) + + +def obj_to_dict( + input_obj: typing.Union[TObj, typing.Dict[str, typing.Any]], +) -> typing.Dict[str, typing.Any]: + """ + Convert an object to a dictionary. + + If the object is already a dictionary, return it as is. + + Args: + input_obj: The object to convert. + + Returns: + The object as a dictionary. + """ + return input_obj if isinstance(input_obj, typing.Dict) else input_obj.__dict__ + + +def assert_match_object( + actual: typing.Union[TObj, typing.Dict[str, typing.Any]], + expected: typing.Union[TObj, typing.Dict[str, typing.Any]], +) -> None: + """ + Assert that two objects have the same attribute values. + + Args: + actual: The actual object. + expected: The expected object. + + Raises: + AssertionError: If the objects do not have the same attribute values. 
+ """ + actual_attrs = obj_to_dict(actual) + + expected_attrs = obj_to_dict(expected) + + for key, _ in actual_attrs.items(): + assert key in expected_attrs, f"Attribute {key} not found in expected object" + + if actual_attrs[key] != expected_attrs[key]: + raise_with_diff([{key: expected_attrs[key]}], [{key: actual_attrs[key]}]) + + +def assert_to_contain_keys( + actual: typing.Dict[str, typing.Any], + keys: typing.List[str], +) -> None: + """Assert that the actual dictionary contains the expected keys.""" + for key in keys: + assert key in actual, f"Key {key} not found in actual dictionary" + + +def assert_to_contain_object( + actual: typing.Union[TObj, typing.Dict[str, typing.Any]], + expected: typing.Union[TObj, typing.Dict[str, typing.Any]], +) -> None: + """Assert that two objects have the same attribute values.""" + actual_attrs = obj_to_dict(actual) + + expected_attrs = obj_to_dict(expected) + + for key, _ in expected_attrs.items(): + assert key in actual_attrs, f"Attribute {key} not found in expected object" + + if actual_attrs[key] != expected_attrs[key]: + raise_with_diff([{key: expected_attrs[key]}], [{key: actual_attrs[key]}]) + + +def assert_object_lists_match( + actual: typing.List[TObj], + expected: typing.List[typing.Union[TObj, typing.Dict[str, typing.Any]]], +) -> None: + """Assert that two lists of objects have the same attribute values.""" + actual_dicts = [obj_to_dict(actual_obj) for actual_obj in actual] + expected_dicts = [obj_to_dict(expected_obj) for expected_obj in expected] + + actual_counter = typing.Counter( + tuple(sorted(dict_entry.items())) for dict_entry in actual_dicts + ) + expected_counter = typing.Counter( + tuple(sorted(dict_entry.items())) for dict_entry in expected_dicts + ) + if actual_counter != expected_counter: + raise_with_diff(expected_dicts, actual_dicts) + + +def raise_with_diff( + expected_dicts: typing.Sequence[dict[str, typing.Any]], + actual_dicts: typing.Sequence[dict[str, typing.Any]], +) -> None: + """ + Raise 
an AssertionError with a unified diff of the expected and actual values. + + Args: + expected: The expected value. + actual: The actual value. + """ + expected_str = [str(sorted(dict_entry.items())) for dict_entry in expected_dicts] + actual_str = [str(sorted(dict_entry.items())) for dict_entry in actual_dicts] + diff = difflib.unified_diff( + expected_str, + actual_str, + fromfile="expected", + tofile="actual", + lineterm="", + ) + diff_output = "\n".join(diff) + raise AssertionError(f"Lists do not contain the same elements:\n{diff_output}") diff --git a/tests/utils/version.py b/tests/utils/version.py new file mode 100644 index 0000000..e5bb783 --- /dev/null +++ b/tests/utils/version.py @@ -0,0 +1,22 @@ + +from typesense.sync.client import Client + + +def is_v30_or_above(client: Client) -> bool: + try: + debug = client.debug.retrieve() + version = debug.get("version") + if version == "nightly": + return True + try: + version_str = str(version) + if version_str.startswith("v"): + numbered = version_str.split("v", 1)[1] + else: + numbered = version_str + major_version = numbered.split(".", 1)[0] + return int(major_version) >= 30 + except Exception: + return False + except Exception: + return False diff --git a/typesense/__init__.py b/typesense/__init__.py deleted file mode 100644 index 7f6a307..0000000 --- a/typesense/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -from .client import Client # NOQA -import logging - -logging.basicConfig(level=logging.WARN) diff --git a/typesense/alias.py b/typesense/alias.py deleted file mode 100644 index 5695937..0000000 --- a/typesense/alias.py +++ /dev/null @@ -1,14 +0,0 @@ -class Alias(object): - def __init__(self, api_call, name): - self.api_call = api_call - self.name = name - - def _endpoint_path(self): - from .aliases import Aliases - return u"{0}/{1}".format(Aliases.RESOURCE_PATH, self.name) - - def retrieve(self): - return self.api_call.get(self._endpoint_path()) - - def delete(self): - return 
self.api_call.delete(self._endpoint_path()) diff --git a/typesense/aliases.py b/typesense/aliases.py deleted file mode 100644 index 2545eb8..0000000 --- a/typesense/aliases.py +++ /dev/null @@ -1,24 +0,0 @@ -from typesense.alias import Alias - - -class Aliases(object): - RESOURCE_PATH = '/aliases' - - def __init__(self, api_call): - self.api_call = api_call - self.aliases = {} - - def __getitem__(self, name): - if name not in self.aliases: - self.aliases[name] = Alias(self.api_call, name) - - return self.aliases.get(name) - - def _endpoint_path(self, alias_name): - return u"{0}/{1}".format(Aliases.RESOURCE_PATH, alias_name) - - def upsert(self, name, mapping): - return self.api_call.put(self._endpoint_path(name), mapping) - - def retrieve(self): - return self.api_call.get(Aliases.RESOURCE_PATH) diff --git a/typesense/api_call.py b/typesense/api_call.py deleted file mode 100644 index 52354b5..0000000 --- a/typesense/api_call.py +++ /dev/null @@ -1,159 +0,0 @@ -import copy -import logging -import json -import time - -import requests -from .exceptions import (HTTPStatus0Error, ObjectAlreadyExists, - ObjectNotFound, ObjectUnprocessable, - RequestMalformed, RequestUnauthorized, RequestForbidden, - ServerError, ServiceUnavailable, TypesenseClientError) - -logger = logging.getLogger(__name__) - - -class ApiCall(object): - API_KEY_HEADER_NAME = 'X-TYPESENSE-API-KEY' - - def __init__(self, config): - self.config = config - self.nodes = copy.deepcopy(self.config.nodes) - self.node_index = 0 - self._initialize_nodes() - - def _initialize_nodes(self): - if self.config.nearest_node: - self.set_node_healthcheck(self.config.nearest_node, True) - - for node in self.nodes: - self.set_node_healthcheck(node, True) - - def node_due_for_health_check(self, node): - current_epoch_ts = int(time.time()) - due_for_check = (current_epoch_ts - node.last_access_ts) > self.config.healthcheck_interval_seconds - if due_for_check: - logger.debug('Node {}:{} is due for health 
check.'.format(node.host, node.port)) - return due_for_check - - # Returns a healthy host from the pool in a round-robin fashion. - # Might return an unhealthy host periodically to check for recovery. - def get_node(self): - if self.config.nearest_node: - if self.config.nearest_node.healthy or self.node_due_for_health_check(self.config.nearest_node): - logger.debug('Using nearest node.') - return self.config.nearest_node - else: - logger.debug('Nearest node is unhealthy or not due for health check. Falling back to individual nodes.') - - i = 0 - while i < len(self.nodes): - i += 1 - node = self.nodes[self.node_index] - self.node_index = (self.node_index + 1) % len(self.nodes) - - if node.healthy or self.node_due_for_health_check(node): - return node - - # None of the nodes are marked healthy, but some of them could have become healthy since last health check. - # So we will just return the next node. - logger.debug('No healthy nodes were found. Returning the next node.') - return self.nodes[self.node_index] - - @staticmethod - def get_exception(http_code): - if http_code == 0: - return HTTPStatus0Error - elif http_code == 400: - return RequestMalformed - elif http_code == 401: - return RequestUnauthorized - elif http_code == 403: - return RequestForbidden - elif http_code == 404: - return ObjectNotFound - elif http_code == 409: - return ObjectAlreadyExists - elif http_code == 422: - return ObjectUnprocessable - elif http_code == 500: - return ServerError - elif http_code == 503: - return ServiceUnavailable - else: - return TypesenseClientError - - # Makes the actual http request, along with retries - def make_request(self, fn, endpoint, as_json, **kwargs): - num_tries = 0 - last_exception = None - - logger.debug('Making {} {}'.format(fn.__name__, endpoint)) - - while num_tries < (self.config.num_retries + 1): - num_tries += 1 - node = self.get_node() - - logger.debug('Try {} to node {}:{} -- healthy? 
{}'.format(num_tries, node.host, node.port, node.healthy)) - - try: - url = node.url() + endpoint - if kwargs.get('data') and not isinstance(kwargs['data'], str): - kwargs['data'] = json.dumps(kwargs['data']) - - r = fn(url, headers={ApiCall.API_KEY_HEADER_NAME: self.config.api_key}, **kwargs) - - # Treat any status code > 0 and < 500 to be an indication that node is healthy - # We exclude 0 since some clients return 0 when request fails - if 0 < r.status_code < 500: - logger.debug('{}:{} is healthy. Status code: {}'.format(node.host, node.port, r.status_code)) - self.set_node_healthcheck(node, True) - - # We should raise a custom exception if status code is not 20X - if not 200 <= r.status_code < 300: - error_message = r.json().get('message', 'API error.') - # Raised exception will be caught and retried - raise ApiCall.get_exception(r.status_code)(r.status_code, error_message) - - return r.json() if as_json else r.text - except (requests.exceptions.Timeout, requests.exceptions.ConnectionError, requests.exceptions.HTTPError, - requests.exceptions.RequestException, requests.exceptions.SSLError, - HTTPStatus0Error, ServerError, ServiceUnavailable) as e: - # Catch the exception and retry - self.set_node_healthcheck(node, False) - logger.debug('Request to {}:{} failed because of {}'.format(node.host, node.port, e)) - logger.debug('Sleeping for {} and retrying...'.format(self.config.retry_interval_seconds)) - last_exception = e - time.sleep(self.config.retry_interval_seconds) - - logger.debug('No retries left. 
Raising last exception: {}'.format(last_exception)) - raise last_exception - - def set_node_healthcheck(self, node, is_healthy): - node.healthy = is_healthy - node.last_access_ts = int(time.time()) - - def get(self, endpoint, params=None, as_json=True): - params = params or {} - return self.make_request(requests.get, endpoint, as_json, - params=params, - timeout=self.config.connection_timeout_seconds) - - def post(self, endpoint, body, params=None, as_json=True): - params = params or {} - return self.make_request(requests.post, endpoint, as_json, - params=params, data=body, - timeout=self.config.connection_timeout_seconds) - - def put(self, endpoint, body, params=None): - return self.make_request(requests.put, endpoint, True, - params=params, data=body, - timeout=self.config.connection_timeout_seconds) - - def patch(self, endpoint, body, params=None): - return self.make_request(requests.patch, endpoint, True, - params=params, data=body, - timeout=self.config.connection_timeout_seconds) - - def delete(self, endpoint, params=None): - return self.make_request(requests.delete, endpoint, True, - params=params, timeout=self.config.connection_timeout_seconds) diff --git a/typesense/client.py b/typesense/client.py deleted file mode 100644 index aa9563d..0000000 --- a/typesense/client.py +++ /dev/null @@ -1,20 +0,0 @@ -from .aliases import Aliases -from .debug import Debug -from .collections import Collections -from .multi_search import MultiSearch -from .keys import Keys -from .operations import Operations -from .configuration import Configuration -from .api_call import ApiCall - - -class Client(object): - def __init__(self, config_dict): - self.config = Configuration(config_dict) - self.api_call = ApiCall(self.config) - self.collections = Collections(self.api_call) - self.multi_search = MultiSearch(self.api_call) - self.keys = Keys(self.api_call) - self.aliases = Aliases(self.api_call) - self.operations = Operations(self.api_call) - self.debug = Debug(self.api_call) diff 
--git a/typesense/collection.py b/typesense/collection.py deleted file mode 100644 index 0e3cd30..0000000 --- a/typesense/collection.py +++ /dev/null @@ -1,22 +0,0 @@ -from .overrides import Overrides -from .synonyms import Synonyms -from .documents import Documents - - -class Collection(object): - def __init__(self, api_call, name): - self.name = name - self.api_call = api_call - self.documents = Documents(api_call, name) - self.overrides = Overrides(api_call, name) - self.synonyms = Synonyms(api_call, name) - - def _endpoint_path(self): - from .collections import Collections - return u"{0}/{1}".format(Collections.RESOURCE_PATH, self.name) - - def retrieve(self): - return self.api_call.get(self._endpoint_path()) - - def delete(self): - return self.api_call.delete(self._endpoint_path()) diff --git a/typesense/collections.py b/typesense/collections.py deleted file mode 100644 index 4f98eb8..0000000 --- a/typesense/collections.py +++ /dev/null @@ -1,21 +0,0 @@ -from .collection import Collection - - -class Collections(object): - RESOURCE_PATH = '/collections' - - def __init__(self, api_call): - self.api_call = api_call - self.collections = {} - - def __getitem__(self, collection_name): - if collection_name not in self.collections: - self.collections[collection_name] = Collection(self.api_call, collection_name) - - return self.collections.get(collection_name) - - def create(self, schema): - return self.api_call.post(Collections.RESOURCE_PATH, schema) - - def retrieve(self): - return self.api_call.get('{0}'.format(Collections.RESOURCE_PATH)) diff --git a/typesense/configuration.py b/typesense/configuration.py deleted file mode 100644 index 0fc2907..0000000 --- a/typesense/configuration.py +++ /dev/null @@ -1,78 +0,0 @@ -import logging - -from .exceptions import ConfigError - -logger = logging.getLogger(__name__) - - -class Node(object): - def __init__(self, host, port, path, protocol): - self.host = host - self.port = port - self.path = path - self.protocol = protocol 
- - # Used to skip bad hosts - self.healthy = True - - def url(self): - return '{0}://{1}:{2}{3}'.format(self.protocol, self.host, self.port, self.path) - - -class Configuration(object): - def __init__(self, config_dict): - Configuration.show_deprecation_warnings(config_dict) - Configuration.validate_config_dict(config_dict) - - node_dicts = config_dict.get('nodes', []) - - self.nodes = [] - for node_dict in node_dicts: - self.nodes.append( - Node(node_dict['host'], node_dict['port'], node_dict.get('path', ''), node_dict['protocol']) - ) - - self.nearest_node = config_dict.get('nearest_node', None) - - self.api_key = config_dict.get('api_key', '') - self.connection_timeout_seconds = config_dict.get('connection_timeout_seconds', 3.0) - self.num_retries = config_dict.get('num_retries', 3) - self.retry_interval_seconds = config_dict.get('retry_interval_seconds', 1.0) - self.healthcheck_interval_seconds = config_dict.get('healthcheck_interval_seconds', 60) - - @staticmethod - def validate_config_dict(config_dict): - nodes = config_dict.get('nodes', None) - if not nodes: - raise ConfigError('`nodes` is not defined.') - - api_key = config_dict.get('api_key', None) - if not api_key: - raise ConfigError('`api_key` is not defined.') - - for node in nodes: - if not Configuration.validate_node_fields(node): - raise ConfigError('`node` entry must be a dictionary with the following required keys: ' - 'host, port, protocol') - - nearest_node = config_dict.get('nearest_node', None) - if nearest_node and not Configuration.validate_node_fields(nearest_node): - raise ConfigError('`nearest_node` entry must be a dictionary with the following required keys: ' - 'host, port, protocol') - - @staticmethod - def validate_node_fields(node): - expected_fields = {'host', 'port', 'protocol'} - return expected_fields.issubset(node) - - @staticmethod - def show_deprecation_warnings(config_dict): - if config_dict.get('timeout_seconds'): - logger.warn('Deprecation warning: timeout_seconds is now 
renamed to connection_timeout_seconds') - - if config_dict.get('master_node'): - logger.warn('Deprecation warning: master_node is now consolidated to nodes, starting with Typesense Server v0.12') - - if config_dict.get('read_replica_nodes'): - logger.warn('Deprecation warning: read_replica_nodes is now consolidated to nodes, starting with Typesense Server v0.12') - diff --git a/typesense/debug.py b/typesense/debug.py deleted file mode 100644 index e0e8128..0000000 --- a/typesense/debug.py +++ /dev/null @@ -1,9 +0,0 @@ -class Debug(object): - RESOURCE_PATH = '/debug' - - def __init__(self,api_call): - self.api_call = api_call - self.collections = {} - - def retrieve(self): - return self.api_call.get('{0}'.format(Debug.RESOURCE_PATH)) diff --git a/typesense/document.py b/typesense/document.py deleted file mode 100644 index 49c730c..0000000 --- a/typesense/document.py +++ /dev/null @@ -1,20 +0,0 @@ -class Document(object): - def __init__(self, api_call, collection_name, document_id): - self.api_call = api_call - self.collection_name = collection_name - self.document_id = document_id - - def _endpoint_path(self): - from .documents import Documents - from .collections import Collections - return u"{0}/{1}/{2}/{3}".format(Collections.RESOURCE_PATH, self.collection_name, Documents.RESOURCE_PATH, - self.document_id) - - def retrieve(self): - return self.api_call.get(self._endpoint_path()) - - def update(self, document): - return self.api_call.patch(self._endpoint_path(), document) - - def delete(self): - return self.api_call.delete(self._endpoint_path()) diff --git a/typesense/documents.py b/typesense/documents.py deleted file mode 100644 index 7f4ec0e..0000000 --- a/typesense/documents.py +++ /dev/null @@ -1,76 +0,0 @@ -import json -import logging - -from .document import Document - -logger = logging.getLogger(__name__) - - -class Documents(object): - RESOURCE_PATH = 'documents' - - def __init__(self, api_call, collection_name): - self.api_call = api_call - 
self.collection_name = collection_name - self.documents = {} - - def __getitem__(self, document_id): - if document_id not in self.documents: - self.documents[document_id] = Document(self.api_call, self.collection_name, document_id) - - return self.documents[document_id] - - def _endpoint_path(self, action=None): - from .collections import Collections - - action = action or '' - return u"{0}/{1}/{2}/{3}".format(Collections.RESOURCE_PATH, self.collection_name, Documents.RESOURCE_PATH, - action) - - def create(self, document): - return self.api_call.post(self._endpoint_path(), document, {'action': 'create'}) - - def create_many(self, documents, params=None): - logger.warning('`create_many` is deprecated: please use `import_`.') - return self.import_(documents, params) - - def upsert(self, document): - return self.api_call.post(self._endpoint_path(), document, {'action': 'upsert'}) - - def update(self, document): - return self.api_call.post(self._endpoint_path(), document, {'action': 'update'}) - - def import_jsonl(self, documents_jsonl): - logger.warning('`import_jsonl` is deprecated: please use `import_`.') - return self.import_(documents_jsonl) - - # `documents` can be either a list of document objects (or) - # JSONL-formatted string containing multiple documents - def import_(self, documents, params=None): - if isinstance(documents, list): - document_strs = [] - for document in documents: - document_strs.append(json.dumps(document)) - - docs_import = '\n'.join(document_strs) - api_response = self.api_call.post(self._endpoint_path('import'), docs_import, params, as_json=False) - res_obj_strs = api_response.split('\n') - - response_objs = [] - for res_obj_str in res_obj_strs: - response_objs.append(json.dumps(res_obj_str)) - - return response_objs - else: - api_response = self.api_call.post(self._endpoint_path('import'), documents, params, as_json=False) - return api_response - - def export(self): - api_response = self.api_call.get(self._endpoint_path('export'), {}, 
as_json=False) - return api_response - - def search(self, search_parameters): - return self.api_call.get(self._endpoint_path('search'), search_parameters) - - def delete(self, params=None): - return self.api_call.delete(self._endpoint_path(), params) diff --git a/typesense/exceptions.py b/typesense/exceptions.py deleted file mode 100644 index 7d9fed6..0000000 --- a/typesense/exceptions.py +++ /dev/null @@ -1,47 +0,0 @@ -class TypesenseClientError(IOError): - def __init__(self, *args, **kwargs): - super(TypesenseClientError, self).__init__(*args, **kwargs) - - -class ConfigError(TypesenseClientError): - pass - - -class Timeout(TypesenseClientError): - pass - - -class RequestMalformed(TypesenseClientError): - pass - - -class RequestUnauthorized(TypesenseClientError): - pass - - -class RequestForbidden(TypesenseClientError): - pass - - -class ObjectNotFound(TypesenseClientError): - pass - - -class ObjectAlreadyExists(TypesenseClientError): - pass - - -class ObjectUnprocessable(TypesenseClientError): - pass - - -class ServerError(TypesenseClientError): - pass - - -class ServiceUnavailable(TypesenseClientError): - pass - - -class HTTPStatus0Error(TypesenseClientError): - pass diff --git a/typesense/key.py b/typesense/key.py deleted file mode 100644 index d750598..0000000 --- a/typesense/key.py +++ /dev/null @@ -1,16 +0,0 @@ - - -class Key(object): - def __init__(self, api_call, key_id): - self.key_id = key_id - self.api_call = api_call - - def _endpoint_path(self): - from .keys import Keys - return u"{0}/{1}".format(Keys.RESOURCE_PATH, self.key_id) - - def retrieve(self): - return self.api_call.get(self._endpoint_path()) - - def delete(self): - return self.api_call.delete(self._endpoint_path()) diff --git a/typesense/keys.py b/typesense/keys.py deleted file mode 100644 index ea903dc..0000000 --- a/typesense/keys.py +++ /dev/null @@ -1,36 +0,0 @@ -import base64 -import hashlib -import hmac -import json - -from .key import Key - - -class Keys(object): - RESOURCE_PATH = 
'/keys' - - def __init__(self, api_call): - self.api_call = api_call - self.keys = {} - - def __getitem__(self, key_id): - if key_id not in self.keys: - self.keys[key_id] = Key(self.api_call, key_id) - - return self.keys.get(key_id) - - def create(self, schema): - return self.api_call.post(Keys.RESOURCE_PATH, schema) - - def generate_scoped_search_key(self, search_key, parameters): - # Note: only a key generated with the `documents:search` action will be accepted by the server - params_str = json.dumps(parameters) - digest = base64.b64encode( - hmac.new(search_key.encode('utf-8'), params_str.encode('utf-8'), digestmod=hashlib.sha256).digest() - ) - key_prefix = search_key[0:4] - raw_scoped_key = '{}{}{}'.format(digest.decode('utf-8'), key_prefix, params_str) - return base64.b64encode(raw_scoped_key.encode('utf-8')) - - def retrieve(self): - return self.api_call.get('{0}'.format(Keys.RESOURCE_PATH)) diff --git a/typesense/multi_search.py b/typesense/multi_search.py deleted file mode 100644 index 1e80a50..0000000 --- a/typesense/multi_search.py +++ /dev/null @@ -1,9 +0,0 @@ - -class MultiSearch(object): - RESOURCE_PATH = '/multi_search' - - def __init__(self, api_call): - self.api_call = api_call - - def perform(self, search_queries, common_params): - return self.api_call.post(MultiSearch.RESOURCE_PATH, search_queries, common_params) diff --git a/typesense/operations.py b/typesense/operations.py deleted file mode 100644 index cf94319..0000000 --- a/typesense/operations.py +++ /dev/null @@ -1,14 +0,0 @@ - -class Operations(object): - RESOURCE_PATH = '/operations' - - def __init__(self, api_call): - self.api_call = api_call - - @staticmethod - def _endpoint_path(operation_name): - return u"{0}/{1}".format(Operations.RESOURCE_PATH, operation_name) - - def perform(self, operation_name, query_params=None): - query_params = query_params or {} - return self.api_call.post(self._endpoint_path(operation_name), {}, query_params) diff --git a/typesense/override.py 
b/typesense/override.py deleted file mode 100644 index b66006f..0000000 --- a/typesense/override.py +++ /dev/null @@ -1,17 +0,0 @@ -class Override(object): - def __init__(self, api_call, collection_name, override_id): - self.api_call = api_call - self.collection_name = collection_name - self.override_id = override_id - - def _endpoint_path(self): - from .overrides import Overrides - from .collections import Collections - return u"{0}/{1}/{2}/{3}".format(Collections.RESOURCE_PATH, self.collection_name, Overrides.RESOURCE_PATH, - self.override_id) - - def retrieve(self): - return self.api_call.get(self._endpoint_path()) - - def delete(self): - return self.api_call.delete(self._endpoint_path()) diff --git a/typesense/overrides.py b/typesense/overrides.py deleted file mode 100644 index 2b258ff..0000000 --- a/typesense/overrides.py +++ /dev/null @@ -1,28 +0,0 @@ -from .override import Override - - -class Overrides(object): - RESOURCE_PATH = 'overrides' - - def __init__(self, api_call, collection_name): - self.api_call = api_call - self.collection_name = collection_name - self.overrides = {} - - def __getitem__(self, override_id): - if override_id not in self.overrides: - self.overrides[override_id] = Override(self.api_call, self.collection_name, override_id) - - return self.overrides[override_id] - - def _endpoint_path(self, override_id=None): - from .collections import Collections - override_id = override_id or '' - return u"{0}/{1}/{2}/{3}".format(Collections.RESOURCE_PATH, self.collection_name, - Overrides.RESOURCE_PATH, override_id) - - def upsert(self, id, schema): - return self.api_call.put(self._endpoint_path(id), schema) - - def retrieve(self): - return self.api_call.get(self._endpoint_path()) diff --git a/typesense/synonym.py b/typesense/synonym.py deleted file mode 100644 index 27e2e08..0000000 --- a/typesense/synonym.py +++ /dev/null @@ -1,17 +0,0 @@ -class Synonym(object): - def __init__(self, api_call, collection_name, synonym_id): - self.api_call = 
api_call - self.collection_name = collection_name - self.synonym_id = synonym_id - - def _endpoint_path(self): - from .synonyms import Synonyms - from .collections import Collections - return u"{0}/{1}/{2}/{3}".format(Collections.RESOURCE_PATH, self.collection_name, Synonyms.RESOURCE_PATH, - self.synonym_id) - - def retrieve(self): - return self.api_call.get(self._endpoint_path()) - - def delete(self): - return self.api_call.delete(self._endpoint_path()) diff --git a/typesense/synonyms.py b/typesense/synonyms.py deleted file mode 100644 index 3b6cf28..0000000 --- a/typesense/synonyms.py +++ /dev/null @@ -1,28 +0,0 @@ -from .synonym import Synonym - - -class Synonyms(object): - RESOURCE_PATH = 'synonyms' - - def __init__(self, api_call, collection_name): - self.api_call = api_call - self.collection_name = collection_name - self.synonyms = {} - - def __getitem__(self, synonym_id): - if synonym_id not in self.synonyms: - self.synonyms[synonym_id] = Synonym(self.api_call, self.collection_name, synonym_id) - - return self.synonyms[synonym_id] - - def _endpoint_path(self, synonym_id=None): - from .collections import Collections - synonym_id = synonym_id or '' - return u"{0}/{1}/{2}/{3}".format(Collections.RESOURCE_PATH, self.collection_name, - Synonyms.RESOURCE_PATH, synonym_id) - - def upsert(self, id, schema): - return self.api_call.put(self._endpoint_path(id), schema) - - def retrieve(self): - return self.api_call.get(self._endpoint_path()) diff --git a/utils/run-unasync.py b/utils/run-unasync.py new file mode 100644 index 0000000..49feabe --- /dev/null +++ b/utils/run-unasync.py @@ -0,0 +1,114 @@ +import argparse +import difflib +import filecmp +import os +import re +import shutil +from pathlib import Path + +import unasync + + +ASYNC_DIR = Path("src/typesense/async_") +SYNC_DIR = Path("src/typesense/sync") +CHECK_DIR = Path("src/typesense/sync_check") + + +def collect_class_replacements(source_dir: Path) -> dict[str, str]: + replacements: dict[str, str] = {} + 
pattern = re.compile(r"^class\s+(Async\w+)", re.MULTILINE) + for path in source_dir.rglob("*.py"): + text = path.read_text() + for match in pattern.finditer(text): + async_name = match.group(1) + replacements[async_name] = async_name[len("Async") :] + replacements["aclose"] = "close" + return replacements + + +def collect_files(source_dir: Path) -> list[str]: + filepaths: list[str] = [] + for root, _, filenames in os.walk(source_dir): + for filename in filenames: + if filename.endswith(".py"): + filepaths.append(os.path.join(root, filename)) + return filepaths + + +def run_unasync(output_dir: Path, check: bool = False) -> None: + source_dir = ASYNC_DIR.resolve() + target_dir = output_dir.resolve() + target_dir.mkdir(parents=True, exist_ok=True) + + replacements = collect_class_replacements(source_dir) + rule = unasync.Rule( + fromdir=f"{source_dir.as_posix()}/", + todir=f"{target_dir.as_posix()}/", + additional_replacements=replacements, + ) + filepaths = collect_files(source_dir) + unasync.unasync_files(filepaths, [rule]) + if replacements: + for path in target_dir.rglob("*.py"): + text = path.read_text() + new_text = text + for old, new in replacements.items(): + new_text = new_text.replace(old, new) + if new_text != text: + path.write_text(new_text) + + if check: + diffs: list[str] = [] + for path in target_dir.rglob("*.py"): + rel = path.relative_to(target_dir) + expected = SYNC_DIR / rel + if not expected.exists(): + diffs.append(f"Missing in sync: {expected}") + continue + if not filecmp.cmp(path, expected, shallow=False): + diffs.append(f"Differs: {expected}") + if diffs: + header = [ + "Sync sources are out of date.", + "Run: uv run python utils/run-unasync.py", + "", + "Differences:", + ] + details: list[str] = [] + first_diff = next((d for d in diffs if d.startswith("Differs: ")), None) + if first_diff: + mismatch = first_diff.replace("Differs: ", "") + generated = target_dir / Path(mismatch).relative_to(SYNC_DIR) + if generated.exists() and 
Path(mismatch).exists(): + expected_lines = Path(mismatch).read_text().splitlines() + generated_lines = generated.read_text().splitlines() + diff_lines = list( + difflib.unified_diff( + expected_lines, + generated_lines, + fromfile=mismatch, + tofile=str(generated), + lineterm="", + ) + ) + details.extend(["", "Sample diff:", *diff_lines[:200]]) + raise SystemExit("\n".join([*header, *diffs, *details])) + + +def main() -> None: + parser = argparse.ArgumentParser() + parser.add_argument("--check", action="store_true") + args = parser.parse_args() + + if args.check: + if CHECK_DIR.exists(): + shutil.rmtree(CHECK_DIR) + run_unasync(CHECK_DIR, check=True) + shutil.rmtree(CHECK_DIR) + return + + run_unasync(SYNC_DIR) + + +if __name__ == "__main__": + main() diff --git a/uv.lock b/uv.lock new file mode 100644 index 0000000..a1ae574 --- /dev/null +++ b/uv.lock @@ -0,0 +1,1018 @@ +version = 1 +revision = 3 +requires-python = ">=3.9" +resolution-markers = [ + "python_full_version >= '3.10'", + "python_full_version < '3.10'", +] + +[[package]] +name = "anyio" +version = "4.12.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, + { name = "idna" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/16/ce/8a777047513153587e5434fd752e89334ac33e379aa3497db860eeb60377/anyio-4.12.0.tar.gz", hash = "sha256:73c693b567b0c55130c104d0b43a9baf3aa6a31fc6110116509f27bf75e21ec0", size = 228266, upload-time = "2025-11-28T23:37:38.911Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7f/9c/36c5c37947ebfb8c7f22e0eb6e4d188ee2d53aa3880f3f2744fb894f0cb1/anyio-4.12.0-py3-none-any.whl", hash = "sha256:dad2376a628f98eeca4881fc56cd06affd18f659b17a747d3ff0307ced94b1bb", size = 113362, upload-time = "2025-11-28T23:36:57.897Z" }, +] + +[[package]] +name = "backports-asyncio-runner" +version = "1.2.0" +source = { 
registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/8e/ff/70dca7d7cb1cbc0edb2c6cc0c38b65cba36cccc491eca64cabd5fe7f8670/backports_asyncio_runner-1.2.0.tar.gz", hash = "sha256:a5aa7b2b7d8f8bfcaa2b57313f70792df84e32a2a746f585213373f900b42162", size = 69893, upload-time = "2025-07-02T02:27:15.685Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a0/59/76ab57e3fe74484f48a53f8e337171b4a2349e506eabe136d7e01d059086/backports_asyncio_runner-1.2.0-py3-none-any.whl", hash = "sha256:0da0a936a8aeb554eccb426dc55af3ba63bcdc69fa1a600b5bb305413a4477b5", size = 12313, upload-time = "2025-07-02T02:27:14.263Z" }, +] + +[[package]] +name = "certifi" +version = "2025.11.12" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/8c/58f469717fa48465e4a50c014a0400602d3c437d7c0c468e17ada824da3a/certifi-2025.11.12.tar.gz", hash = "sha256:d8ab5478f2ecd78af242878415affce761ca6bc54a22a27e026d7c25357c3316", size = 160538, upload-time = "2025-11-12T02:54:51.517Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/70/7d/9bc192684cea499815ff478dfcdc13835ddf401365057044fb721ec6bddb/certifi-2025.11.12-py3-none-any.whl", hash = "sha256:97de8790030bbd5c2d96b7ec782fc2f7820ef8dba6db909ccf95449f2d062d4b", size = 159438, upload-time = "2025-11-12T02:54:49.735Z" }, +] + +[[package]] +name = "charset-normalizer" +version = "3.4.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/13/69/33ddede1939fdd074bce5434295f38fae7136463422fe4fd3e0e89b98062/charset_normalizer-3.4.4.tar.gz", hash = "sha256:94537985111c35f28720e43603b8e7b43a6ecfb2ce1d3058bbe955b73404e21a", size = 129418, upload-time = "2025-10-14T04:42:32.879Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1f/b8/6d51fc1d52cbd52cd4ccedd5b5b2f0f6a11bbf6765c782298b0f3e808541/charset_normalizer-3.4.4-cp310-cp310-macosx_10_9_universal2.whl", hash = 
"sha256:e824f1492727fa856dd6eda4f7cee25f8518a12f3c4a56a74e8095695089cf6d", size = 209709, upload-time = "2025-10-14T04:40:11.385Z" }, + { url = "https://files.pythonhosted.org/packages/5c/af/1f9d7f7faafe2ddfb6f72a2e07a548a629c61ad510fe60f9630309908fef/charset_normalizer-3.4.4-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4bd5d4137d500351a30687c2d3971758aac9a19208fc110ccb9d7188fbe709e8", size = 148814, upload-time = "2025-10-14T04:40:13.135Z" }, + { url = "https://files.pythonhosted.org/packages/79/3d/f2e3ac2bbc056ca0c204298ea4e3d9db9b4afe437812638759db2c976b5f/charset_normalizer-3.4.4-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:027f6de494925c0ab2a55eab46ae5129951638a49a34d87f4c3eda90f696b4ad", size = 144467, upload-time = "2025-10-14T04:40:14.728Z" }, + { url = "https://files.pythonhosted.org/packages/ec/85/1bf997003815e60d57de7bd972c57dc6950446a3e4ccac43bc3070721856/charset_normalizer-3.4.4-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f820802628d2694cb7e56db99213f930856014862f3fd943d290ea8438d07ca8", size = 162280, upload-time = "2025-10-14T04:40:16.14Z" }, + { url = "https://files.pythonhosted.org/packages/3e/8e/6aa1952f56b192f54921c436b87f2aaf7c7a7c3d0d1a765547d64fd83c13/charset_normalizer-3.4.4-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:798d75d81754988d2565bff1b97ba5a44411867c0cf32b77a7e8f8d84796b10d", size = 159454, upload-time = "2025-10-14T04:40:17.567Z" }, + { url = "https://files.pythonhosted.org/packages/36/3b/60cbd1f8e93aa25d1c669c649b7a655b0b5fb4c571858910ea9332678558/charset_normalizer-3.4.4-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9d1bb833febdff5c8927f922386db610b49db6e0d4f4ee29601d71e7c2694313", size = 153609, upload-time = "2025-10-14T04:40:19.08Z" }, + { url = 
"https://files.pythonhosted.org/packages/64/91/6a13396948b8fd3c4b4fd5bc74d045f5637d78c9675585e8e9fbe5636554/charset_normalizer-3.4.4-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:9cd98cdc06614a2f768d2b7286d66805f94c48cde050acdbbb7db2600ab3197e", size = 151849, upload-time = "2025-10-14T04:40:20.607Z" }, + { url = "https://files.pythonhosted.org/packages/b7/7a/59482e28b9981d105691e968c544cc0df3b7d6133152fb3dcdc8f135da7a/charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:077fbb858e903c73f6c9db43374fd213b0b6a778106bc7032446a8e8b5b38b93", size = 151586, upload-time = "2025-10-14T04:40:21.719Z" }, + { url = "https://files.pythonhosted.org/packages/92/59/f64ef6a1c4bdd2baf892b04cd78792ed8684fbc48d4c2afe467d96b4df57/charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:244bfb999c71b35de57821b8ea746b24e863398194a4014e4c76adc2bbdfeff0", size = 145290, upload-time = "2025-10-14T04:40:23.069Z" }, + { url = "https://files.pythonhosted.org/packages/6b/63/3bf9f279ddfa641ffa1962b0db6a57a9c294361cc2f5fcac997049a00e9c/charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:64b55f9dce520635f018f907ff1b0df1fdc31f2795a922fb49dd14fbcdf48c84", size = 163663, upload-time = "2025-10-14T04:40:24.17Z" }, + { url = "https://files.pythonhosted.org/packages/ed/09/c9e38fc8fa9e0849b172b581fd9803bdf6e694041127933934184e19f8c3/charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:faa3a41b2b66b6e50f84ae4a68c64fcd0c44355741c6374813a800cd6695db9e", size = 151964, upload-time = "2025-10-14T04:40:25.368Z" }, + { url = "https://files.pythonhosted.org/packages/d2/d1/d28b747e512d0da79d8b6a1ac18b7ab2ecfd81b2944c4c710e166d8dd09c/charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:6515f3182dbe4ea06ced2d9e8666d97b46ef4c75e326b79bb624110f122551db", size = 161064, upload-time = "2025-10-14T04:40:26.806Z" }, + { url = 
"https://files.pythonhosted.org/packages/bb/9a/31d62b611d901c3b9e5500c36aab0ff5eb442043fb3a1c254200d3d397d9/charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cc00f04ed596e9dc0da42ed17ac5e596c6ccba999ba6bd92b0e0aef2f170f2d6", size = 155015, upload-time = "2025-10-14T04:40:28.284Z" }, + { url = "https://files.pythonhosted.org/packages/1f/f3/107e008fa2bff0c8b9319584174418e5e5285fef32f79d8ee6a430d0039c/charset_normalizer-3.4.4-cp310-cp310-win32.whl", hash = "sha256:f34be2938726fc13801220747472850852fe6b1ea75869a048d6f896838c896f", size = 99792, upload-time = "2025-10-14T04:40:29.613Z" }, + { url = "https://files.pythonhosted.org/packages/eb/66/e396e8a408843337d7315bab30dbf106c38966f1819f123257f5520f8a96/charset_normalizer-3.4.4-cp310-cp310-win_amd64.whl", hash = "sha256:a61900df84c667873b292c3de315a786dd8dac506704dea57bc957bd31e22c7d", size = 107198, upload-time = "2025-10-14T04:40:30.644Z" }, + { url = "https://files.pythonhosted.org/packages/b5/58/01b4f815bf0312704c267f2ccb6e5d42bcc7752340cd487bc9f8c3710597/charset_normalizer-3.4.4-cp310-cp310-win_arm64.whl", hash = "sha256:cead0978fc57397645f12578bfd2d5ea9138ea0fac82b2f63f7f7c6877986a69", size = 100262, upload-time = "2025-10-14T04:40:32.108Z" }, + { url = "https://files.pythonhosted.org/packages/ed/27/c6491ff4954e58a10f69ad90aca8a1b6fe9c5d3c6f380907af3c37435b59/charset_normalizer-3.4.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6e1fcf0720908f200cd21aa4e6750a48ff6ce4afe7ff5a79a90d5ed8a08296f8", size = 206988, upload-time = "2025-10-14T04:40:33.79Z" }, + { url = "https://files.pythonhosted.org/packages/94/59/2e87300fe67ab820b5428580a53cad894272dbb97f38a7a814a2a1ac1011/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5f819d5fe9234f9f82d75bdfa9aef3a3d72c4d24a6e57aeaebba32a704553aa0", size = 147324, upload-time = "2025-10-14T04:40:34.961Z" }, + { url = 
"https://files.pythonhosted.org/packages/07/fb/0cf61dc84b2b088391830f6274cb57c82e4da8bbc2efeac8c025edb88772/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:a59cb51917aa591b1c4e6a43c132f0cdc3c76dbad6155df4e28ee626cc77a0a3", size = 142742, upload-time = "2025-10-14T04:40:36.105Z" }, + { url = "https://files.pythonhosted.org/packages/62/8b/171935adf2312cd745d290ed93cf16cf0dfe320863ab7cbeeae1dcd6535f/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:8ef3c867360f88ac904fd3f5e1f902f13307af9052646963ee08ff4f131adafc", size = 160863, upload-time = "2025-10-14T04:40:37.188Z" }, + { url = "https://files.pythonhosted.org/packages/09/73/ad875b192bda14f2173bfc1bc9a55e009808484a4b256748d931b6948442/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d9e45d7faa48ee908174d8fe84854479ef838fc6a705c9315372eacbc2f02897", size = 157837, upload-time = "2025-10-14T04:40:38.435Z" }, + { url = "https://files.pythonhosted.org/packages/6d/fc/de9cce525b2c5b94b47c70a4b4fb19f871b24995c728e957ee68ab1671ea/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:840c25fb618a231545cbab0564a799f101b63b9901f2569faecd6b222ac72381", size = 151550, upload-time = "2025-10-14T04:40:40.053Z" }, + { url = "https://files.pythonhosted.org/packages/55/c2/43edd615fdfba8c6f2dfbd459b25a6b3b551f24ea21981e23fb768503ce1/charset_normalizer-3.4.4-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ca5862d5b3928c4940729dacc329aa9102900382fea192fc5e52eb69d6093815", size = 149162, upload-time = "2025-10-14T04:40:41.163Z" }, + { url = "https://files.pythonhosted.org/packages/03/86/bde4ad8b4d0e9429a4e82c1e8f5c659993a9a863ad62c7df05cf7b678d75/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:d9c7f57c3d666a53421049053eaacdd14bbd0a528e2186fcb2e672effd053bb0", size = 150019, upload-time = "2025-10-14T04:40:42.276Z" }, + { url = "https://files.pythonhosted.org/packages/1f/86/a151eb2af293a7e7bac3a739b81072585ce36ccfb4493039f49f1d3cae8c/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:277e970e750505ed74c832b4bf75dac7476262ee2a013f5574dd49075879e161", size = 143310, upload-time = "2025-10-14T04:40:43.439Z" }, + { url = "https://files.pythonhosted.org/packages/b5/fe/43dae6144a7e07b87478fdfc4dbe9efd5defb0e7ec29f5f58a55aeef7bf7/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:31fd66405eaf47bb62e8cd575dc621c56c668f27d46a61d975a249930dd5e2a4", size = 162022, upload-time = "2025-10-14T04:40:44.547Z" }, + { url = "https://files.pythonhosted.org/packages/80/e6/7aab83774f5d2bca81f42ac58d04caf44f0cc2b65fc6db2b3b2e8a05f3b3/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:0d3d8f15c07f86e9ff82319b3d9ef6f4bf907608f53fe9d92b28ea9ae3d1fd89", size = 149383, upload-time = "2025-10-14T04:40:46.018Z" }, + { url = "https://files.pythonhosted.org/packages/4f/e8/b289173b4edae05c0dde07f69f8db476a0b511eac556dfe0d6bda3c43384/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:9f7fcd74d410a36883701fafa2482a6af2ff5ba96b9a620e9e0721e28ead5569", size = 159098, upload-time = "2025-10-14T04:40:47.081Z" }, + { url = "https://files.pythonhosted.org/packages/d8/df/fe699727754cae3f8478493c7f45f777b17c3ef0600e28abfec8619eb49c/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ebf3e58c7ec8a8bed6d66a75d7fb37b55e5015b03ceae72a8e7c74495551e224", size = 152991, upload-time = "2025-10-14T04:40:48.246Z" }, + { url = "https://files.pythonhosted.org/packages/1a/86/584869fe4ddb6ffa3bd9f491b87a01568797fb9bd8933f557dba9771beaf/charset_normalizer-3.4.4-cp311-cp311-win32.whl", hash = "sha256:eecbc200c7fd5ddb9a7f16c7decb07b566c29fa2161a16cf67b8d068bd21690a", 
size = 99456, upload-time = "2025-10-14T04:40:49.376Z" }, + { url = "https://files.pythonhosted.org/packages/65/f6/62fdd5feb60530f50f7e38b4f6a1d5203f4d16ff4f9f0952962c044e919a/charset_normalizer-3.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:5ae497466c7901d54b639cf42d5b8c1b6a4fead55215500d2f486d34db48d016", size = 106978, upload-time = "2025-10-14T04:40:50.844Z" }, + { url = "https://files.pythonhosted.org/packages/7a/9d/0710916e6c82948b3be62d9d398cb4fcf4e97b56d6a6aeccd66c4b2f2bd5/charset_normalizer-3.4.4-cp311-cp311-win_arm64.whl", hash = "sha256:65e2befcd84bc6f37095f5961e68a6f077bf44946771354a28ad434c2cce0ae1", size = 99969, upload-time = "2025-10-14T04:40:52.272Z" }, + { url = "https://files.pythonhosted.org/packages/f3/85/1637cd4af66fa687396e757dec650f28025f2a2f5a5531a3208dc0ec43f2/charset_normalizer-3.4.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0a98e6759f854bd25a58a73fa88833fba3b7c491169f86ce1180c948ab3fd394", size = 208425, upload-time = "2025-10-14T04:40:53.353Z" }, + { url = "https://files.pythonhosted.org/packages/9d/6a/04130023fef2a0d9c62d0bae2649b69f7b7d8d24ea5536feef50551029df/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b5b290ccc2a263e8d185130284f8501e3e36c5e02750fc6b6bdeb2e9e96f1e25", size = 148162, upload-time = "2025-10-14T04:40:54.558Z" }, + { url = "https://files.pythonhosted.org/packages/78/29/62328d79aa60da22c9e0b9a66539feae06ca0f5a4171ac4f7dc285b83688/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74bb723680f9f7a6234dcf67aea57e708ec1fbdf5699fb91dfd6f511b0a320ef", size = 144558, upload-time = "2025-10-14T04:40:55.677Z" }, + { url = "https://files.pythonhosted.org/packages/86/bb/b32194a4bf15b88403537c2e120b817c61cd4ecffa9b6876e941c3ee38fe/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = 
"sha256:f1e34719c6ed0b92f418c7c780480b26b5d9c50349e9a9af7d76bf757530350d", size = 161497, upload-time = "2025-10-14T04:40:57.217Z" }, + { url = "https://files.pythonhosted.org/packages/19/89/a54c82b253d5b9b111dc74aca196ba5ccfcca8242d0fb64146d4d3183ff1/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2437418e20515acec67d86e12bf70056a33abdacb5cb1655042f6538d6b085a8", size = 159240, upload-time = "2025-10-14T04:40:58.358Z" }, + { url = "https://files.pythonhosted.org/packages/c0/10/d20b513afe03acc89ec33948320a5544d31f21b05368436d580dec4e234d/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:11d694519d7f29d6cd09f6ac70028dba10f92f6cdd059096db198c283794ac86", size = 153471, upload-time = "2025-10-14T04:40:59.468Z" }, + { url = "https://files.pythonhosted.org/packages/61/fa/fbf177b55bdd727010f9c0a3c49eefa1d10f960e5f09d1d887bf93c2e698/charset_normalizer-3.4.4-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ac1c4a689edcc530fc9d9aa11f5774b9e2f33f9a0c6a57864e90908f5208d30a", size = 150864, upload-time = "2025-10-14T04:41:00.623Z" }, + { url = "https://files.pythonhosted.org/packages/05/12/9fbc6a4d39c0198adeebbde20b619790e9236557ca59fc40e0e3cebe6f40/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:21d142cc6c0ec30d2efee5068ca36c128a30b0f2c53c1c07bd78cb6bc1d3be5f", size = 150647, upload-time = "2025-10-14T04:41:01.754Z" }, + { url = "https://files.pythonhosted.org/packages/ad/1f/6a9a593d52e3e8c5d2b167daf8c6b968808efb57ef4c210acb907c365bc4/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:5dbe56a36425d26d6cfb40ce79c314a2e4dd6211d51d6d2191c00bed34f354cc", size = 145110, upload-time = "2025-10-14T04:41:03.231Z" }, + { url = 
"https://files.pythonhosted.org/packages/30/42/9a52c609e72471b0fc54386dc63c3781a387bb4fe61c20231a4ebcd58bdd/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5bfbb1b9acf3334612667b61bd3002196fe2a1eb4dd74d247e0f2a4d50ec9bbf", size = 162839, upload-time = "2025-10-14T04:41:04.715Z" }, + { url = "https://files.pythonhosted.org/packages/c4/5b/c0682bbf9f11597073052628ddd38344a3d673fda35a36773f7d19344b23/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:d055ec1e26e441f6187acf818b73564e6e6282709e9bcb5b63f5b23068356a15", size = 150667, upload-time = "2025-10-14T04:41:05.827Z" }, + { url = "https://files.pythonhosted.org/packages/e4/24/a41afeab6f990cf2daf6cb8c67419b63b48cf518e4f56022230840c9bfb2/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:af2d8c67d8e573d6de5bc30cdb27e9b95e49115cd9baad5ddbd1a6207aaa82a9", size = 160535, upload-time = "2025-10-14T04:41:06.938Z" }, + { url = "https://files.pythonhosted.org/packages/2a/e5/6a4ce77ed243c4a50a1fecca6aaaab419628c818a49434be428fe24c9957/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:780236ac706e66881f3b7f2f32dfe90507a09e67d1d454c762cf642e6e1586e0", size = 154816, upload-time = "2025-10-14T04:41:08.101Z" }, + { url = "https://files.pythonhosted.org/packages/a8/ef/89297262b8092b312d29cdb2517cb1237e51db8ecef2e9af5edbe7b683b1/charset_normalizer-3.4.4-cp312-cp312-win32.whl", hash = "sha256:5833d2c39d8896e4e19b689ffc198f08ea58116bee26dea51e362ecc7cd3ed26", size = 99694, upload-time = "2025-10-14T04:41:09.23Z" }, + { url = "https://files.pythonhosted.org/packages/3d/2d/1e5ed9dd3b3803994c155cd9aacb60c82c331bad84daf75bcb9c91b3295e/charset_normalizer-3.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:a79cfe37875f822425b89a82333404539ae63dbdddf97f84dcbc3d339aae9525", size = 107131, upload-time = "2025-10-14T04:41:10.467Z" }, + { url = 
"https://files.pythonhosted.org/packages/d0/d9/0ed4c7098a861482a7b6a95603edce4c0d9db2311af23da1fb2b75ec26fc/charset_normalizer-3.4.4-cp312-cp312-win_arm64.whl", hash = "sha256:376bec83a63b8021bb5c8ea75e21c4ccb86e7e45ca4eb81146091b56599b80c3", size = 100390, upload-time = "2025-10-14T04:41:11.915Z" }, + { url = "https://files.pythonhosted.org/packages/97/45/4b3a1239bbacd321068ea6e7ac28875b03ab8bc0aa0966452db17cd36714/charset_normalizer-3.4.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:e1f185f86a6f3403aa2420e815904c67b2f9ebc443f045edd0de921108345794", size = 208091, upload-time = "2025-10-14T04:41:13.346Z" }, + { url = "https://files.pythonhosted.org/packages/7d/62/73a6d7450829655a35bb88a88fca7d736f9882a27eacdca2c6d505b57e2e/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b39f987ae8ccdf0d2642338faf2abb1862340facc796048b604ef14919e55ed", size = 147936, upload-time = "2025-10-14T04:41:14.461Z" }, + { url = "https://files.pythonhosted.org/packages/89/c5/adb8c8b3d6625bef6d88b251bbb0d95f8205831b987631ab0c8bb5d937c2/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3162d5d8ce1bb98dd51af660f2121c55d0fa541b46dff7bb9b9f86ea1d87de72", size = 144180, upload-time = "2025-10-14T04:41:15.588Z" }, + { url = "https://files.pythonhosted.org/packages/91/ed/9706e4070682d1cc219050b6048bfd293ccf67b3d4f5a4f39207453d4b99/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:81d5eb2a312700f4ecaa977a8235b634ce853200e828fbadf3a9c50bab278328", size = 161346, upload-time = "2025-10-14T04:41:16.738Z" }, + { url = "https://files.pythonhosted.org/packages/d5/0d/031f0d95e4972901a2f6f09ef055751805ff541511dc1252ba3ca1f80cf5/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:5bd2293095d766545ec1a8f612559f6b40abc0eb18bb2f5d1171872d34036ede", size = 158874, upload-time = "2025-10-14T04:41:17.923Z" }, + { url = "https://files.pythonhosted.org/packages/f5/83/6ab5883f57c9c801ce5e5677242328aa45592be8a00644310a008d04f922/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a8a8b89589086a25749f471e6a900d3f662d1d3b6e2e59dcecf787b1cc3a1894", size = 153076, upload-time = "2025-10-14T04:41:19.106Z" }, + { url = "https://files.pythonhosted.org/packages/75/1e/5ff781ddf5260e387d6419959ee89ef13878229732732ee73cdae01800f2/charset_normalizer-3.4.4-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc7637e2f80d8530ee4a78e878bce464f70087ce73cf7c1caf142416923b98f1", size = 150601, upload-time = "2025-10-14T04:41:20.245Z" }, + { url = "https://files.pythonhosted.org/packages/d7/57/71be810965493d3510a6ca79b90c19e48696fb1ff964da319334b12677f0/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f8bf04158c6b607d747e93949aa60618b61312fe647a6369f88ce2ff16043490", size = 150376, upload-time = "2025-10-14T04:41:21.398Z" }, + { url = "https://files.pythonhosted.org/packages/e5/d5/c3d057a78c181d007014feb7e9f2e65905a6c4ef182c0ddf0de2924edd65/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:554af85e960429cf30784dd47447d5125aaa3b99a6f0683589dbd27e2f45da44", size = 144825, upload-time = "2025-10-14T04:41:22.583Z" }, + { url = "https://files.pythonhosted.org/packages/e6/8c/d0406294828d4976f275ffbe66f00266c4b3136b7506941d87c00cab5272/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:74018750915ee7ad843a774364e13a3db91682f26142baddf775342c3f5b1133", size = 162583, upload-time = "2025-10-14T04:41:23.754Z" }, + { url = 
"https://files.pythonhosted.org/packages/d7/24/e2aa1f18c8f15c4c0e932d9287b8609dd30ad56dbe41d926bd846e22fb8d/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:c0463276121fdee9c49b98908b3a89c39be45d86d1dbaa22957e38f6321d4ce3", size = 150366, upload-time = "2025-10-14T04:41:25.27Z" }, + { url = "https://files.pythonhosted.org/packages/e4/5b/1e6160c7739aad1e2df054300cc618b06bf784a7a164b0f238360721ab86/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:362d61fd13843997c1c446760ef36f240cf81d3ebf74ac62652aebaf7838561e", size = 160300, upload-time = "2025-10-14T04:41:26.725Z" }, + { url = "https://files.pythonhosted.org/packages/7a/10/f882167cd207fbdd743e55534d5d9620e095089d176d55cb22d5322f2afd/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9a26f18905b8dd5d685d6d07b0cdf98a79f3c7a918906af7cc143ea2e164c8bc", size = 154465, upload-time = "2025-10-14T04:41:28.322Z" }, + { url = "https://files.pythonhosted.org/packages/89/66/c7a9e1b7429be72123441bfdbaf2bc13faab3f90b933f664db506dea5915/charset_normalizer-3.4.4-cp313-cp313-win32.whl", hash = "sha256:9b35f4c90079ff2e2edc5b26c0c77925e5d2d255c42c74fdb70fb49b172726ac", size = 99404, upload-time = "2025-10-14T04:41:29.95Z" }, + { url = "https://files.pythonhosted.org/packages/c4/26/b9924fa27db384bdcd97ab83b4f0a8058d96ad9626ead570674d5e737d90/charset_normalizer-3.4.4-cp313-cp313-win_amd64.whl", hash = "sha256:b435cba5f4f750aa6c0a0d92c541fb79f69a387c91e61f1795227e4ed9cece14", size = 107092, upload-time = "2025-10-14T04:41:31.188Z" }, + { url = "https://files.pythonhosted.org/packages/af/8f/3ed4bfa0c0c72a7ca17f0380cd9e4dd842b09f664e780c13cff1dcf2ef1b/charset_normalizer-3.4.4-cp313-cp313-win_arm64.whl", hash = "sha256:542d2cee80be6f80247095cc36c418f7bddd14f4a6de45af91dfad36d817bba2", size = 100408, upload-time = "2025-10-14T04:41:32.624Z" }, + { url = 
"https://files.pythonhosted.org/packages/2a/35/7051599bd493e62411d6ede36fd5af83a38f37c4767b92884df7301db25d/charset_normalizer-3.4.4-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:da3326d9e65ef63a817ecbcc0df6e94463713b754fe293eaa03da99befb9a5bd", size = 207746, upload-time = "2025-10-14T04:41:33.773Z" }, + { url = "https://files.pythonhosted.org/packages/10/9a/97c8d48ef10d6cd4fcead2415523221624bf58bcf68a802721a6bc807c8f/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8af65f14dc14a79b924524b1e7fffe304517b2bff5a58bf64f30b98bbc5079eb", size = 147889, upload-time = "2025-10-14T04:41:34.897Z" }, + { url = "https://files.pythonhosted.org/packages/10/bf/979224a919a1b606c82bd2c5fa49b5c6d5727aa47b4312bb27b1734f53cd/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74664978bb272435107de04e36db5a9735e78232b85b77d45cfb38f758efd33e", size = 143641, upload-time = "2025-10-14T04:41:36.116Z" }, + { url = "https://files.pythonhosted.org/packages/ba/33/0ad65587441fc730dc7bd90e9716b30b4702dc7b617e6ba4997dc8651495/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:752944c7ffbfdd10c074dc58ec2d5a8a4cd9493b314d367c14d24c17684ddd14", size = 160779, upload-time = "2025-10-14T04:41:37.229Z" }, + { url = "https://files.pythonhosted.org/packages/67/ed/331d6b249259ee71ddea93f6f2f0a56cfebd46938bde6fcc6f7b9a3d0e09/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d1f13550535ad8cff21b8d757a3257963e951d96e20ec82ab44bc64aeb62a191", size = 159035, upload-time = "2025-10-14T04:41:38.368Z" }, + { url = 
"https://files.pythonhosted.org/packages/67/ff/f6b948ca32e4f2a4576aa129d8bed61f2e0543bf9f5f2b7fc3758ed005c9/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ecaae4149d99b1c9e7b88bb03e3221956f68fd6d50be2ef061b2381b61d20838", size = 152542, upload-time = "2025-10-14T04:41:39.862Z" }, + { url = "https://files.pythonhosted.org/packages/16/85/276033dcbcc369eb176594de22728541a925b2632f9716428c851b149e83/charset_normalizer-3.4.4-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:cb6254dc36b47a990e59e1068afacdcd02958bdcce30bb50cc1700a8b9d624a6", size = 149524, upload-time = "2025-10-14T04:41:41.319Z" }, + { url = "https://files.pythonhosted.org/packages/9e/f2/6a2a1f722b6aba37050e626530a46a68f74e63683947a8acff92569f979a/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:c8ae8a0f02f57a6e61203a31428fa1d677cbe50c93622b4149d5c0f319c1d19e", size = 150395, upload-time = "2025-10-14T04:41:42.539Z" }, + { url = "https://files.pythonhosted.org/packages/60/bb/2186cb2f2bbaea6338cad15ce23a67f9b0672929744381e28b0592676824/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:47cc91b2f4dd2833fddaedd2893006b0106129d4b94fdb6af1f4ce5a9965577c", size = 143680, upload-time = "2025-10-14T04:41:43.661Z" }, + { url = "https://files.pythonhosted.org/packages/7d/a5/bf6f13b772fbb2a90360eb620d52ed8f796f3c5caee8398c3b2eb7b1c60d/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:82004af6c302b5d3ab2cfc4cc5f29db16123b1a8417f2e25f9066f91d4411090", size = 162045, upload-time = "2025-10-14T04:41:44.821Z" }, + { url = "https://files.pythonhosted.org/packages/df/c5/d1be898bf0dc3ef9030c3825e5d3b83f2c528d207d246cbabe245966808d/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:2b7d8f6c26245217bd2ad053761201e9f9680f8ce52f0fcd8d0755aeae5b2152", size = 149687, upload-time = "2025-10-14T04:41:46.442Z" }, 
+ { url = "https://files.pythonhosted.org/packages/a5/42/90c1f7b9341eef50c8a1cb3f098ac43b0508413f33affd762855f67a410e/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:799a7a5e4fb2d5898c60b640fd4981d6a25f1c11790935a44ce38c54e985f828", size = 160014, upload-time = "2025-10-14T04:41:47.631Z" }, + { url = "https://files.pythonhosted.org/packages/76/be/4d3ee471e8145d12795ab655ece37baed0929462a86e72372fd25859047c/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:99ae2cffebb06e6c22bdc25801d7b30f503cc87dbd283479e7b606f70aff57ec", size = 154044, upload-time = "2025-10-14T04:41:48.81Z" }, + { url = "https://files.pythonhosted.org/packages/b0/6f/8f7af07237c34a1defe7defc565a9bc1807762f672c0fde711a4b22bf9c0/charset_normalizer-3.4.4-cp314-cp314-win32.whl", hash = "sha256:f9d332f8c2a2fcbffe1378594431458ddbef721c1769d78e2cbc06280d8155f9", size = 99940, upload-time = "2025-10-14T04:41:49.946Z" }, + { url = "https://files.pythonhosted.org/packages/4b/51/8ade005e5ca5b0d80fb4aff72a3775b325bdc3d27408c8113811a7cbe640/charset_normalizer-3.4.4-cp314-cp314-win_amd64.whl", hash = "sha256:8a6562c3700cce886c5be75ade4a5db4214fda19fede41d9792d100288d8f94c", size = 107104, upload-time = "2025-10-14T04:41:51.051Z" }, + { url = "https://files.pythonhosted.org/packages/da/5f/6b8f83a55bb8278772c5ae54a577f3099025f9ade59d0136ac24a0df4bde/charset_normalizer-3.4.4-cp314-cp314-win_arm64.whl", hash = "sha256:de00632ca48df9daf77a2c65a484531649261ec9f25489917f09e455cb09ddb2", size = 100743, upload-time = "2025-10-14T04:41:52.122Z" }, + { url = "https://files.pythonhosted.org/packages/46/7c/0c4760bccf082737ca7ab84a4c2034fcc06b1f21cf3032ea98bd6feb1725/charset_normalizer-3.4.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:a9768c477b9d7bd54bc0c86dbaebdec6f03306675526c9927c0e8a04e8f94af9", size = 209609, upload-time = "2025-10-14T04:42:10.922Z" }, + { url = 
"https://files.pythonhosted.org/packages/bb/a4/69719daef2f3d7f1819de60c9a6be981b8eeead7542d5ec4440f3c80e111/charset_normalizer-3.4.4-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1bee1e43c28aa63cb16e5c14e582580546b08e535299b8b6158a7c9c768a1f3d", size = 149029, upload-time = "2025-10-14T04:42:12.38Z" }, + { url = "https://files.pythonhosted.org/packages/e6/21/8d4e1d6c1e6070d3672908b8e4533a71b5b53e71d16828cc24d0efec564c/charset_normalizer-3.4.4-cp39-cp39-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:fd44c878ea55ba351104cb93cc85e74916eb8fa440ca7903e57575e97394f608", size = 144580, upload-time = "2025-10-14T04:42:13.549Z" }, + { url = "https://files.pythonhosted.org/packages/a7/0a/a616d001b3f25647a9068e0b9199f697ce507ec898cacb06a0d5a1617c99/charset_normalizer-3.4.4-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:0f04b14ffe5fdc8c4933862d8306109a2c51e0704acfa35d51598eb45a1e89fc", size = 162340, upload-time = "2025-10-14T04:42:14.892Z" }, + { url = "https://files.pythonhosted.org/packages/85/93/060b52deb249a5450460e0585c88a904a83aec474ab8e7aba787f45e79f2/charset_normalizer-3.4.4-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:cd09d08005f958f370f539f186d10aec3377d55b9eeb0d796025d4886119d76e", size = 159619, upload-time = "2025-10-14T04:42:16.676Z" }, + { url = "https://files.pythonhosted.org/packages/dd/21/0274deb1cc0632cd587a9a0ec6b4674d9108e461cb4cd40d457adaeb0564/charset_normalizer-3.4.4-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4fe7859a4e3e8457458e2ff592f15ccb02f3da787fcd31e0183879c3ad4692a1", size = 153980, upload-time = "2025-10-14T04:42:17.917Z" }, + { url = 
"https://files.pythonhosted.org/packages/28/2b/e3d7d982858dccc11b31906976323d790dded2017a0572f093ff982d692f/charset_normalizer-3.4.4-cp39-cp39-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:fa09f53c465e532f4d3db095e0c55b615f010ad81803d383195b6b5ca6cbf5f3", size = 152174, upload-time = "2025-10-14T04:42:19.018Z" }, + { url = "https://files.pythonhosted.org/packages/6e/ff/4a269f8e35f1e58b2df52c131a1fa019acb7ef3f8697b7d464b07e9b492d/charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:7fa17817dc5625de8a027cb8b26d9fefa3ea28c8253929b8d6649e705d2835b6", size = 151666, upload-time = "2025-10-14T04:42:20.171Z" }, + { url = "https://files.pythonhosted.org/packages/da/c9/ec39870f0b330d58486001dd8e532c6b9a905f5765f58a6f8204926b4a93/charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:5947809c8a2417be3267efc979c47d76a079758166f7d43ef5ae8e9f92751f88", size = 145550, upload-time = "2025-10-14T04:42:21.324Z" }, + { url = "https://files.pythonhosted.org/packages/75/8f/d186ab99e40e0ed9f82f033d6e49001701c81244d01905dd4a6924191a30/charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:4902828217069c3c5c71094537a8e623f5d097858ac6ca8252f7b4d10b7560f1", size = 163721, upload-time = "2025-10-14T04:42:22.46Z" }, + { url = "https://files.pythonhosted.org/packages/96/b1/6047663b9744df26a7e479ac1e77af7134b1fcf9026243bb48ee2d18810f/charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_riscv64.whl", hash = "sha256:7c308f7e26e4363d79df40ca5b2be1c6ba9f02bdbccfed5abddb7859a6ce72cf", size = 152127, upload-time = "2025-10-14T04:42:23.712Z" }, + { url = "https://files.pythonhosted.org/packages/59/78/e5a6eac9179f24f704d1be67d08704c3c6ab9f00963963524be27c18ed87/charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:2c9d3c380143a1fedbff95a312aa798578371eb29da42106a29019368a475318", size = 161175, upload-time = "2025-10-14T04:42:24.87Z" }, + { url = 
"https://files.pythonhosted.org/packages/e5/43/0e626e42d54dd2f8dd6fc5e1c5ff00f05fbca17cb699bedead2cae69c62f/charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:cb01158d8b88ee68f15949894ccc6712278243d95f344770fa7593fa2d94410c", size = 155375, upload-time = "2025-10-14T04:42:27.246Z" }, + { url = "https://files.pythonhosted.org/packages/e9/91/d9615bf2e06f35e4997616ff31248c3657ed649c5ab9d35ea12fce54e380/charset_normalizer-3.4.4-cp39-cp39-win32.whl", hash = "sha256:2677acec1a2f8ef614c6888b5b4ae4060cc184174a938ed4e8ef690e15d3e505", size = 99692, upload-time = "2025-10-14T04:42:28.425Z" }, + { url = "https://files.pythonhosted.org/packages/d1/a9/6c040053909d9d1ef4fcab45fddec083aedc9052c10078339b47c8573ea8/charset_normalizer-3.4.4-cp39-cp39-win_amd64.whl", hash = "sha256:f8e160feb2aed042cd657a72acc0b481212ed28b1b9a95c0cee1621b524e1966", size = 107192, upload-time = "2025-10-14T04:42:29.482Z" }, + { url = "https://files.pythonhosted.org/packages/f0/c6/4fa536b2c0cd3edfb7ccf8469fa0f363ea67b7213a842b90909ca33dd851/charset_normalizer-3.4.4-cp39-cp39-win_arm64.whl", hash = "sha256:b5d84d37db046c5ca74ee7bb47dd6cbc13f80665fdde3e8040bdd3fb015ecb50", size = 100220, upload-time = "2025-10-14T04:42:30.632Z" }, + { url = "https://files.pythonhosted.org/packages/0a/4c/925909008ed5a988ccbb72dcc897407e5d6d3bd72410d69e051fc0c14647/charset_normalizer-3.4.4-py3-none-any.whl", hash = "sha256:7a32c560861a02ff789ad905a2fe94e3f840803362c84fecf1851cb4cf3dc37f", size = 53402, upload-time = "2025-10-14T04:42:31.76Z" }, +] + +[[package]] +name = "colorama" +version = "0.4.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, +] + +[[package]] +name = "coverage" +version = "7.10.7" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.10'", +] +sdist = { url = "https://files.pythonhosted.org/packages/51/26/d22c300112504f5f9a9fd2297ce33c35f3d353e4aeb987c8419453b2a7c2/coverage-7.10.7.tar.gz", hash = "sha256:f4ab143ab113be368a3e9b795f9cd7906c5ef407d6173fe9675a902e1fffc239", size = 827704, upload-time = "2025-09-21T20:03:56.815Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e5/6c/3a3f7a46888e69d18abe3ccc6fe4cb16cccb1e6a2f99698931dafca489e6/coverage-7.10.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:fc04cc7a3db33664e0c2d10eb8990ff6b3536f6842c9590ae8da4c614b9ed05a", size = 217987, upload-time = "2025-09-21T20:00:57.218Z" }, + { url = "https://files.pythonhosted.org/packages/03/94/952d30f180b1a916c11a56f5c22d3535e943aa22430e9e3322447e520e1c/coverage-7.10.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e201e015644e207139f7e2351980feb7040e6f4b2c2978892f3e3789d1c125e5", size = 218388, upload-time = "2025-09-21T20:01:00.081Z" }, + { url = "https://files.pythonhosted.org/packages/50/2b/9e0cf8ded1e114bcd8b2fd42792b57f1c4e9e4ea1824cde2af93a67305be/coverage-7.10.7-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:240af60539987ced2c399809bd34f7c78e8abe0736af91c3d7d0e795df633d17", size = 245148, upload-time = "2025-09-21T20:01:01.768Z" }, + { url = "https://files.pythonhosted.org/packages/19/20/d0384ac06a6f908783d9b6aa6135e41b093971499ec488e47279f5b846e6/coverage-7.10.7-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = 
"sha256:8421e088bc051361b01c4b3a50fd39a4b9133079a2229978d9d30511fd05231b", size = 246958, upload-time = "2025-09-21T20:01:03.355Z" }, + { url = "https://files.pythonhosted.org/packages/60/83/5c283cff3d41285f8eab897651585db908a909c572bdc014bcfaf8a8b6ae/coverage-7.10.7-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6be8ed3039ae7f7ac5ce058c308484787c86e8437e72b30bf5e88b8ea10f3c87", size = 248819, upload-time = "2025-09-21T20:01:04.968Z" }, + { url = "https://files.pythonhosted.org/packages/60/22/02eb98fdc5ff79f423e990d877693e5310ae1eab6cb20ae0b0b9ac45b23b/coverage-7.10.7-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e28299d9f2e889e6d51b1f043f58d5f997c373cc12e6403b90df95b8b047c13e", size = 245754, upload-time = "2025-09-21T20:01:06.321Z" }, + { url = "https://files.pythonhosted.org/packages/b4/bc/25c83bcf3ad141b32cd7dc45485ef3c01a776ca3aa8ef0a93e77e8b5bc43/coverage-7.10.7-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:c4e16bd7761c5e454f4efd36f345286d6f7c5fa111623c355691e2755cae3b9e", size = 246860, upload-time = "2025-09-21T20:01:07.605Z" }, + { url = "https://files.pythonhosted.org/packages/3c/b7/95574702888b58c0928a6e982038c596f9c34d52c5e5107f1eef729399b5/coverage-7.10.7-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b1c81d0e5e160651879755c9c675b974276f135558cf4ba79fee7b8413a515df", size = 244877, upload-time = "2025-09-21T20:01:08.829Z" }, + { url = "https://files.pythonhosted.org/packages/47/b6/40095c185f235e085df0e0b158f6bd68cc6e1d80ba6c7721dc81d97ec318/coverage-7.10.7-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:606cc265adc9aaedcc84f1f064f0e8736bc45814f15a357e30fca7ecc01504e0", size = 245108, upload-time = "2025-09-21T20:01:10.527Z" }, + { url = "https://files.pythonhosted.org/packages/c8/50/4aea0556da7a4b93ec9168420d170b55e2eb50ae21b25062513d020c6861/coverage-7.10.7-cp310-cp310-musllinux_1_2_x86_64.whl", hash = 
"sha256:10b24412692df990dbc34f8fb1b6b13d236ace9dfdd68df5b28c2e39cafbba13", size = 245752, upload-time = "2025-09-21T20:01:11.857Z" }, + { url = "https://files.pythonhosted.org/packages/6a/28/ea1a84a60828177ae3b100cb6723838523369a44ec5742313ed7db3da160/coverage-7.10.7-cp310-cp310-win32.whl", hash = "sha256:b51dcd060f18c19290d9b8a9dd1e0181538df2ce0717f562fff6cf74d9fc0b5b", size = 220497, upload-time = "2025-09-21T20:01:13.459Z" }, + { url = "https://files.pythonhosted.org/packages/fc/1a/a81d46bbeb3c3fd97b9602ebaa411e076219a150489bcc2c025f151bd52d/coverage-7.10.7-cp310-cp310-win_amd64.whl", hash = "sha256:3a622ac801b17198020f09af3eaf45666b344a0d69fc2a6ffe2ea83aeef1d807", size = 221392, upload-time = "2025-09-21T20:01:14.722Z" }, + { url = "https://files.pythonhosted.org/packages/d2/5d/c1a17867b0456f2e9ce2d8d4708a4c3a089947d0bec9c66cdf60c9e7739f/coverage-7.10.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a609f9c93113be646f44c2a0256d6ea375ad047005d7f57a5c15f614dc1b2f59", size = 218102, upload-time = "2025-09-21T20:01:16.089Z" }, + { url = "https://files.pythonhosted.org/packages/54/f0/514dcf4b4e3698b9a9077f084429681bf3aad2b4a72578f89d7f643eb506/coverage-7.10.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:65646bb0359386e07639c367a22cf9b5bf6304e8630b565d0626e2bdf329227a", size = 218505, upload-time = "2025-09-21T20:01:17.788Z" }, + { url = "https://files.pythonhosted.org/packages/20/f6/9626b81d17e2a4b25c63ac1b425ff307ecdeef03d67c9a147673ae40dc36/coverage-7.10.7-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:5f33166f0dfcce728191f520bd2692914ec70fac2713f6bf3ce59c3deacb4699", size = 248898, upload-time = "2025-09-21T20:01:19.488Z" }, + { url = "https://files.pythonhosted.org/packages/b0/ef/bd8e719c2f7417ba03239052e099b76ea1130ac0cbb183ee1fcaa58aaff3/coverage-7.10.7-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = 
"sha256:35f5e3f9e455bb17831876048355dca0f758b6df22f49258cb5a91da23ef437d", size = 250831, upload-time = "2025-09-21T20:01:20.817Z" }, + { url = "https://files.pythonhosted.org/packages/a5/b6/bf054de41ec948b151ae2b79a55c107f5760979538f5fb80c195f2517718/coverage-7.10.7-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4da86b6d62a496e908ac2898243920c7992499c1712ff7c2b6d837cc69d9467e", size = 252937, upload-time = "2025-09-21T20:01:22.171Z" }, + { url = "https://files.pythonhosted.org/packages/0f/e5/3860756aa6f9318227443c6ce4ed7bf9e70bb7f1447a0353f45ac5c7974b/coverage-7.10.7-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:6b8b09c1fad947c84bbbc95eca841350fad9cbfa5a2d7ca88ac9f8d836c92e23", size = 249021, upload-time = "2025-09-21T20:01:23.907Z" }, + { url = "https://files.pythonhosted.org/packages/26/0f/bd08bd042854f7fd07b45808927ebcce99a7ed0f2f412d11629883517ac2/coverage-7.10.7-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4376538f36b533b46f8971d3a3e63464f2c7905c9800db97361c43a2b14792ab", size = 250626, upload-time = "2025-09-21T20:01:25.721Z" }, + { url = "https://files.pythonhosted.org/packages/8e/a7/4777b14de4abcc2e80c6b1d430f5d51eb18ed1d75fca56cbce5f2db9b36e/coverage-7.10.7-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:121da30abb574f6ce6ae09840dae322bef734480ceafe410117627aa54f76d82", size = 248682, upload-time = "2025-09-21T20:01:27.105Z" }, + { url = "https://files.pythonhosted.org/packages/34/72/17d082b00b53cd45679bad682fac058b87f011fd8b9fe31d77f5f8d3a4e4/coverage-7.10.7-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:88127d40df529336a9836870436fc2751c339fbaed3a836d42c93f3e4bd1d0a2", size = 248402, upload-time = "2025-09-21T20:01:28.629Z" }, + { url = "https://files.pythonhosted.org/packages/81/7a/92367572eb5bdd6a84bfa278cc7e97db192f9f45b28c94a9ca1a921c3577/coverage-7.10.7-cp311-cp311-musllinux_1_2_x86_64.whl", hash = 
"sha256:ba58bbcd1b72f136080c0bccc2400d66cc6115f3f906c499013d065ac33a4b61", size = 249320, upload-time = "2025-09-21T20:01:30.004Z" }, + { url = "https://files.pythonhosted.org/packages/2f/88/a23cc185f6a805dfc4fdf14a94016835eeb85e22ac3a0e66d5e89acd6462/coverage-7.10.7-cp311-cp311-win32.whl", hash = "sha256:972b9e3a4094b053a4e46832b4bc829fc8a8d347160eb39d03f1690316a99c14", size = 220536, upload-time = "2025-09-21T20:01:32.184Z" }, + { url = "https://files.pythonhosted.org/packages/fe/ef/0b510a399dfca17cec7bc2f05ad8bd78cf55f15c8bc9a73ab20c5c913c2e/coverage-7.10.7-cp311-cp311-win_amd64.whl", hash = "sha256:a7b55a944a7f43892e28ad4bc0561dfd5f0d73e605d1aa5c3c976b52aea121d2", size = 221425, upload-time = "2025-09-21T20:01:33.557Z" }, + { url = "https://files.pythonhosted.org/packages/51/7f/023657f301a276e4ba1850f82749bc136f5a7e8768060c2e5d9744a22951/coverage-7.10.7-cp311-cp311-win_arm64.whl", hash = "sha256:736f227fb490f03c6488f9b6d45855f8e0fd749c007f9303ad30efab0e73c05a", size = 220103, upload-time = "2025-09-21T20:01:34.929Z" }, + { url = "https://files.pythonhosted.org/packages/13/e4/eb12450f71b542a53972d19117ea5a5cea1cab3ac9e31b0b5d498df1bd5a/coverage-7.10.7-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7bb3b9ddb87ef7725056572368040c32775036472d5a033679d1fa6c8dc08417", size = 218290, upload-time = "2025-09-21T20:01:36.455Z" }, + { url = "https://files.pythonhosted.org/packages/37/66/593f9be12fc19fb36711f19a5371af79a718537204d16ea1d36f16bd78d2/coverage-7.10.7-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:18afb24843cbc175687225cab1138c95d262337f5473512010e46831aa0c2973", size = 218515, upload-time = "2025-09-21T20:01:37.982Z" }, + { url = "https://files.pythonhosted.org/packages/66/80/4c49f7ae09cafdacc73fbc30949ffe77359635c168f4e9ff33c9ebb07838/coverage-7.10.7-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:399a0b6347bcd3822be369392932884b8216d0944049ae22925631a9b3d4ba4c", size = 250020, upload-time = 
"2025-09-21T20:01:39.617Z" }, + { url = "https://files.pythonhosted.org/packages/a6/90/a64aaacab3b37a17aaedd83e8000142561a29eb262cede42d94a67f7556b/coverage-7.10.7-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:314f2c326ded3f4b09be11bc282eb2fc861184bc95748ae67b360ac962770be7", size = 252769, upload-time = "2025-09-21T20:01:41.341Z" }, + { url = "https://files.pythonhosted.org/packages/98/2e/2dda59afd6103b342e096f246ebc5f87a3363b5412609946c120f4e7750d/coverage-7.10.7-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c41e71c9cfb854789dee6fc51e46743a6d138b1803fab6cb860af43265b42ea6", size = 253901, upload-time = "2025-09-21T20:01:43.042Z" }, + { url = "https://files.pythonhosted.org/packages/53/dc/8d8119c9051d50f3119bb4a75f29f1e4a6ab9415cd1fa8bf22fcc3fb3b5f/coverage-7.10.7-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc01f57ca26269c2c706e838f6422e2a8788e41b3e3c65e2f41148212e57cd59", size = 250413, upload-time = "2025-09-21T20:01:44.469Z" }, + { url = "https://files.pythonhosted.org/packages/98/b3/edaff9c5d79ee4d4b6d3fe046f2b1d799850425695b789d491a64225d493/coverage-7.10.7-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a6442c59a8ac8b85812ce33bc4d05bde3fb22321fa8294e2a5b487c3505f611b", size = 251820, upload-time = "2025-09-21T20:01:45.915Z" }, + { url = "https://files.pythonhosted.org/packages/11/25/9a0728564bb05863f7e513e5a594fe5ffef091b325437f5430e8cfb0d530/coverage-7.10.7-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:78a384e49f46b80fb4c901d52d92abe098e78768ed829c673fbb53c498bef73a", size = 249941, upload-time = "2025-09-21T20:01:47.296Z" }, + { url = "https://files.pythonhosted.org/packages/e0/fd/ca2650443bfbef5b0e74373aac4df67b08180d2f184b482c41499668e258/coverage-7.10.7-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:5e1e9802121405ede4b0133aa4340ad8186a1d2526de5b7c3eca519db7bb89fb", size = 249519, upload-time = 
"2025-09-21T20:01:48.73Z" }, + { url = "https://files.pythonhosted.org/packages/24/79/f692f125fb4299b6f963b0745124998ebb8e73ecdfce4ceceb06a8c6bec5/coverage-7.10.7-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d41213ea25a86f69efd1575073d34ea11aabe075604ddf3d148ecfec9e1e96a1", size = 251375, upload-time = "2025-09-21T20:01:50.529Z" }, + { url = "https://files.pythonhosted.org/packages/5e/75/61b9bbd6c7d24d896bfeec57acba78e0f8deac68e6baf2d4804f7aae1f88/coverage-7.10.7-cp312-cp312-win32.whl", hash = "sha256:77eb4c747061a6af8d0f7bdb31f1e108d172762ef579166ec84542f711d90256", size = 220699, upload-time = "2025-09-21T20:01:51.941Z" }, + { url = "https://files.pythonhosted.org/packages/ca/f3/3bf7905288b45b075918d372498f1cf845b5b579b723c8fd17168018d5f5/coverage-7.10.7-cp312-cp312-win_amd64.whl", hash = "sha256:f51328ffe987aecf6d09f3cd9d979face89a617eacdaea43e7b3080777f647ba", size = 221512, upload-time = "2025-09-21T20:01:53.481Z" }, + { url = "https://files.pythonhosted.org/packages/5c/44/3e32dbe933979d05cf2dac5e697c8599cfe038aaf51223ab901e208d5a62/coverage-7.10.7-cp312-cp312-win_arm64.whl", hash = "sha256:bda5e34f8a75721c96085903c6f2197dc398c20ffd98df33f866a9c8fd95f4bf", size = 220147, upload-time = "2025-09-21T20:01:55.2Z" }, + { url = "https://files.pythonhosted.org/packages/9a/94/b765c1abcb613d103b64fcf10395f54d69b0ef8be6a0dd9c524384892cc7/coverage-7.10.7-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:981a651f543f2854abd3b5fcb3263aac581b18209be49863ba575de6edf4c14d", size = 218320, upload-time = "2025-09-21T20:01:56.629Z" }, + { url = "https://files.pythonhosted.org/packages/72/4f/732fff31c119bb73b35236dd333030f32c4bfe909f445b423e6c7594f9a2/coverage-7.10.7-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:73ab1601f84dc804f7812dc297e93cd99381162da39c47040a827d4e8dafe63b", size = 218575, upload-time = "2025-09-21T20:01:58.203Z" }, + { url = 
"https://files.pythonhosted.org/packages/87/02/ae7e0af4b674be47566707777db1aa375474f02a1d64b9323e5813a6cdd5/coverage-7.10.7-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:a8b6f03672aa6734e700bbcd65ff050fd19cddfec4b031cc8cf1c6967de5a68e", size = 249568, upload-time = "2025-09-21T20:01:59.748Z" }, + { url = "https://files.pythonhosted.org/packages/a2/77/8c6d22bf61921a59bce5471c2f1f7ac30cd4ac50aadde72b8c48d5727902/coverage-7.10.7-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:10b6ba00ab1132a0ce4428ff68cf50a25efd6840a42cdf4239c9b99aad83be8b", size = 252174, upload-time = "2025-09-21T20:02:01.192Z" }, + { url = "https://files.pythonhosted.org/packages/b1/20/b6ea4f69bbb52dac0aebd62157ba6a9dddbfe664f5af8122dac296c3ee15/coverage-7.10.7-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c79124f70465a150e89340de5963f936ee97097d2ef76c869708c4248c63ca49", size = 253447, upload-time = "2025-09-21T20:02:02.701Z" }, + { url = "https://files.pythonhosted.org/packages/f9/28/4831523ba483a7f90f7b259d2018fef02cb4d5b90bc7c1505d6e5a84883c/coverage-7.10.7-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:69212fbccdbd5b0e39eac4067e20a4a5256609e209547d86f740d68ad4f04911", size = 249779, upload-time = "2025-09-21T20:02:04.185Z" }, + { url = "https://files.pythonhosted.org/packages/a7/9f/4331142bc98c10ca6436d2d620c3e165f31e6c58d43479985afce6f3191c/coverage-7.10.7-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:7ea7c6c9d0d286d04ed3541747e6597cbe4971f22648b68248f7ddcd329207f0", size = 251604, upload-time = "2025-09-21T20:02:06.034Z" }, + { url = "https://files.pythonhosted.org/packages/ce/60/bda83b96602036b77ecf34e6393a3836365481b69f7ed7079ab85048202b/coverage-7.10.7-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b9be91986841a75042b3e3243d0b3cb0b2434252b977baaf0cd56e960fe1e46f", size = 249497, upload-time = 
"2025-09-21T20:02:07.619Z" }, + { url = "https://files.pythonhosted.org/packages/5f/af/152633ff35b2af63977edd835d8e6430f0caef27d171edf2fc76c270ef31/coverage-7.10.7-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:b281d5eca50189325cfe1f365fafade89b14b4a78d9b40b05ddd1fc7d2a10a9c", size = 249350, upload-time = "2025-09-21T20:02:10.34Z" }, + { url = "https://files.pythonhosted.org/packages/9d/71/d92105d122bd21cebba877228990e1646d862e34a98bb3374d3fece5a794/coverage-7.10.7-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:99e4aa63097ab1118e75a848a28e40d68b08a5e19ce587891ab7fd04475e780f", size = 251111, upload-time = "2025-09-21T20:02:12.122Z" }, + { url = "https://files.pythonhosted.org/packages/a2/9e/9fdb08f4bf476c912f0c3ca292e019aab6712c93c9344a1653986c3fd305/coverage-7.10.7-cp313-cp313-win32.whl", hash = "sha256:dc7c389dce432500273eaf48f410b37886be9208b2dd5710aaf7c57fd442c698", size = 220746, upload-time = "2025-09-21T20:02:13.919Z" }, + { url = "https://files.pythonhosted.org/packages/b1/b1/a75fd25df44eab52d1931e89980d1ada46824c7a3210be0d3c88a44aaa99/coverage-7.10.7-cp313-cp313-win_amd64.whl", hash = "sha256:cac0fdca17b036af3881a9d2729a850b76553f3f716ccb0360ad4dbc06b3b843", size = 221541, upload-time = "2025-09-21T20:02:15.57Z" }, + { url = "https://files.pythonhosted.org/packages/14/3a/d720d7c989562a6e9a14b2c9f5f2876bdb38e9367126d118495b89c99c37/coverage-7.10.7-cp313-cp313-win_arm64.whl", hash = "sha256:4b6f236edf6e2f9ae8fcd1332da4e791c1b6ba0dc16a2dc94590ceccb482e546", size = 220170, upload-time = "2025-09-21T20:02:17.395Z" }, + { url = "https://files.pythonhosted.org/packages/bb/22/e04514bf2a735d8b0add31d2b4ab636fc02370730787c576bb995390d2d5/coverage-7.10.7-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:a0ec07fd264d0745ee396b666d47cef20875f4ff2375d7c4f58235886cc1ef0c", size = 219029, upload-time = "2025-09-21T20:02:18.936Z" }, + { url = 
"https://files.pythonhosted.org/packages/11/0b/91128e099035ece15da3445d9015e4b4153a6059403452d324cbb0a575fa/coverage-7.10.7-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:dd5e856ebb7bfb7672b0086846db5afb4567a7b9714b8a0ebafd211ec7ce6a15", size = 219259, upload-time = "2025-09-21T20:02:20.44Z" }, + { url = "https://files.pythonhosted.org/packages/8b/51/66420081e72801536a091a0c8f8c1f88a5c4bf7b9b1bdc6222c7afe6dc9b/coverage-7.10.7-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:f57b2a3c8353d3e04acf75b3fed57ba41f5c0646bbf1d10c7c282291c97936b4", size = 260592, upload-time = "2025-09-21T20:02:22.313Z" }, + { url = "https://files.pythonhosted.org/packages/5d/22/9b8d458c2881b22df3db5bb3e7369e63d527d986decb6c11a591ba2364f7/coverage-7.10.7-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:1ef2319dd15a0b009667301a3f84452a4dc6fddfd06b0c5c53ea472d3989fbf0", size = 262768, upload-time = "2025-09-21T20:02:24.287Z" }, + { url = "https://files.pythonhosted.org/packages/f7/08/16bee2c433e60913c610ea200b276e8eeef084b0d200bdcff69920bd5828/coverage-7.10.7-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:83082a57783239717ceb0ad584de3c69cf581b2a95ed6bf81ea66034f00401c0", size = 264995, upload-time = "2025-09-21T20:02:26.133Z" }, + { url = "https://files.pythonhosted.org/packages/20/9d/e53eb9771d154859b084b90201e5221bca7674ba449a17c101a5031d4054/coverage-7.10.7-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:50aa94fb1fb9a397eaa19c0d5ec15a5edd03a47bf1a3a6111a16b36e190cff65", size = 259546, upload-time = "2025-09-21T20:02:27.716Z" }, + { url = "https://files.pythonhosted.org/packages/ad/b0/69bc7050f8d4e56a89fb550a1577d5d0d1db2278106f6f626464067b3817/coverage-7.10.7-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:2120043f147bebb41c85b97ac45dd173595ff14f2a584f2963891cbcc3091541", size = 262544, upload-time = 
"2025-09-21T20:02:29.216Z" }, + { url = "https://files.pythonhosted.org/packages/ef/4b/2514b060dbd1bc0aaf23b852c14bb5818f244c664cb16517feff6bb3a5ab/coverage-7.10.7-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:2fafd773231dd0378fdba66d339f84904a8e57a262f583530f4f156ab83863e6", size = 260308, upload-time = "2025-09-21T20:02:31.226Z" }, + { url = "https://files.pythonhosted.org/packages/54/78/7ba2175007c246d75e496f64c06e94122bdb914790a1285d627a918bd271/coverage-7.10.7-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:0b944ee8459f515f28b851728ad224fa2d068f1513ef6b7ff1efafeb2185f999", size = 258920, upload-time = "2025-09-21T20:02:32.823Z" }, + { url = "https://files.pythonhosted.org/packages/c0/b3/fac9f7abbc841409b9a410309d73bfa6cfb2e51c3fada738cb607ce174f8/coverage-7.10.7-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4b583b97ab2e3efe1b3e75248a9b333bd3f8b0b1b8e5b45578e05e5850dfb2c2", size = 261434, upload-time = "2025-09-21T20:02:34.86Z" }, + { url = "https://files.pythonhosted.org/packages/ee/51/a03bec00d37faaa891b3ff7387192cef20f01604e5283a5fabc95346befa/coverage-7.10.7-cp313-cp313t-win32.whl", hash = "sha256:2a78cd46550081a7909b3329e2266204d584866e8d97b898cd7fb5ac8d888b1a", size = 221403, upload-time = "2025-09-21T20:02:37.034Z" }, + { url = "https://files.pythonhosted.org/packages/53/22/3cf25d614e64bf6d8e59c7c669b20d6d940bb337bdee5900b9ca41c820bb/coverage-7.10.7-cp313-cp313t-win_amd64.whl", hash = "sha256:33a5e6396ab684cb43dc7befa386258acb2d7fae7f67330ebb85ba4ea27938eb", size = 222469, upload-time = "2025-09-21T20:02:39.011Z" }, + { url = "https://files.pythonhosted.org/packages/49/a1/00164f6d30d8a01c3c9c48418a7a5be394de5349b421b9ee019f380df2a0/coverage-7.10.7-cp313-cp313t-win_arm64.whl", hash = "sha256:86b0e7308289ddde73d863b7683f596d8d21c7d8664ce1dee061d0bcf3fbb4bb", size = 220731, upload-time = "2025-09-21T20:02:40.939Z" }, + { url = 
"https://files.pythonhosted.org/packages/23/9c/5844ab4ca6a4dd97a1850e030a15ec7d292b5c5cb93082979225126e35dd/coverage-7.10.7-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:b06f260b16ead11643a5a9f955bd4b5fd76c1a4c6796aeade8520095b75de520", size = 218302, upload-time = "2025-09-21T20:02:42.527Z" }, + { url = "https://files.pythonhosted.org/packages/f0/89/673f6514b0961d1f0e20ddc242e9342f6da21eaba3489901b565c0689f34/coverage-7.10.7-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:212f8f2e0612778f09c55dd4872cb1f64a1f2b074393d139278ce902064d5b32", size = 218578, upload-time = "2025-09-21T20:02:44.468Z" }, + { url = "https://files.pythonhosted.org/packages/05/e8/261cae479e85232828fb17ad536765c88dd818c8470aca690b0ac6feeaa3/coverage-7.10.7-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3445258bcded7d4aa630ab8296dea4d3f15a255588dd535f980c193ab6b95f3f", size = 249629, upload-time = "2025-09-21T20:02:46.503Z" }, + { url = "https://files.pythonhosted.org/packages/82/62/14ed6546d0207e6eda876434e3e8475a3e9adbe32110ce896c9e0c06bb9a/coverage-7.10.7-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:bb45474711ba385c46a0bfe696c695a929ae69ac636cda8f532be9e8c93d720a", size = 252162, upload-time = "2025-09-21T20:02:48.689Z" }, + { url = "https://files.pythonhosted.org/packages/ff/49/07f00db9ac6478e4358165a08fb41b469a1b053212e8a00cb02f0d27a05f/coverage-7.10.7-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:813922f35bd800dca9994c5971883cbc0d291128a5de6b167c7aa697fcf59360", size = 253517, upload-time = "2025-09-21T20:02:50.31Z" }, + { url = "https://files.pythonhosted.org/packages/a2/59/c5201c62dbf165dfbc91460f6dbbaa85a8b82cfa6131ac45d6c1bfb52deb/coverage-7.10.7-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:93c1b03552081b2a4423091d6fb3787265b8f86af404cff98d1b5342713bdd69", size = 249632, upload-time = 
"2025-09-21T20:02:51.971Z" }, + { url = "https://files.pythonhosted.org/packages/07/ae/5920097195291a51fb00b3a70b9bbd2edbfe3c84876a1762bd1ef1565ebc/coverage-7.10.7-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:cc87dd1b6eaf0b848eebb1c86469b9f72a1891cb42ac7adcfbce75eadb13dd14", size = 251520, upload-time = "2025-09-21T20:02:53.858Z" }, + { url = "https://files.pythonhosted.org/packages/b9/3c/a815dde77a2981f5743a60b63df31cb322c944843e57dbd579326625a413/coverage-7.10.7-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:39508ffda4f343c35f3236fe8d1a6634a51f4581226a1262769d7f970e73bffe", size = 249455, upload-time = "2025-09-21T20:02:55.807Z" }, + { url = "https://files.pythonhosted.org/packages/aa/99/f5cdd8421ea656abefb6c0ce92556709db2265c41e8f9fc6c8ae0f7824c9/coverage-7.10.7-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:925a1edf3d810537c5a3abe78ec5530160c5f9a26b1f4270b40e62cc79304a1e", size = 249287, upload-time = "2025-09-21T20:02:57.784Z" }, + { url = "https://files.pythonhosted.org/packages/c3/7a/e9a2da6a1fc5d007dd51fca083a663ab930a8c4d149c087732a5dbaa0029/coverage-7.10.7-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:2c8b9a0636f94c43cd3576811e05b89aa9bc2d0a85137affc544ae5cb0e4bfbd", size = 250946, upload-time = "2025-09-21T20:02:59.431Z" }, + { url = "https://files.pythonhosted.org/packages/ef/5b/0b5799aa30380a949005a353715095d6d1da81927d6dbed5def2200a4e25/coverage-7.10.7-cp314-cp314-win32.whl", hash = "sha256:b7b8288eb7cdd268b0304632da8cb0bb93fadcfec2fe5712f7b9cc8f4d487be2", size = 221009, upload-time = "2025-09-21T20:03:01.324Z" }, + { url = "https://files.pythonhosted.org/packages/da/b0/e802fbb6eb746de006490abc9bb554b708918b6774b722bb3a0e6aa1b7de/coverage-7.10.7-cp314-cp314-win_amd64.whl", hash = "sha256:1ca6db7c8807fb9e755d0379ccc39017ce0a84dcd26d14b5a03b78563776f681", size = 221804, upload-time = "2025-09-21T20:03:03.4Z" }, + { url = 
"https://files.pythonhosted.org/packages/9e/e8/71d0c8e374e31f39e3389bb0bd19e527d46f00ea8571ec7ec8fd261d8b44/coverage-7.10.7-cp314-cp314-win_arm64.whl", hash = "sha256:097c1591f5af4496226d5783d036bf6fd6cd0cbc132e071b33861de756efb880", size = 220384, upload-time = "2025-09-21T20:03:05.111Z" }, + { url = "https://files.pythonhosted.org/packages/62/09/9a5608d319fa3eba7a2019addeacb8c746fb50872b57a724c9f79f146969/coverage-7.10.7-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:a62c6ef0d50e6de320c270ff91d9dd0a05e7250cac2a800b7784bae474506e63", size = 219047, upload-time = "2025-09-21T20:03:06.795Z" }, + { url = "https://files.pythonhosted.org/packages/f5/6f/f58d46f33db9f2e3647b2d0764704548c184e6f5e014bef528b7f979ef84/coverage-7.10.7-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:9fa6e4dd51fe15d8738708a973470f67a855ca50002294852e9571cdbd9433f2", size = 219266, upload-time = "2025-09-21T20:03:08.495Z" }, + { url = "https://files.pythonhosted.org/packages/74/5c/183ffc817ba68e0b443b8c934c8795553eb0c14573813415bd59941ee165/coverage-7.10.7-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:8fb190658865565c549b6b4706856d6a7b09302c797eb2cf8e7fe9dabb043f0d", size = 260767, upload-time = "2025-09-21T20:03:10.172Z" }, + { url = "https://files.pythonhosted.org/packages/0f/48/71a8abe9c1ad7e97548835e3cc1adbf361e743e9d60310c5f75c9e7bf847/coverage-7.10.7-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:affef7c76a9ef259187ef31599a9260330e0335a3011732c4b9effa01e1cd6e0", size = 262931, upload-time = "2025-09-21T20:03:11.861Z" }, + { url = "https://files.pythonhosted.org/packages/84/fd/193a8fb132acfc0a901f72020e54be5e48021e1575bb327d8ee1097a28fd/coverage-7.10.7-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6e16e07d85ca0cf8bafe5f5d23a0b850064e8e945d5677492b06bbe6f09cc699", size = 265186, upload-time = "2025-09-21T20:03:13.539Z" }, + { url = 
"https://files.pythonhosted.org/packages/b1/8f/74ecc30607dd95ad50e3034221113ccb1c6d4e8085cc761134782995daae/coverage-7.10.7-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:03ffc58aacdf65d2a82bbeb1ffe4d01ead4017a21bfd0454983b88ca73af94b9", size = 259470, upload-time = "2025-09-21T20:03:15.584Z" }, + { url = "https://files.pythonhosted.org/packages/0f/55/79ff53a769f20d71b07023ea115c9167c0bb56f281320520cf64c5298a96/coverage-7.10.7-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:1b4fd784344d4e52647fd7857b2af5b3fbe6c239b0b5fa63e94eb67320770e0f", size = 262626, upload-time = "2025-09-21T20:03:17.673Z" }, + { url = "https://files.pythonhosted.org/packages/88/e2/dac66c140009b61ac3fc13af673a574b00c16efdf04f9b5c740703e953c0/coverage-7.10.7-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:0ebbaddb2c19b71912c6f2518e791aa8b9f054985a0769bdb3a53ebbc765c6a1", size = 260386, upload-time = "2025-09-21T20:03:19.36Z" }, + { url = "https://files.pythonhosted.org/packages/a2/f1/f48f645e3f33bb9ca8a496bc4a9671b52f2f353146233ebd7c1df6160440/coverage-7.10.7-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:a2d9a3b260cc1d1dbdb1c582e63ddcf5363426a1a68faa0f5da28d8ee3c722a0", size = 258852, upload-time = "2025-09-21T20:03:21.007Z" }, + { url = "https://files.pythonhosted.org/packages/bb/3b/8442618972c51a7affeead957995cfa8323c0c9bcf8fa5a027421f720ff4/coverage-7.10.7-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:a3cc8638b2480865eaa3926d192e64ce6c51e3d29c849e09d5b4ad95efae5399", size = 261534, upload-time = "2025-09-21T20:03:23.12Z" }, + { url = "https://files.pythonhosted.org/packages/b2/dc/101f3fa3a45146db0cb03f5b4376e24c0aac818309da23e2de0c75295a91/coverage-7.10.7-cp314-cp314t-win32.whl", hash = "sha256:67f8c5cbcd3deb7a60b3345dffc89a961a484ed0af1f6f73de91705cc6e31235", size = 221784, upload-time = "2025-09-21T20:03:24.769Z" }, + { url = 
"https://files.pythonhosted.org/packages/4c/a1/74c51803fc70a8a40d7346660379e144be772bab4ac7bb6e6b905152345c/coverage-7.10.7-cp314-cp314t-win_amd64.whl", hash = "sha256:e1ed71194ef6dea7ed2d5cb5f7243d4bcd334bfb63e59878519be558078f848d", size = 222905, upload-time = "2025-09-21T20:03:26.93Z" }, + { url = "https://files.pythonhosted.org/packages/12/65/f116a6d2127df30bcafbceef0302d8a64ba87488bf6f73a6d8eebf060873/coverage-7.10.7-cp314-cp314t-win_arm64.whl", hash = "sha256:7fe650342addd8524ca63d77b2362b02345e5f1a093266787d210c70a50b471a", size = 220922, upload-time = "2025-09-21T20:03:28.672Z" }, + { url = "https://files.pythonhosted.org/packages/a3/ad/d1c25053764b4c42eb294aae92ab617d2e4f803397f9c7c8295caa77a260/coverage-7.10.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fff7b9c3f19957020cac546c70025331113d2e61537f6e2441bc7657913de7d3", size = 217978, upload-time = "2025-09-21T20:03:30.362Z" }, + { url = "https://files.pythonhosted.org/packages/52/2f/b9f9daa39b80ece0b9548bbb723381e29bc664822d9a12c2135f8922c22b/coverage-7.10.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:bc91b314cef27742da486d6839b677b3f2793dfe52b51bbbb7cf736d5c29281c", size = 218370, upload-time = "2025-09-21T20:03:32.147Z" }, + { url = "https://files.pythonhosted.org/packages/dd/6e/30d006c3b469e58449650642383dddf1c8fb63d44fdf92994bfd46570695/coverage-7.10.7-cp39-cp39-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:567f5c155eda8df1d3d439d40a45a6a5f029b429b06648235f1e7e51b522b396", size = 244802, upload-time = "2025-09-21T20:03:33.919Z" }, + { url = "https://files.pythonhosted.org/packages/b0/49/8a070782ce7e6b94ff6a0b6d7c65ba6bc3091d92a92cef4cd4eb0767965c/coverage-7.10.7-cp39-cp39-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:2af88deffcc8a4d5974cf2d502251bc3b2db8461f0b66d80a449c33757aa9f40", size = 246625, upload-time = "2025-09-21T20:03:36.09Z" }, + { url = 
"https://files.pythonhosted.org/packages/6a/92/1c1c5a9e8677ce56d42b97bdaca337b2d4d9ebe703d8c174ede52dbabd5f/coverage-7.10.7-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c7315339eae3b24c2d2fa1ed7d7a38654cba34a13ef19fbcb9425da46d3dc594", size = 248399, upload-time = "2025-09-21T20:03:38.342Z" }, + { url = "https://files.pythonhosted.org/packages/c0/54/b140edee7257e815de7426d5d9846b58505dffc29795fff2dfb7f8a1c5a0/coverage-7.10.7-cp39-cp39-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:912e6ebc7a6e4adfdbb1aec371ad04c68854cd3bf3608b3514e7ff9062931d8a", size = 245142, upload-time = "2025-09-21T20:03:40.591Z" }, + { url = "https://files.pythonhosted.org/packages/e4/9e/6d6b8295940b118e8b7083b29226c71f6154f7ff41e9ca431f03de2eac0d/coverage-7.10.7-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:f49a05acd3dfe1ce9715b657e28d138578bc40126760efb962322c56e9ca344b", size = 246284, upload-time = "2025-09-21T20:03:42.355Z" }, + { url = "https://files.pythonhosted.org/packages/db/e5/5e957ca747d43dbe4d9714358375c7546cb3cb533007b6813fc20fce37ad/coverage-7.10.7-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:cce2109b6219f22ece99db7644b9622f54a4e915dad65660ec435e89a3ea7cc3", size = 244353, upload-time = "2025-09-21T20:03:44.218Z" }, + { url = "https://files.pythonhosted.org/packages/9a/45/540fc5cc92536a1b783b7ef99450bd55a4b3af234aae35a18a339973ce30/coverage-7.10.7-cp39-cp39-musllinux_1_2_riscv64.whl", hash = "sha256:f3c887f96407cea3916294046fc7dab611c2552beadbed4ea901cbc6a40cc7a0", size = 244430, upload-time = "2025-09-21T20:03:46.065Z" }, + { url = "https://files.pythonhosted.org/packages/75/0b/8287b2e5b38c8fe15d7e3398849bb58d382aedc0864ea0fa1820e8630491/coverage-7.10.7-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:635adb9a4507c9fd2ed65f39693fa31c9a3ee3a8e6dc64df033e8fdf52a7003f", size = 245311, upload-time = "2025-09-21T20:03:48.19Z" }, + { url = 
"https://files.pythonhosted.org/packages/0c/1d/29724999984740f0c86d03e6420b942439bf5bd7f54d4382cae386a9d1e9/coverage-7.10.7-cp39-cp39-win32.whl", hash = "sha256:5a02d5a850e2979b0a014c412573953995174743a3f7fa4ea5a6e9a3c5617431", size = 220500, upload-time = "2025-09-21T20:03:50.024Z" }, + { url = "https://files.pythonhosted.org/packages/43/11/4b1e6b129943f905ca54c339f343877b55b365ae2558806c1be4f7476ed5/coverage-7.10.7-cp39-cp39-win_amd64.whl", hash = "sha256:c134869d5ffe34547d14e174c866fd8fe2254918cc0a95e99052903bc1543e07", size = 221408, upload-time = "2025-09-21T20:03:51.803Z" }, + { url = "https://files.pythonhosted.org/packages/ec/16/114df1c291c22cac3b0c127a73e0af5c12ed7bbb6558d310429a0ae24023/coverage-7.10.7-py3-none-any.whl", hash = "sha256:f7941f6f2fe6dd6807a1208737b8a0cbcf1cc6d7b07d24998ad2d63590868260", size = 209952, upload-time = "2025-09-21T20:03:53.918Z" }, +] + +[[package]] +name = "coverage" +version = "7.13.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.10'", +] +sdist = { url = "https://files.pythonhosted.org/packages/b6/45/2c665ca77ec32ad67e25c77daf1cee28ee4558f3bc571cdbaf88a00b9f23/coverage-7.13.0.tar.gz", hash = "sha256:a394aa27f2d7ff9bc04cf703817773a59ad6dfbd577032e690f961d2460ee936", size = 820905, upload-time = "2025-12-08T13:14:38.055Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/db/08/bdd7ccca14096f7eb01412b87ac11e5d16e4cb54b6e328afc9dee8bdaec1/coverage-7.13.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:02d9fb9eccd48f6843c98a37bd6817462f130b86da8660461e8f5e54d4c06070", size = 217979, upload-time = "2025-12-08T13:12:14.505Z" }, + { url = "https://files.pythonhosted.org/packages/fa/f0/d1302e3416298a28b5663ae1117546a745d9d19fde7e28402b2c5c3e2109/coverage-7.13.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:367449cf07d33dc216c083f2036bb7d976c6e4903ab31be400ad74ad9f85ce98", size = 218496, upload-time = "2025-12-08T13:12:16.237Z" }, + { url = 
"https://files.pythonhosted.org/packages/07/26/d36c354c8b2a320819afcea6bffe72839efd004b98d1d166b90801d49d57/coverage-7.13.0-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:cdb3c9f8fef0a954c632f64328a3935988d33a6604ce4bf67ec3e39670f12ae5", size = 245237, upload-time = "2025-12-08T13:12:17.858Z" }, + { url = "https://files.pythonhosted.org/packages/91/52/be5e85631e0eec547873d8b08dd67a5f6b111ecfe89a86e40b89b0c1c61c/coverage-7.13.0-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:d10fd186aac2316f9bbb46ef91977f9d394ded67050ad6d84d94ed6ea2e8e54e", size = 247061, upload-time = "2025-12-08T13:12:19.132Z" }, + { url = "https://files.pythonhosted.org/packages/0f/45/a5e8fa0caf05fbd8fa0402470377bff09cc1f026d21c05c71e01295e55ab/coverage-7.13.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7f88ae3e69df2ab62fb0bc5219a597cb890ba5c438190ffa87490b315190bb33", size = 248928, upload-time = "2025-12-08T13:12:20.702Z" }, + { url = "https://files.pythonhosted.org/packages/f5/42/ffb5069b6fd1b95fae482e02f3fecf380d437dd5a39bae09f16d2e2e7e01/coverage-7.13.0-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c4be718e51e86f553bcf515305a158a1cd180d23b72f07ae76d6017c3cc5d791", size = 245931, upload-time = "2025-12-08T13:12:22.243Z" }, + { url = "https://files.pythonhosted.org/packages/95/6e/73e809b882c2858f13e55c0c36e94e09ce07e6165d5644588f9517efe333/coverage-7.13.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a00d3a393207ae12f7c49bb1c113190883b500f48979abb118d8b72b8c95c032", size = 246968, upload-time = "2025-12-08T13:12:23.52Z" }, + { url = "https://files.pythonhosted.org/packages/87/08/64ebd9e64b6adb8b4a4662133d706fbaccecab972e0b3ccc23f64e2678ad/coverage-7.13.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:3a7b1cd820e1b6116f92c6128f1188e7afe421c7e1b35fa9836b11444e53ebd9", size = 244972, upload-time = 
"2025-12-08T13:12:24.781Z" }, + { url = "https://files.pythonhosted.org/packages/12/97/f4d27c6fe0cb375a5eced4aabcaef22de74766fb80a3d5d2015139e54b22/coverage-7.13.0-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:37eee4e552a65866f15dedd917d5e5f3d59805994260720821e2c1b51ac3248f", size = 245241, upload-time = "2025-12-08T13:12:28.041Z" }, + { url = "https://files.pythonhosted.org/packages/0c/94/42f8ae7f633bf4c118bf1038d80472f9dade88961a466f290b81250f7ab7/coverage-7.13.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:62d7c4f13102148c78d7353c6052af6d899a7f6df66a32bddcc0c0eb7c5326f8", size = 245847, upload-time = "2025-12-08T13:12:29.337Z" }, + { url = "https://files.pythonhosted.org/packages/a8/2f/6369ca22b6b6d933f4f4d27765d313d8914cc4cce84f82a16436b1a233db/coverage-7.13.0-cp310-cp310-win32.whl", hash = "sha256:24e4e56304fdb56f96f80eabf840eab043b3afea9348b88be680ec5986780a0f", size = 220573, upload-time = "2025-12-08T13:12:30.905Z" }, + { url = "https://files.pythonhosted.org/packages/f1/dc/a6a741e519acceaeccc70a7f4cfe5d030efc4b222595f0677e101af6f1f3/coverage-7.13.0-cp310-cp310-win_amd64.whl", hash = "sha256:74c136e4093627cf04b26a35dab8cbfc9b37c647f0502fc313376e11726ba303", size = 221509, upload-time = "2025-12-08T13:12:32.09Z" }, + { url = "https://files.pythonhosted.org/packages/f1/dc/888bf90d8b1c3d0b4020a40e52b9f80957d75785931ec66c7dfaccc11c7d/coverage-7.13.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0dfa3855031070058add1a59fdfda0192fd3e8f97e7c81de0596c145dea51820", size = 218104, upload-time = "2025-12-08T13:12:33.333Z" }, + { url = "https://files.pythonhosted.org/packages/8d/ea/069d51372ad9c380214e86717e40d1a743713a2af191cfba30a0911b0a4a/coverage-7.13.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4fdb6f54f38e334db97f72fa0c701e66d8479af0bc3f9bfb5b90f1c30f54500f", size = 218606, upload-time = "2025-12-08T13:12:34.498Z" }, + { url = 
"https://files.pythonhosted.org/packages/68/09/77b1c3a66c2aa91141b6c4471af98e5b1ed9b9e6d17255da5eb7992299e3/coverage-7.13.0-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:7e442c013447d1d8d195be62852270b78b6e255b79b8675bad8479641e21fd96", size = 248999, upload-time = "2025-12-08T13:12:36.02Z" }, + { url = "https://files.pythonhosted.org/packages/0a/32/2e2f96e9d5691eaf1181d9040f850b8b7ce165ea10810fd8e2afa534cef7/coverage-7.13.0-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:1ed5630d946859de835a85e9a43b721123a8a44ec26e2830b296d478c7fd4259", size = 250925, upload-time = "2025-12-08T13:12:37.221Z" }, + { url = "https://files.pythonhosted.org/packages/7b/45/b88ddac1d7978859b9a39a8a50ab323186148f1d64bc068f86fc77706321/coverage-7.13.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7f15a931a668e58087bc39d05d2b4bf4b14ff2875b49c994bbdb1c2217a8daeb", size = 253032, upload-time = "2025-12-08T13:12:38.763Z" }, + { url = "https://files.pythonhosted.org/packages/71/cb/e15513f94c69d4820a34b6bf3d2b1f9f8755fa6021be97c7065442d7d653/coverage-7.13.0-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:30a3a201a127ea57f7e14ba43c93c9c4be8b7d17a26e03bb49e6966d019eede9", size = 249134, upload-time = "2025-12-08T13:12:40.382Z" }, + { url = "https://files.pythonhosted.org/packages/09/61/d960ff7dc9e902af3310ce632a875aaa7860f36d2bc8fc8b37ee7c1b82a5/coverage-7.13.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7a485ff48fbd231efa32d58f479befce52dcb6bfb2a88bb7bf9a0b89b1bc8030", size = 250731, upload-time = "2025-12-08T13:12:41.992Z" }, + { url = "https://files.pythonhosted.org/packages/98/34/c7c72821794afc7c7c2da1db8f00c2c98353078aa7fb6b5ff36aac834b52/coverage-7.13.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:22486cdafba4f9e471c816a2a5745337742a617fef68e890d8baf9f3036d7833", size = 248795, upload-time = 
"2025-12-08T13:12:43.331Z" }, + { url = "https://files.pythonhosted.org/packages/0a/5b/e0f07107987a43b2def9aa041c614ddb38064cbf294a71ef8c67d43a0cdd/coverage-7.13.0-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:263c3dbccc78e2e331e59e90115941b5f53e85cfcc6b3b2fbff1fd4e3d2c6ea8", size = 248514, upload-time = "2025-12-08T13:12:44.546Z" }, + { url = "https://files.pythonhosted.org/packages/71/c2/c949c5d3b5e9fc6dd79e1b73cdb86a59ef14f3709b1d72bf7668ae12e000/coverage-7.13.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e5330fa0cc1f5c3c4c3bb8e101b742025933e7848989370a1d4c8c5e401ea753", size = 249424, upload-time = "2025-12-08T13:12:45.759Z" }, + { url = "https://files.pythonhosted.org/packages/11/f1/bbc009abd6537cec0dffb2cc08c17a7f03de74c970e6302db4342a6e05af/coverage-7.13.0-cp311-cp311-win32.whl", hash = "sha256:0f4872f5d6c54419c94c25dd6ae1d015deeb337d06e448cd890a1e89a8ee7f3b", size = 220597, upload-time = "2025-12-08T13:12:47.378Z" }, + { url = "https://files.pythonhosted.org/packages/c4/f6/d9977f2fb51c10fbaed0718ce3d0a8541185290b981f73b1d27276c12d91/coverage-7.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:51a202e0f80f241ccb68e3e26e19ab5b3bf0f813314f2c967642f13ebcf1ddfe", size = 221536, upload-time = "2025-12-08T13:12:48.7Z" }, + { url = "https://files.pythonhosted.org/packages/be/ad/3fcf43fd96fb43e337a3073dea63ff148dcc5c41ba7a14d4c7d34efb2216/coverage-7.13.0-cp311-cp311-win_arm64.whl", hash = "sha256:d2a9d7f1c11487b1c69367ab3ac2d81b9b3721f097aa409a3191c3e90f8f3dd7", size = 220206, upload-time = "2025-12-08T13:12:50.365Z" }, + { url = "https://files.pythonhosted.org/packages/9b/f1/2619559f17f31ba00fc40908efd1fbf1d0a5536eb75dc8341e7d660a08de/coverage-7.13.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:0b3d67d31383c4c68e19a88e28fc4c2e29517580f1b0ebec4a069d502ce1e0bf", size = 218274, upload-time = "2025-12-08T13:12:52.095Z" }, + { url = 
"https://files.pythonhosted.org/packages/2b/11/30d71ae5d6e949ff93b2a79a2c1b4822e00423116c5c6edfaeef37301396/coverage-7.13.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:581f086833d24a22c89ae0fe2142cfaa1c92c930adf637ddf122d55083fb5a0f", size = 218638, upload-time = "2025-12-08T13:12:53.418Z" }, + { url = "https://files.pythonhosted.org/packages/79/c2/fce80fc6ded8d77e53207489d6065d0fed75db8951457f9213776615e0f5/coverage-7.13.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:0a3a30f0e257df382f5f9534d4ce3d4cf06eafaf5192beb1a7bd066cb10e78fb", size = 250129, upload-time = "2025-12-08T13:12:54.744Z" }, + { url = "https://files.pythonhosted.org/packages/5b/b6/51b5d1eb6fcbb9a1d5d6984e26cbe09018475c2922d554fd724dd0f056ee/coverage-7.13.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:583221913fbc8f53b88c42e8dbb8fca1d0f2e597cb190ce45916662b8b9d9621", size = 252885, upload-time = "2025-12-08T13:12:56.401Z" }, + { url = "https://files.pythonhosted.org/packages/0d/f8/972a5affea41de798691ab15d023d3530f9f56a72e12e243f35031846ff7/coverage-7.13.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5f5d9bd30756fff3e7216491a0d6d520c448d5124d3d8e8f56446d6412499e74", size = 253974, upload-time = "2025-12-08T13:12:57.718Z" }, + { url = "https://files.pythonhosted.org/packages/8a/56/116513aee860b2c7968aa3506b0f59b22a959261d1dbf3aea7b4450a7520/coverage-7.13.0-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a23e5a1f8b982d56fa64f8e442e037f6ce29322f1f9e6c2344cd9e9f4407ee57", size = 250538, upload-time = "2025-12-08T13:12:59.254Z" }, + { url = "https://files.pythonhosted.org/packages/d6/75/074476d64248fbadf16dfafbf93fdcede389ec821f74ca858d7c87d2a98c/coverage-7.13.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9b01c22bc74a7fb44066aaf765224c0d933ddf1f5047d6cdfe4795504a4493f8", size = 251912, upload-time = 
"2025-12-08T13:13:00.604Z" }, + { url = "https://files.pythonhosted.org/packages/f2/d2/aa4f8acd1f7c06024705c12609d8698c51b27e4d635d717cd1934c9668e2/coverage-7.13.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:898cce66d0836973f48dda4e3514d863d70142bdf6dfab932b9b6a90ea5b222d", size = 250054, upload-time = "2025-12-08T13:13:01.892Z" }, + { url = "https://files.pythonhosted.org/packages/19/98/8df9e1af6a493b03694a1e8070e024e7d2cdc77adedc225a35e616d505de/coverage-7.13.0-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:3ab483ea0e251b5790c2aac03acde31bff0c736bf8a86829b89382b407cd1c3b", size = 249619, upload-time = "2025-12-08T13:13:03.236Z" }, + { url = "https://files.pythonhosted.org/packages/d8/71/f8679231f3353018ca66ef647fa6fe7b77e6bff7845be54ab84f86233363/coverage-7.13.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1d84e91521c5e4cb6602fe11ece3e1de03b2760e14ae4fcf1a4b56fa3c801fcd", size = 251496, upload-time = "2025-12-08T13:13:04.511Z" }, + { url = "https://files.pythonhosted.org/packages/04/86/9cb406388034eaf3c606c22094edbbb82eea1fa9d20c0e9efadff20d0733/coverage-7.13.0-cp312-cp312-win32.whl", hash = "sha256:193c3887285eec1dbdb3f2bd7fbc351d570ca9c02ca756c3afbc71b3c98af6ef", size = 220808, upload-time = "2025-12-08T13:13:06.422Z" }, + { url = "https://files.pythonhosted.org/packages/1c/59/af483673df6455795daf5f447c2f81a3d2fcfc893a22b8ace983791f6f34/coverage-7.13.0-cp312-cp312-win_amd64.whl", hash = "sha256:4f3e223b2b2db5e0db0c2b97286aba0036ca000f06aca9b12112eaa9af3d92ae", size = 221616, upload-time = "2025-12-08T13:13:07.95Z" }, + { url = "https://files.pythonhosted.org/packages/64/b0/959d582572b30a6830398c60dd419c1965ca4b5fb38ac6b7093a0d50ca8d/coverage-7.13.0-cp312-cp312-win_arm64.whl", hash = "sha256:086cede306d96202e15a4b77ace8472e39d9f4e5f9fd92dd4fecdfb2313b2080", size = 220261, upload-time = "2025-12-08T13:13:09.581Z" }, + { url = 
"https://files.pythonhosted.org/packages/7c/cc/bce226595eb3bf7d13ccffe154c3c487a22222d87ff018525ab4dd2e9542/coverage-7.13.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:28ee1c96109974af104028a8ef57cec21447d42d0e937c0275329272e370ebcf", size = 218297, upload-time = "2025-12-08T13:13:10.977Z" }, + { url = "https://files.pythonhosted.org/packages/3b/9f/73c4d34600aae03447dff3d7ad1d0ac649856bfb87d1ca7d681cfc913f9e/coverage-7.13.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d1e97353dcc5587b85986cda4ff3ec98081d7e84dd95e8b2a6d59820f0545f8a", size = 218673, upload-time = "2025-12-08T13:13:12.562Z" }, + { url = "https://files.pythonhosted.org/packages/63/ab/8fa097db361a1e8586535ae5073559e6229596b3489ec3ef2f5b38df8cb2/coverage-7.13.0-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:99acd4dfdfeb58e1937629eb1ab6ab0899b131f183ee5f23e0b5da5cba2fec74", size = 249652, upload-time = "2025-12-08T13:13:13.909Z" }, + { url = "https://files.pythonhosted.org/packages/90/3a/9bfd4de2ff191feb37ef9465855ca56a6f2f30a3bca172e474130731ac3d/coverage-7.13.0-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:ff45e0cd8451e293b63ced93161e189780baf444119391b3e7d25315060368a6", size = 252251, upload-time = "2025-12-08T13:13:15.553Z" }, + { url = "https://files.pythonhosted.org/packages/df/61/b5d8105f016e1b5874af0d7c67542da780ccd4a5f2244a433d3e20ceb1ad/coverage-7.13.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f4f72a85316d8e13234cafe0a9f81b40418ad7a082792fa4165bd7d45d96066b", size = 253492, upload-time = "2025-12-08T13:13:16.849Z" }, + { url = "https://files.pythonhosted.org/packages/f3/b8/0fad449981803cc47a4694768b99823fb23632150743f9c83af329bb6090/coverage-7.13.0-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:11c21557d0e0a5a38632cbbaca5f008723b26a89d70db6315523df6df77d6232", size = 249850, upload-time = 
"2025-12-08T13:13:18.142Z" }, + { url = "https://files.pythonhosted.org/packages/9a/e9/8d68337c3125014d918cf4327d5257553a710a2995a6a6de2ac77e5aa429/coverage-7.13.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:76541dc8d53715fb4f7a3a06b34b0dc6846e3c69bc6204c55653a85dd6220971", size = 251633, upload-time = "2025-12-08T13:13:19.56Z" }, + { url = "https://files.pythonhosted.org/packages/55/14/d4112ab26b3a1bc4b3c1295d8452dcf399ed25be4cf649002fb3e64b2d93/coverage-7.13.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:6e9e451dee940a86789134b6b0ffbe31c454ade3b849bb8a9d2cca2541a8e91d", size = 249586, upload-time = "2025-12-08T13:13:20.883Z" }, + { url = "https://files.pythonhosted.org/packages/2c/a9/22b0000186db663b0d82f86c2f1028099ae9ac202491685051e2a11a5218/coverage-7.13.0-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:5c67dace46f361125e6b9cace8fe0b729ed8479f47e70c89b838d319375c8137", size = 249412, upload-time = "2025-12-08T13:13:22.22Z" }, + { url = "https://files.pythonhosted.org/packages/a1/2e/42d8e0d9e7527fba439acdc6ed24a2b97613b1dc85849b1dd935c2cffef0/coverage-7.13.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f59883c643cb19630500f57016f76cfdcd6845ca8c5b5ea1f6e17f74c8e5f511", size = 251191, upload-time = "2025-12-08T13:13:23.899Z" }, + { url = "https://files.pythonhosted.org/packages/a4/af/8c7af92b1377fd8860536aadd58745119252aaaa71a5213e5a8e8007a9f5/coverage-7.13.0-cp313-cp313-win32.whl", hash = "sha256:58632b187be6f0be500f553be41e277712baa278147ecb7559983c6d9faf7ae1", size = 220829, upload-time = "2025-12-08T13:13:25.182Z" }, + { url = "https://files.pythonhosted.org/packages/58/f9/725e8bf16f343d33cbe076c75dc8370262e194ff10072c0608b8e5cf33a3/coverage-7.13.0-cp313-cp313-win_amd64.whl", hash = "sha256:73419b89f812f498aca53f757dd834919b48ce4799f9d5cad33ca0ae442bdb1a", size = 221640, upload-time = "2025-12-08T13:13:26.836Z" }, + { url = 
"https://files.pythonhosted.org/packages/8a/ff/e98311000aa6933cc79274e2b6b94a2fe0fe3434fca778eba82003675496/coverage-7.13.0-cp313-cp313-win_arm64.whl", hash = "sha256:eb76670874fdd6091eedcc856128ee48c41a9bbbb9c3f1c7c3cf169290e3ffd6", size = 220269, upload-time = "2025-12-08T13:13:28.116Z" }, + { url = "https://files.pythonhosted.org/packages/cf/cf/bbaa2e1275b300343ea865f7d424cc0a2e2a1df6925a070b2b2d5d765330/coverage-7.13.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:6e63ccc6e0ad8986386461c3c4b737540f20426e7ec932f42e030320896c311a", size = 218990, upload-time = "2025-12-08T13:13:29.463Z" }, + { url = "https://files.pythonhosted.org/packages/21/1d/82f0b3323b3d149d7672e7744c116e9c170f4957e0c42572f0366dbb4477/coverage-7.13.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:494f5459ffa1bd45e18558cd98710c36c0b8fbfa82a5eabcbe671d80ecffbfe8", size = 219340, upload-time = "2025-12-08T13:13:31.524Z" }, + { url = "https://files.pythonhosted.org/packages/fb/e3/fe3fd4702a3832a255f4d43013eacb0ef5fc155a5960ea9269d8696db28b/coverage-7.13.0-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:06cac81bf10f74034e055e903f5f946e3e26fc51c09fc9f584e4a1605d977053", size = 260638, upload-time = "2025-12-08T13:13:32.965Z" }, + { url = "https://files.pythonhosted.org/packages/ad/01/63186cb000307f2b4da463f72af9b85d380236965574c78e7e27680a2593/coverage-7.13.0-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:f2ffc92b46ed6e6760f1d47a71e56b5664781bc68986dbd1836b2b70c0ce2071", size = 262705, upload-time = "2025-12-08T13:13:34.378Z" }, + { url = "https://files.pythonhosted.org/packages/7c/a1/c0dacef0cc865f2455d59eed3548573ce47ed603205ffd0735d1d78b5906/coverage-7.13.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0602f701057c6823e5db1b74530ce85f17c3c5be5c85fc042ac939cbd909426e", size = 265125, upload-time = "2025-12-08T13:13:35.73Z" }, + { url = 
"https://files.pythonhosted.org/packages/ef/92/82b99223628b61300bd382c205795533bed021505eab6dd86e11fb5d7925/coverage-7.13.0-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:25dc33618d45456ccb1d37bce44bc78cf269909aa14c4db2e03d63146a8a1493", size = 259844, upload-time = "2025-12-08T13:13:37.69Z" }, + { url = "https://files.pythonhosted.org/packages/cf/2c/89b0291ae4e6cd59ef042708e1c438e2290f8c31959a20055d8768349ee2/coverage-7.13.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:71936a8b3b977ddd0b694c28c6a34f4fff2e9dd201969a4ff5d5fc7742d614b0", size = 262700, upload-time = "2025-12-08T13:13:39.525Z" }, + { url = "https://files.pythonhosted.org/packages/bf/f9/a5f992efae1996245e796bae34ceb942b05db275e4b34222a9a40b9fbd3b/coverage-7.13.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:936bc20503ce24770c71938d1369461f0c5320830800933bc3956e2a4ded930e", size = 260321, upload-time = "2025-12-08T13:13:41.172Z" }, + { url = "https://files.pythonhosted.org/packages/4c/89/a29f5d98c64fedbe32e2ac3c227fbf78edc01cc7572eee17d61024d89889/coverage-7.13.0-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:af0a583efaacc52ae2521f8d7910aff65cdb093091d76291ac5820d5e947fc1c", size = 259222, upload-time = "2025-12-08T13:13:43.282Z" }, + { url = "https://files.pythonhosted.org/packages/b3/c3/940fe447aae302a6701ee51e53af7e08b86ff6eed7631e5740c157ee22b9/coverage-7.13.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:f1c23e24a7000da892a312fb17e33c5f94f8b001de44b7cf8ba2e36fbd15859e", size = 261411, upload-time = "2025-12-08T13:13:44.72Z" }, + { url = "https://files.pythonhosted.org/packages/eb/31/12a4aec689cb942a89129587860ed4d0fd522d5fda81237147fde554b8ae/coverage-7.13.0-cp313-cp313t-win32.whl", hash = "sha256:5f8a0297355e652001015e93be345ee54393e45dc3050af4a0475c5a2b767d46", size = 221505, upload-time = "2025-12-08T13:13:46.332Z" }, + { url = 
"https://files.pythonhosted.org/packages/65/8c/3b5fe3259d863572d2b0827642c50c3855d26b3aefe80bdc9eba1f0af3b0/coverage-7.13.0-cp313-cp313t-win_amd64.whl", hash = "sha256:6abb3a4c52f05e08460bd9acf04fec027f8718ecaa0d09c40ffbc3fbd70ecc39", size = 222569, upload-time = "2025-12-08T13:13:47.79Z" }, + { url = "https://files.pythonhosted.org/packages/b0/39/f71fa8316a96ac72fc3908839df651e8eccee650001a17f2c78cdb355624/coverage-7.13.0-cp313-cp313t-win_arm64.whl", hash = "sha256:3ad968d1e3aa6ce5be295ab5fe3ae1bf5bb4769d0f98a80a0252d543a2ef2e9e", size = 220841, upload-time = "2025-12-08T13:13:49.243Z" }, + { url = "https://files.pythonhosted.org/packages/f8/4b/9b54bedda55421449811dcd5263a2798a63f48896c24dfb92b0f1b0845bd/coverage-7.13.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:453b7ec753cf5e4356e14fe858064e5520c460d3bbbcb9c35e55c0d21155c256", size = 218343, upload-time = "2025-12-08T13:13:50.811Z" }, + { url = "https://files.pythonhosted.org/packages/59/df/c3a1f34d4bba2e592c8979f924da4d3d4598b0df2392fbddb7761258e3dc/coverage-7.13.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:af827b7cbb303e1befa6c4f94fd2bf72f108089cfa0f8abab8f4ca553cf5ca5a", size = 218672, upload-time = "2025-12-08T13:13:52.284Z" }, + { url = "https://files.pythonhosted.org/packages/07/62/eec0659e47857698645ff4e6ad02e30186eb8afd65214fd43f02a76537cb/coverage-7.13.0-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:9987a9e4f8197a1000280f7cc089e3ea2c8b3c0a64d750537809879a7b4ceaf9", size = 249715, upload-time = "2025-12-08T13:13:53.791Z" }, + { url = "https://files.pythonhosted.org/packages/23/2d/3c7ff8b2e0e634c1f58d095f071f52ed3c23ff25be524b0ccae8b71f99f8/coverage-7.13.0-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:3188936845cd0cb114fa6a51842a304cdbac2958145d03be2377ec41eb285d19", size = 252225, upload-time = "2025-12-08T13:13:55.274Z" }, + { url = 
"https://files.pythonhosted.org/packages/aa/ac/fb03b469d20e9c9a81093575003f959cf91a4a517b783aab090e4538764b/coverage-7.13.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a2bdb3babb74079f021696cb46b8bb5f5661165c385d3a238712b031a12355be", size = 253559, upload-time = "2025-12-08T13:13:57.161Z" }, + { url = "https://files.pythonhosted.org/packages/29/62/14afa9e792383c66cc0a3b872a06ded6e4ed1079c7d35de274f11d27064e/coverage-7.13.0-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:7464663eaca6adba4175f6c19354feea61ebbdd735563a03d1e472c7072d27bb", size = 249724, upload-time = "2025-12-08T13:13:58.692Z" }, + { url = "https://files.pythonhosted.org/packages/31/b7/333f3dab2939070613696ab3ee91738950f0467778c6e5a5052e840646b7/coverage-7.13.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:8069e831f205d2ff1f3d355e82f511eb7c5522d7d413f5db5756b772ec8697f8", size = 251582, upload-time = "2025-12-08T13:14:00.642Z" }, + { url = "https://files.pythonhosted.org/packages/81/cb/69162bda9381f39b2287265d7e29ee770f7c27c19f470164350a38318764/coverage-7.13.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:6fb2d5d272341565f08e962cce14cdf843a08ac43bd621783527adb06b089c4b", size = 249538, upload-time = "2025-12-08T13:14:02.556Z" }, + { url = "https://files.pythonhosted.org/packages/e0/76/350387b56a30f4970abe32b90b2a434f87d29f8b7d4ae40d2e8a85aacfb3/coverage-7.13.0-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:5e70f92ef89bac1ac8a99b3324923b4749f008fdbd7aa9cb35e01d7a284a04f9", size = 249349, upload-time = "2025-12-08T13:14:04.015Z" }, + { url = "https://files.pythonhosted.org/packages/86/0d/7f6c42b8d59f4c7e43ea3059f573c0dcfed98ba46eb43c68c69e52ae095c/coverage-7.13.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:4b5de7d4583e60d5fd246dd57fcd3a8aa23c6e118a8c72b38adf666ba8e7e927", size = 251011, upload-time = "2025-12-08T13:14:05.505Z" }, + { url = 
"https://files.pythonhosted.org/packages/d7/f1/4bb2dff379721bb0b5c649d5c5eaf438462cad824acf32eb1b7ca0c7078e/coverage-7.13.0-cp314-cp314-win32.whl", hash = "sha256:a6c6e16b663be828a8f0b6c5027d36471d4a9f90d28444aa4ced4d48d7d6ae8f", size = 221091, upload-time = "2025-12-08T13:14:07.127Z" }, + { url = "https://files.pythonhosted.org/packages/ba/44/c239da52f373ce379c194b0ee3bcc121020e397242b85f99e0afc8615066/coverage-7.13.0-cp314-cp314-win_amd64.whl", hash = "sha256:0900872f2fdb3ee5646b557918d02279dc3af3dfb39029ac4e945458b13f73bc", size = 221904, upload-time = "2025-12-08T13:14:08.542Z" }, + { url = "https://files.pythonhosted.org/packages/89/1f/b9f04016d2a29c2e4a0307baefefad1a4ec5724946a2b3e482690486cade/coverage-7.13.0-cp314-cp314-win_arm64.whl", hash = "sha256:3a10260e6a152e5f03f26db4a407c4c62d3830b9af9b7c0450b183615f05d43b", size = 220480, upload-time = "2025-12-08T13:14:10.958Z" }, + { url = "https://files.pythonhosted.org/packages/16/d4/364a1439766c8e8647860584171c36010ca3226e6e45b1753b1b249c5161/coverage-7.13.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:9097818b6cc1cfb5f174e3263eba4a62a17683bcfe5c4b5d07f4c97fa51fbf28", size = 219074, upload-time = "2025-12-08T13:14:13.345Z" }, + { url = "https://files.pythonhosted.org/packages/ce/f4/71ba8be63351e099911051b2089662c03d5671437a0ec2171823c8e03bec/coverage-7.13.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:0018f73dfb4301a89292c73be6ba5f58722ff79f51593352759c1790ded1cabe", size = 219342, upload-time = "2025-12-08T13:14:15.02Z" }, + { url = "https://files.pythonhosted.org/packages/5e/25/127d8ed03d7711a387d96f132589057213e3aef7475afdaa303412463f22/coverage-7.13.0-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:166ad2a22ee770f5656e1257703139d3533b4a0b6909af67c6b4a3adc1c98657", size = 260713, upload-time = "2025-12-08T13:14:16.907Z" }, + { url = 
"https://files.pythonhosted.org/packages/fd/db/559fbb6def07d25b2243663b46ba9eb5a3c6586c0c6f4e62980a68f0ee1c/coverage-7.13.0-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:f6aaef16d65d1787280943f1c8718dc32e9cf141014e4634d64446702d26e0ff", size = 262825, upload-time = "2025-12-08T13:14:18.68Z" }, + { url = "https://files.pythonhosted.org/packages/37/99/6ee5bf7eff884766edb43bd8736b5e1c5144d0fe47498c3779326fe75a35/coverage-7.13.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e999e2dcc094002d6e2c7bbc1fb85b58ba4f465a760a8014d97619330cdbbbf3", size = 265233, upload-time = "2025-12-08T13:14:20.55Z" }, + { url = "https://files.pythonhosted.org/packages/d8/90/92f18fe0356ea69e1f98f688ed80cec39f44e9f09a1f26a1bbf017cc67f2/coverage-7.13.0-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:00c3d22cf6fb1cf3bf662aaaa4e563be8243a5ed2630339069799835a9cc7f9b", size = 259779, upload-time = "2025-12-08T13:14:22.367Z" }, + { url = "https://files.pythonhosted.org/packages/90/5d/b312a8b45b37a42ea7d27d7d3ff98ade3a6c892dd48d1d503e773503373f/coverage-7.13.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:22ccfe8d9bb0d6134892cbe1262493a8c70d736b9df930f3f3afae0fe3ac924d", size = 262700, upload-time = "2025-12-08T13:14:24.309Z" }, + { url = "https://files.pythonhosted.org/packages/63/f8/b1d0de5c39351eb71c366f872376d09386640840a2e09b0d03973d791e20/coverage-7.13.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:9372dff5ea15930fea0445eaf37bbbafbc771a49e70c0aeed8b4e2c2614cc00e", size = 260302, upload-time = "2025-12-08T13:14:26.068Z" }, + { url = "https://files.pythonhosted.org/packages/aa/7c/d42f4435bc40c55558b3109a39e2d456cddcec37434f62a1f1230991667a/coverage-7.13.0-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:69ac2c492918c2461bc6ace42d0479638e60719f2a4ef3f0815fa2df88e9f940", size = 259136, upload-time = "2025-12-08T13:14:27.604Z" }, + { url = 
"https://files.pythonhosted.org/packages/b8/d3/23413241dc04d47cfe19b9a65b32a2edd67ecd0b817400c2843ebc58c847/coverage-7.13.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:739c6c051a7540608d097b8e13c76cfa85263ced467168dc6b477bae3df7d0e2", size = 261467, upload-time = "2025-12-08T13:14:29.09Z" }, + { url = "https://files.pythonhosted.org/packages/13/e6/6e063174500eee216b96272c0d1847bf215926786f85c2bd024cf4d02d2f/coverage-7.13.0-cp314-cp314t-win32.whl", hash = "sha256:fe81055d8c6c9de76d60c94ddea73c290b416e061d40d542b24a5871bad498b7", size = 221875, upload-time = "2025-12-08T13:14:31.106Z" }, + { url = "https://files.pythonhosted.org/packages/3b/46/f4fb293e4cbe3620e3ac2a3e8fd566ed33affb5861a9b20e3dd6c1896cbc/coverage-7.13.0-cp314-cp314t-win_amd64.whl", hash = "sha256:445badb539005283825959ac9fa4a28f712c214b65af3a2c464f1adc90f5fcbc", size = 222982, upload-time = "2025-12-08T13:14:33.1Z" }, + { url = "https://files.pythonhosted.org/packages/68/62/5b3b9018215ed9733fbd1ae3b2ed75c5de62c3b55377a52cae732e1b7805/coverage-7.13.0-cp314-cp314t-win_arm64.whl", hash = "sha256:de7f6748b890708578fc4b7bb967d810aeb6fcc9bff4bb77dbca77dab2f9df6a", size = 221016, upload-time = "2025-12-08T13:14:34.601Z" }, + { url = "https://files.pythonhosted.org/packages/8d/4c/1968f32fb9a2604645827e11ff84a31e59d532e01995f904723b4f5328b3/coverage-7.13.0-py3-none-any.whl", hash = "sha256:850d2998f380b1e266459ca5b47bc9e7daf9af1d070f66317972f382d46f1904", size = 210068, upload-time = "2025-12-08T13:14:36.236Z" }, +] + +[[package]] +name = "exceptiongroup" +version = "1.3.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/50/79/66800aadf48771f6b62f7eb014e352e5d06856655206165d775e675a02c9/exceptiongroup-1.3.1.tar.gz", hash = "sha256:8b412432c6055b0b7d14c310000ae93352ed6754f70fa8f7c34141f91c4e3219", size = 30371, upload-time = 
"2025-11-21T23:01:54.787Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8a/0e/97c33bf5009bdbac74fd2beace167cab3f978feb69cc36f1ef79360d6c4e/exceptiongroup-1.3.1-py3-none-any.whl", hash = "sha256:a7a39a3bd276781e98394987d3a5701d0c4edffb633bb7a5144577f82c773598", size = 16740, upload-time = "2025-11-21T23:01:53.443Z" }, +] + +[[package]] +name = "faker" +version = "37.12.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.10'", +] +dependencies = [ + { name = "tzdata", marker = "python_full_version < '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/3d/84/e95acaa848b855e15c83331d0401ee5f84b2f60889255c2e055cb4fb6bdf/faker-37.12.0.tar.gz", hash = "sha256:7505e59a7e02fa9010f06c3e1e92f8250d4cfbb30632296140c2d6dbef09b0fa", size = 1935741, upload-time = "2025-10-24T15:19:58.764Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8e/98/2c050dec90e295a524c9b65c4cb9e7c302386a296b2938710448cbd267d5/faker-37.12.0-py3-none-any.whl", hash = "sha256:afe7ccc038da92f2fbae30d8e16d19d91e92e242f8401ce9caf44de892bab4c4", size = 1975461, upload-time = "2025-10-24T15:19:55.739Z" }, +] + +[[package]] +name = "faker" +version = "38.2.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.10'", +] +dependencies = [ + { name = "tzdata", marker = "python_full_version >= '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/64/27/022d4dbd4c20567b4c294f79a133cc2f05240ea61e0d515ead18c995c249/faker-38.2.0.tar.gz", hash = "sha256:20672803db9c7cb97f9b56c18c54b915b6f1d8991f63d1d673642dc43f5ce7ab", size = 1941469, upload-time = "2025-11-19T16:37:31.892Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/17/93/00c94d45f55c336434a15f98d906387e87ce28f9918e4444829a8fda432d/faker-38.2.0-py3-none-any.whl", hash = "sha256:35fe4a0a79dee0dc4103a6083ee9224941e7d3594811a50e3969e547b0d2ee65", size = 1980505, upload-time = 
"2025-11-19T16:37:30.208Z" }, +] + +[[package]] +name = "h11" +version = "0.16.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/01/ee/02a2c011bdab74c6fb3c75474d40b3052059d95df7e73351460c8588d963/h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1", size = 101250, upload-time = "2025-04-24T03:35:25.427Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515, upload-time = "2025-04-24T03:35:24.344Z" }, +] + +[[package]] +name = "httpcore" +version = "1.0.9" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "h11" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/06/94/82699a10bca87a5556c9c59b5963f2d039dbd239f25bc2a63907a05a14cb/httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8", size = 85484, upload-time = "2025-04-24T22:06:22.219Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784, upload-time = "2025-04-24T22:06:20.566Z" }, +] + +[[package]] +name = "httpx" +version = "0.28.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "certifi" }, + { name = "httpcore" }, + { name = "idna" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406, upload-time = "2024-12-06T15:37:23.222Z" } +wheels = [ 
+ { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517, upload-time = "2024-12-06T15:37:21.509Z" }, +] + +[[package]] +name = "idna" +version = "3.11" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/6f/6d/0703ccc57f3a7233505399edb88de3cbd678da106337b9fcde432b65ed60/idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902", size = 194582, upload-time = "2025-10-12T14:55:20.501Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 71008, upload-time = "2025-10-12T14:55:18.883Z" }, +] + +[[package]] +name = "importlib-metadata" +version = "8.7.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "zipp", marker = "python_full_version < '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/76/66/650a33bd90f786193e4de4b3ad86ea60b53c89b669a5c7be931fac31cdb0/importlib_metadata-8.7.0.tar.gz", hash = "sha256:d13b81ad223b890aa16c5471f2ac3056cf76c5f10f82d6f9292f0b415f389000", size = 56641, upload-time = "2025-04-27T15:29:01.736Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/20/b0/36bd937216ec521246249be3bf9855081de4c5e06a0c9b4219dbeda50373/importlib_metadata-8.7.0-py3-none-any.whl", hash = "sha256:e5dd1551894c77868a30651cef00984d50e1002d06942a7101d34870c5f02afd", size = 27656, upload-time = "2025-04-27T15:29:00.214Z" }, +] + +[[package]] +name = "iniconfig" +version = "2.1.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.10'", +] +sdist = { url = 
"https://files.pythonhosted.org/packages/f2/97/ebf4da567aa6827c909642694d71c9fcf53e5b504f2d96afea02718862f3/iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7", size = 4793, upload-time = "2025-03-19T20:09:59.721Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2c/e1/e6716421ea10d38022b952c159d5161ca1193197fb744506875fbb87ea7b/iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760", size = 6050, upload-time = "2025-03-19T20:10:01.071Z" }, +] + +[[package]] +name = "iniconfig" +version = "2.3.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.10'", +] +sdist = { url = "https://files.pythonhosted.org/packages/72/34/14ca021ce8e5dfedc35312d08ba8bf51fdd999c576889fc2c24cb97f4f10/iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730", size = 20503, upload-time = "2025-10-18T21:55:43.219Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12", size = 7484, upload-time = "2025-10-18T21:55:41.639Z" }, +] + +[[package]] +name = "isort" +version = "6.1.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.10'", +] +dependencies = [ + { name = "importlib-metadata", marker = "python_full_version < '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/1e/82/fa43935523efdfcce6abbae9da7f372b627b27142c3419fcf13bf5b0c397/isort-6.1.0.tar.gz", hash = "sha256:9b8f96a14cfee0677e78e941ff62f03769a06d412aabb9e2a90487b3b7e8d481", size = 824325, upload-time = "2025-10-01T16:26:45.027Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/7f/cc/9b681a170efab4868a032631dea1e8446d8ec718a7f657b94d49d1a12643/isort-6.1.0-py3-none-any.whl", hash = "sha256:58d8927ecce74e5087aef019f778d4081a3b6c98f15a80ba35782ca8a2097784", size = 94329, upload-time = "2025-10-01T16:26:43.291Z" }, +] + +[[package]] +name = "isort" +version = "7.0.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.10'", +] +sdist = { url = "https://files.pythonhosted.org/packages/63/53/4f3c058e3bace40282876f9b553343376ee687f3c35a525dc79dbd450f88/isort-7.0.0.tar.gz", hash = "sha256:5513527951aadb3ac4292a41a16cbc50dd1642432f5e8c20057d414bdafb4187", size = 805049, upload-time = "2025-10-11T13:30:59.107Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7f/ed/e3705d6d02b4f7aea715a353c8ce193efd0b5db13e204df895d38734c244/isort-7.0.0-py3-none-any.whl", hash = "sha256:1bcabac8bc3c36c7fb7b98a76c8abb18e0f841a3ba81decac7691008592499c1", size = 94672, upload-time = "2025-10-11T13:30:57.665Z" }, +] + +[[package]] +name = "librt" +version = "0.7.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b3/d9/6f3d3fcf5e5543ed8a60cc70fa7d50508ed60b8a10e9af6d2058159ab54e/librt-0.7.3.tar.gz", hash = "sha256:3ec50cf65235ff5c02c5b747748d9222e564ad48597122a361269dd3aa808798", size = 144549, upload-time = "2025-12-06T19:04:45.553Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4d/66/79a14e672256ef58144a24eb49adb338ec02de67ff4b45320af6504682ab/librt-0.7.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2682162855a708e3270eba4b92026b93f8257c3e65278b456c77631faf0f4f7a", size = 54707, upload-time = "2025-12-06T19:03:10.881Z" }, + { url = "https://files.pythonhosted.org/packages/58/fa/b709c65a9d5eab85f7bcfe0414504d9775aaad6e78727a0327e175474caa/librt-0.7.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:440c788f707c061d237c1e83edf6164ff19f5c0f823a3bf054e88804ebf971ec", size = 
56670, upload-time = "2025-12-06T19:03:12.107Z" }, + { url = "https://files.pythonhosted.org/packages/3a/56/0685a0772ec89ddad4c00e6b584603274c3d818f9a68e2c43c4eb7b39ee9/librt-0.7.3-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:399938edbd3d78339f797d685142dd8a623dfaded023cf451033c85955e4838a", size = 161045, upload-time = "2025-12-06T19:03:13.444Z" }, + { url = "https://files.pythonhosted.org/packages/4e/d9/863ada0c5ce48aefb89df1555e392b2209fcb6daee4c153c031339b9a89b/librt-0.7.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1975eda520957c6e0eb52d12968dd3609ffb7eef05d4223d097893d6daf1d8a7", size = 169532, upload-time = "2025-12-06T19:03:14.699Z" }, + { url = "https://files.pythonhosted.org/packages/68/a0/71da6c8724fd16c31749905ef1c9e11de206d9301b5be984bf2682b4efb3/librt-0.7.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f9da128d0edf990cf0d2ca011b02cd6f639e79286774bd5b0351245cbb5a6e51", size = 183277, upload-time = "2025-12-06T19:03:16.446Z" }, + { url = "https://files.pythonhosted.org/packages/8c/bf/9c97bf2f8338ba1914de233ea312bba2bbd7c59f43f807b3e119796bab18/librt-0.7.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e19acfde38cb532a560b98f473adc741c941b7a9bc90f7294bc273d08becb58b", size = 179045, upload-time = "2025-12-06T19:03:17.838Z" }, + { url = "https://files.pythonhosted.org/packages/b3/b1/ceea067f489e904cb4ddcca3c9b06ba20229bc3fa7458711e24a5811f162/librt-0.7.3-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:7b4f57f7a0c65821c5441d98c47ff7c01d359b1e12328219709bdd97fdd37f90", size = 173521, upload-time = "2025-12-06T19:03:19.17Z" }, + { url = "https://files.pythonhosted.org/packages/7a/41/6cb18f5da9c89ed087417abb0127a445a50ad4eaf1282ba5b52588187f47/librt-0.7.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:256793988bff98040de23c57cf36e1f4c2f2dc3dcd17537cdac031d3b681db71", size = 193592, 
upload-time = "2025-12-06T19:03:20.637Z" }, + { url = "https://files.pythonhosted.org/packages/4c/3c/fcef208746584e7c78584b7aedc617130c4a4742cb8273361bbda8b183b5/librt-0.7.3-cp310-cp310-win32.whl", hash = "sha256:fcb72249ac4ea81a7baefcbff74df7029c3cb1cf01a711113fa052d563639c9c", size = 47201, upload-time = "2025-12-06T19:03:21.764Z" }, + { url = "https://files.pythonhosted.org/packages/c4/bf/d8a6c35d1b2b789a4df9b3ddb1c8f535ea373fde2089698965a8f0d62138/librt-0.7.3-cp310-cp310-win_amd64.whl", hash = "sha256:4887c29cadbdc50640179e3861c276325ff2986791e6044f73136e6e798ff806", size = 54371, upload-time = "2025-12-06T19:03:23.231Z" }, + { url = "https://files.pythonhosted.org/packages/21/e6/f6391f5c6f158d31ed9af6bd1b1bcd3ffafdea1d816bc4219d0d90175a7f/librt-0.7.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:687403cced6a29590e6be6964463835315905221d797bc5c934a98750fe1a9af", size = 54711, upload-time = "2025-12-06T19:03:24.6Z" }, + { url = "https://files.pythonhosted.org/packages/ab/1b/53c208188c178987c081560a0fcf36f5ca500d5e21769596c845ef2f40d4/librt-0.7.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:24d70810f6e2ea853ff79338001533716b373cc0f63e2a0be5bc96129edb5fb5", size = 56664, upload-time = "2025-12-06T19:03:25.969Z" }, + { url = "https://files.pythonhosted.org/packages/cb/5c/d9da832b9a1e5f8366e8a044ec80217945385b26cb89fd6f94bfdc7d80b0/librt-0.7.3-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:bf8c7735fbfc0754111f00edda35cf9e98a8d478de6c47b04eaa9cef4300eaa7", size = 161701, upload-time = "2025-12-06T19:03:27.035Z" }, + { url = "https://files.pythonhosted.org/packages/20/aa/1e0a7aba15e78529dd21f233076b876ee58c8b8711b1793315bdd3b263b0/librt-0.7.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e32d43610dff472eab939f4d7fbdd240d1667794192690433672ae22d7af8445", size = 171040, upload-time = "2025-12-06T19:03:28.482Z" }, + { url = 
"https://files.pythonhosted.org/packages/69/46/3cfa325c1c2bc25775ec6ec1718cfbec9cff4ac767d37d2d3a2d1cc6f02c/librt-0.7.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:adeaa886d607fb02563c1f625cf2ee58778a2567c0c109378da8f17ec3076ad7", size = 184720, upload-time = "2025-12-06T19:03:29.599Z" }, + { url = "https://files.pythonhosted.org/packages/99/bb/e4553433d7ac47f4c75d0a7e59b13aee0e08e88ceadbee356527a9629b0a/librt-0.7.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:572a24fc5958c61431da456a0ef1eeea6b4989d81eeb18b8e5f1f3077592200b", size = 180731, upload-time = "2025-12-06T19:03:31.201Z" }, + { url = "https://files.pythonhosted.org/packages/35/89/51cd73006232981a3106d4081fbaa584ac4e27b49bc02266468d3919db03/librt-0.7.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:6488e69d408b492e08bfb68f20c4a899a354b4386a446ecd490baff8d0862720", size = 174565, upload-time = "2025-12-06T19:03:32.818Z" }, + { url = "https://files.pythonhosted.org/packages/42/54/0578a78b587e5aa22486af34239a052c6366835b55fc307bc64380229e3f/librt-0.7.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ed028fc3d41adda916320712838aec289956c89b4f0a361ceadf83a53b4c047a", size = 195247, upload-time = "2025-12-06T19:03:34.434Z" }, + { url = "https://files.pythonhosted.org/packages/b5/0a/ee747cd999753dd9447e50b98fc36ee433b6c841a42dbf6d47b64b32a56e/librt-0.7.3-cp311-cp311-win32.whl", hash = "sha256:2cf9d73499486ce39eebbff5f42452518cc1f88d8b7ea4a711ab32962b176ee2", size = 47514, upload-time = "2025-12-06T19:03:35.959Z" }, + { url = "https://files.pythonhosted.org/packages/ec/af/8b13845178dec488e752878f8e290f8f89e7e34ae1528b70277aa1a6dd1e/librt-0.7.3-cp311-cp311-win_amd64.whl", hash = "sha256:35f1609e3484a649bb80431310ddbec81114cd86648f1d9482bc72a3b86ded2e", size = 54695, upload-time = "2025-12-06T19:03:36.956Z" }, + { url = 
"https://files.pythonhosted.org/packages/02/7a/ae59578501b1a25850266778f59279f4f3e726acc5c44255bfcb07b4bc57/librt-0.7.3-cp311-cp311-win_arm64.whl", hash = "sha256:550fdbfbf5bba6a2960b27376ca76d6aaa2bd4b1a06c4255edd8520c306fcfc0", size = 48142, upload-time = "2025-12-06T19:03:38.263Z" }, + { url = "https://files.pythonhosted.org/packages/29/90/ed8595fa4e35b6020317b5ea8d226a782dcbac7a997c19ae89fb07a41c66/librt-0.7.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:0fa9ac2e49a6bee56e47573a6786cb635e128a7b12a0dc7851090037c0d397a3", size = 55687, upload-time = "2025-12-06T19:03:39.245Z" }, + { url = "https://files.pythonhosted.org/packages/dd/f6/6a20702a07b41006cb001a759440cb6b5362530920978f64a2b2ae2bf729/librt-0.7.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2e980cf1ed1a2420a6424e2ed884629cdead291686f1048810a817de07b5eb18", size = 57127, upload-time = "2025-12-06T19:03:40.3Z" }, + { url = "https://files.pythonhosted.org/packages/79/f3/b0c4703d5ffe9359b67bb2ccb86c42d4e930a363cfc72262ac3ba53cff3e/librt-0.7.3-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:e094e445c37c57e9ec612847812c301840239d34ccc5d153a982fa9814478c60", size = 165336, upload-time = "2025-12-06T19:03:41.369Z" }, + { url = "https://files.pythonhosted.org/packages/02/69/3ba05b73ab29ccbe003856232cea4049769be5942d799e628d1470ed1694/librt-0.7.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:aca73d70c3f553552ba9133d4a09e767dcfeee352d8d8d3eb3f77e38a3beb3ed", size = 174237, upload-time = "2025-12-06T19:03:42.44Z" }, + { url = "https://files.pythonhosted.org/packages/22/ad/d7c2671e7bf6c285ef408aa435e9cd3fdc06fd994601e1f2b242df12034f/librt-0.7.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c634a0a6db395fdaba0361aa78395597ee72c3aad651b9a307a3a7eaf5efd67e", size = 189017, upload-time = "2025-12-06T19:03:44.01Z" }, + { url = 
"https://files.pythonhosted.org/packages/f4/94/d13f57193148004592b618555f296b41d2d79b1dc814ff8b3273a0bf1546/librt-0.7.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a59a69deeb458c858b8fea6acf9e2acd5d755d76cd81a655256bc65c20dfff5b", size = 183983, upload-time = "2025-12-06T19:03:45.834Z" }, + { url = "https://files.pythonhosted.org/packages/02/10/b612a9944ebd39fa143c7e2e2d33f2cb790205e025ddd903fb509a3a3bb3/librt-0.7.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:d91e60ac44bbe3a77a67af4a4c13114cbe9f6d540337ce22f2c9eaf7454ca71f", size = 177602, upload-time = "2025-12-06T19:03:46.944Z" }, + { url = "https://files.pythonhosted.org/packages/1f/48/77bc05c4cc232efae6c5592c0095034390992edbd5bae8d6cf1263bb7157/librt-0.7.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:703456146dc2bf430f7832fd1341adac5c893ec3c1430194fdcefba00012555c", size = 199282, upload-time = "2025-12-06T19:03:48.069Z" }, + { url = "https://files.pythonhosted.org/packages/12/aa/05916ccd864227db1ffec2a303ae34f385c6b22d4e7ce9f07054dbcf083c/librt-0.7.3-cp312-cp312-win32.whl", hash = "sha256:b7c1239b64b70be7759554ad1a86288220bbb04d68518b527783c4ad3fb4f80b", size = 47879, upload-time = "2025-12-06T19:03:49.289Z" }, + { url = "https://files.pythonhosted.org/packages/50/92/7f41c42d31ea818b3c4b9cc1562e9714bac3c676dd18f6d5dd3d0f2aa179/librt-0.7.3-cp312-cp312-win_amd64.whl", hash = "sha256:ef59c938f72bdbc6ab52dc50f81d0637fde0f194b02d636987cea2ab30f8f55a", size = 54972, upload-time = "2025-12-06T19:03:50.335Z" }, + { url = "https://files.pythonhosted.org/packages/3f/dc/53582bbfb422311afcbc92adb75711f04e989cec052f08ec0152fbc36c9c/librt-0.7.3-cp312-cp312-win_arm64.whl", hash = "sha256:ff21c554304e8226bf80c3a7754be27c6c3549a9fec563a03c06ee8f494da8fc", size = 48338, upload-time = "2025-12-06T19:03:51.431Z" }, + { url = "https://files.pythonhosted.org/packages/93/7d/e0ce1837dfb452427db556e6d4c5301ba3b22fe8de318379fbd0593759b9/librt-0.7.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = 
"sha256:56f2a47beda8409061bc1c865bef2d4bd9ff9255219402c0817e68ab5ad89aed", size = 55742, upload-time = "2025-12-06T19:03:52.459Z" }, + { url = "https://files.pythonhosted.org/packages/be/c0/3564262301e507e1d5cf31c7d84cb12addf0d35e05ba53312494a2eba9a4/librt-0.7.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:14569ac5dd38cfccf0a14597a88038fb16811a6fede25c67b79c6d50fc2c8fdc", size = 57163, upload-time = "2025-12-06T19:03:53.516Z" }, + { url = "https://files.pythonhosted.org/packages/be/ac/245e72b7e443d24a562f6047563c7f59833384053073ef9410476f68505b/librt-0.7.3-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:6038ccbd5968325a5d6fd393cf6e00b622a8de545f0994b89dd0f748dcf3e19e", size = 165840, upload-time = "2025-12-06T19:03:54.918Z" }, + { url = "https://files.pythonhosted.org/packages/98/af/587e4491f40adba066ba39a450c66bad794c8d92094f936a201bfc7c2b5f/librt-0.7.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d39079379a9a28e74f4d57dc6357fa310a1977b51ff12239d7271ec7e71d67f5", size = 174827, upload-time = "2025-12-06T19:03:56.082Z" }, + { url = "https://files.pythonhosted.org/packages/78/21/5b8c60ea208bc83dd00421022a3874330685d7e856404128dc3728d5d1af/librt-0.7.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8837d5a52a2d7aa9f4c3220a8484013aed1d8ad75240d9a75ede63709ef89055", size = 189612, upload-time = "2025-12-06T19:03:57.507Z" }, + { url = "https://files.pythonhosted.org/packages/da/2f/8b819169ef696421fb81cd04c6cdf225f6e96f197366001e9d45180d7e9e/librt-0.7.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:399bbd7bcc1633c3e356ae274a1deb8781c7bf84d9c7962cc1ae0c6e87837292", size = 184584, upload-time = "2025-12-06T19:03:58.686Z" }, + { url = "https://files.pythonhosted.org/packages/6c/fc/af9d225a9395b77bd7678362cb055d0b8139c2018c37665de110ca388022/librt-0.7.3-cp313-cp313-musllinux_1_2_i686.whl", hash = 
"sha256:8d8cf653e798ee4c4e654062b633db36984a1572f68c3aa25e364a0ddfbbb910", size = 178269, upload-time = "2025-12-06T19:03:59.769Z" }, + { url = "https://files.pythonhosted.org/packages/6c/d8/7b4fa1683b772966749d5683aa3fd605813defffe157833a8fa69cc89207/librt-0.7.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:2f03484b54bf4ae80ab2e504a8d99d20d551bfe64a7ec91e218010b467d77093", size = 199852, upload-time = "2025-12-06T19:04:00.901Z" }, + { url = "https://files.pythonhosted.org/packages/77/e8/4598413aece46ca38d9260ef6c51534bd5f34b5c21474fcf210ce3a02123/librt-0.7.3-cp313-cp313-win32.whl", hash = "sha256:44b3689b040df57f492e02cd4f0bacd1b42c5400e4b8048160c9d5e866de8abe", size = 47936, upload-time = "2025-12-06T19:04:02.054Z" }, + { url = "https://files.pythonhosted.org/packages/af/80/ac0e92d5ef8c6791b3e2c62373863827a279265e0935acdf807901353b0e/librt-0.7.3-cp313-cp313-win_amd64.whl", hash = "sha256:6b407c23f16ccc36614c136251d6b32bf30de7a57f8e782378f1107be008ddb0", size = 54965, upload-time = "2025-12-06T19:04:03.224Z" }, + { url = "https://files.pythonhosted.org/packages/f1/fd/042f823fcbff25c1449bb4203a29919891ca74141b68d3a5f6612c4ce283/librt-0.7.3-cp313-cp313-win_arm64.whl", hash = "sha256:abfc57cab3c53c4546aee31859ef06753bfc136c9d208129bad23e2eca39155a", size = 48350, upload-time = "2025-12-06T19:04:04.234Z" }, + { url = "https://files.pythonhosted.org/packages/3e/ae/c6ecc7bb97134a71b5241e8855d39964c0e5f4d96558f0d60593892806d2/librt-0.7.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:120dd21d46ff875e849f1aae19346223cf15656be489242fe884036b23d39e93", size = 55175, upload-time = "2025-12-06T19:04:05.308Z" }, + { url = "https://files.pythonhosted.org/packages/cf/bc/2cc0cb0ab787b39aa5c7645cd792433c875982bdf12dccca558b89624594/librt-0.7.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1617bea5ab31266e152871208502ee943cb349c224846928a1173c864261375e", size = 56881, upload-time = "2025-12-06T19:04:06.674Z" }, + { url = 
"https://files.pythonhosted.org/packages/8e/87/397417a386190b70f5bf26fcedbaa1515f19dce33366e2684c6b7ee83086/librt-0.7.3-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:93b2a1f325fefa1482516ced160c8c7b4b8d53226763fa6c93d151fa25164207", size = 163710, upload-time = "2025-12-06T19:04:08.437Z" }, + { url = "https://files.pythonhosted.org/packages/c9/37/7338f85b80e8a17525d941211451199845093ca242b32efbf01df8531e72/librt-0.7.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f3d4801db8354436fd3936531e7f0e4feb411f62433a6b6cb32bb416e20b529f", size = 172471, upload-time = "2025-12-06T19:04:10.124Z" }, + { url = "https://files.pythonhosted.org/packages/3b/e0/741704edabbfae2c852fedc1b40d9ed5a783c70ed3ed8e4fe98f84b25d13/librt-0.7.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:11ad45122bbed42cfc8b0597450660126ef28fd2d9ae1a219bc5af8406f95678", size = 186804, upload-time = "2025-12-06T19:04:11.586Z" }, + { url = "https://files.pythonhosted.org/packages/f4/d1/0a82129d6ba242f3be9af34815be089f35051bc79619f5c27d2c449ecef6/librt-0.7.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:6b4e7bff1d76dd2b46443078519dc75df1b5e01562345f0bb740cea5266d8218", size = 181817, upload-time = "2025-12-06T19:04:12.802Z" }, + { url = "https://files.pythonhosted.org/packages/4f/32/704f80bcf9979c68d4357c46f2af788fbf9d5edda9e7de5786ed2255e911/librt-0.7.3-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:d86f94743a11873317094326456b23f8a5788bad9161fd2f0e52088c33564620", size = 175602, upload-time = "2025-12-06T19:04:14.004Z" }, + { url = "https://files.pythonhosted.org/packages/f7/6d/4355cfa0fae0c062ba72f541d13db5bc575770125a7ad3d4f46f4109d305/librt-0.7.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:754a0d09997095ad764ccef050dd5bf26cbf457aab9effcba5890dad081d879e", size = 196497, upload-time = "2025-12-06T19:04:15.487Z" }, + { url = 
"https://files.pythonhosted.org/packages/2e/eb/ac6d8517d44209e5a712fde46f26d0055e3e8969f24d715f70bd36056230/librt-0.7.3-cp314-cp314-win32.whl", hash = "sha256:fbd7351d43b80d9c64c3cfcb50008f786cc82cba0450e8599fdd64f264320bd3", size = 44678, upload-time = "2025-12-06T19:04:16.688Z" }, + { url = "https://files.pythonhosted.org/packages/e9/93/238f026d141faf9958da588c761a0812a1a21c98cc54a76f3608454e4e59/librt-0.7.3-cp314-cp314-win_amd64.whl", hash = "sha256:d376a35c6561e81d2590506804b428fc1075fcc6298fc5bb49b771534c0ba010", size = 51689, upload-time = "2025-12-06T19:04:17.726Z" }, + { url = "https://files.pythonhosted.org/packages/52/44/43f462ad9dcf9ed7d3172fe2e30d77b980956250bd90e9889a9cca93df2a/librt-0.7.3-cp314-cp314-win_arm64.whl", hash = "sha256:cbdb3f337c88b43c3b49ca377731912c101178be91cb5071aac48faa898e6f8e", size = 44662, upload-time = "2025-12-06T19:04:18.771Z" }, + { url = "https://files.pythonhosted.org/packages/1d/35/fed6348915f96b7323241de97f26e2af481e95183b34991df12fd5ce31b1/librt-0.7.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:9f0e0927efe87cd42ad600628e595a1a0aa1c64f6d0b55f7e6059079a428641a", size = 57347, upload-time = "2025-12-06T19:04:19.812Z" }, + { url = "https://files.pythonhosted.org/packages/9a/f2/045383ccc83e3fea4fba1b761796584bc26817b6b2efb6b8a6731431d16f/librt-0.7.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:020c6db391268bcc8ce75105cb572df8cb659a43fd347366aaa407c366e5117a", size = 59223, upload-time = "2025-12-06T19:04:20.862Z" }, + { url = "https://files.pythonhosted.org/packages/77/3f/c081f8455ab1d7f4a10dbe58463ff97119272ff32494f21839c3b9029c2c/librt-0.7.3-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:7af7785f5edd1f418da09a8cdb9ec84b0213e23d597413e06525340bcce1ea4f", size = 183861, upload-time = "2025-12-06T19:04:21.963Z" }, + { url = 
"https://files.pythonhosted.org/packages/1d/f5/73c5093c22c31fbeaebc25168837f05ebfd8bf26ce00855ef97a5308f36f/librt-0.7.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8ccadf260bb46a61b9c7e89e2218f6efea9f3eeaaab4e3d1f58571890e54858e", size = 194594, upload-time = "2025-12-06T19:04:23.14Z" }, + { url = "https://files.pythonhosted.org/packages/78/b8/d5f17d4afe16612a4a94abfded94c16c5a033f183074fb130dfe56fc1a42/librt-0.7.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d9883b2d819ce83f87ba82a746c81d14ada78784db431e57cc9719179847376e", size = 206759, upload-time = "2025-12-06T19:04:24.328Z" }, + { url = "https://files.pythonhosted.org/packages/36/2e/021765c1be85ee23ffd5b5b968bb4cba7526a4db2a0fc27dcafbdfc32da7/librt-0.7.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:59cb0470612d21fa1efddfa0dd710756b50d9c7fb6c1236bbf8ef8529331dc70", size = 203210, upload-time = "2025-12-06T19:04:25.544Z" }, + { url = "https://files.pythonhosted.org/packages/77/f0/9923656e42da4fd18c594bd08cf6d7e152d4158f8b808e210d967f0dcceb/librt-0.7.3-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:1fe603877e1865b5fd047a5e40379509a4a60204aa7aa0f72b16f7a41c3f0712", size = 196708, upload-time = "2025-12-06T19:04:26.725Z" }, + { url = "https://files.pythonhosted.org/packages/fc/0b/0708b886ac760e64d6fbe7e16024e4be3ad1a3629d19489a97e9cf4c3431/librt-0.7.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:5460d99ed30f043595bbdc888f542bad2caeb6226b01c33cda3ae444e8f82d42", size = 217212, upload-time = "2025-12-06T19:04:27.892Z" }, + { url = "https://files.pythonhosted.org/packages/5d/7f/12a73ff17bca4351e73d585dd9ebf46723c4a8622c4af7fe11a2e2d011ff/librt-0.7.3-cp314-cp314t-win32.whl", hash = "sha256:d09f677693328503c9e492e33e9601464297c01f9ebd966ea8fc5308f3069bfd", size = 45586, upload-time = "2025-12-06T19:04:29.116Z" }, + { url = 
"https://files.pythonhosted.org/packages/e2/df/8decd032ac9b995e4f5606cde783711a71094128d88d97a52e397daf2c89/librt-0.7.3-cp314-cp314t-win_amd64.whl", hash = "sha256:25711f364c64cab2c910a0247e90b51421e45dbc8910ceeb4eac97a9e132fc6f", size = 53002, upload-time = "2025-12-06T19:04:30.173Z" }, + { url = "https://files.pythonhosted.org/packages/de/0c/6605b6199de8178afe7efc77ca1d8e6db00453bc1d3349d27605c0f42104/librt-0.7.3-cp314-cp314t-win_arm64.whl", hash = "sha256:a9f9b661f82693eb56beb0605156c7fca57f535704ab91837405913417d6990b", size = 45647, upload-time = "2025-12-06T19:04:31.302Z" }, + { url = "https://files.pythonhosted.org/packages/e1/70/b3f19e3bb34f44e218c8271dc0b2b14eb6b183fbccbececf94c71e2b5e69/librt-0.7.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:cd8551aa21df6c60baa2624fd086ae7486bdde00c44097b32e1d1b1966e365e0", size = 54850, upload-time = "2025-12-06T19:04:32.742Z" }, + { url = "https://files.pythonhosted.org/packages/a0/97/6599ed7726aaa9b5bacea206d5861b94e76866240e2f394a59594bf3db46/librt-0.7.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6eb9295c730e26b849ed1f4022735f36863eb46b14b6e10604c1c39b8b5efaea", size = 56797, upload-time = "2025-12-06T19:04:34.193Z" }, + { url = "https://files.pythonhosted.org/packages/33/83/216db13224a6f688787f456909bbc50f9d951c0f4bea8ba38a2eb931d581/librt-0.7.3-cp39-cp39-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3edbf257c40d21a42615e9e332a6b10a8bacaaf58250aed8552a14a70efd0d65", size = 159681, upload-time = "2025-12-06T19:04:35.554Z" }, + { url = "https://files.pythonhosted.org/packages/83/23/0a490c8ba3bc90090647ac7b9b3c63c16af7378bcabe3ff4c7d7890d66e5/librt-0.7.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7b29e97273bd6999e2bfe9fe3531b1f4f64effd28327bced048a33e49b99674a", size = 168505, upload-time = "2025-12-06T19:04:36.748Z" }, + { url = 
"https://files.pythonhosted.org/packages/5e/16/b47c60805285caa06728d61d933fdd6db5b7321f375ce496cb7fdbeb1a44/librt-0.7.3-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2e40520c37926166c24d0c2e0f3bc3a5f46646c34bdf7b4ea9747c297d6ee809", size = 182234, upload-time = "2025-12-06T19:04:37.889Z" }, + { url = "https://files.pythonhosted.org/packages/2d/2f/bef211d7f0d55fa2484d2c644b2cdae8c9c5eec050754b0516e6582ad452/librt-0.7.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:6bdd9adfca615903578d2060ee8a6eb1c24eaf54919ff0ddc820118e5718931b", size = 178276, upload-time = "2025-12-06T19:04:39.408Z" }, + { url = "https://files.pythonhosted.org/packages/3d/dd/5a3e7762b086b62fabb31fd4deaaf3ba888cfdd3b8f2e3247f076c18a6ff/librt-0.7.3-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:f57aca20e637750a2c18d979f7096e2c2033cc40cf7ed201494318de1182f135", size = 172602, upload-time = "2025-12-06T19:04:40.619Z" }, + { url = "https://files.pythonhosted.org/packages/fe/d8/533d5bfd5b377eb03ed54101814b530fc1f9bbe0e79971c641a3f15bfb33/librt-0.7.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:cad9971881e4fec00d96af7eaf4b63aa7a595696fc221808b0d3ce7ca9743258", size = 192741, upload-time = "2025-12-06T19:04:41.738Z" }, + { url = "https://files.pythonhosted.org/packages/9f/69/0b87ce8e95f65ebc864f390f1139b8fe9fac6fb64b797307447b1719610c/librt-0.7.3-cp39-cp39-win32.whl", hash = "sha256:170cdb8436188347af17bf9cccf3249ba581c933ed56d926497119d4cf730cec", size = 47154, upload-time = "2025-12-06T19:04:42.96Z" }, + { url = "https://files.pythonhosted.org/packages/c0/1c/070dee0add2d6e742be4d8b965d5a37c24562b43e8ef7deba8ed5b5d3c0f/librt-0.7.3-cp39-cp39-win_amd64.whl", hash = "sha256:b278a9248a4e3260fee3db7613772ca9ab6763a129d6d6f29555e2f9b168216d", size = 54339, upload-time = "2025-12-06T19:04:44.415Z" }, +] + +[[package]] +name = "mypy" +version = "1.19.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "librt" }, + { 
name = "mypy-extensions" }, + { name = "pathspec" }, + { name = "tomli", marker = "python_full_version < '3.11'" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f9/b5/b58cdc25fadd424552804bf410855d52324183112aa004f0732c5f6324cf/mypy-1.19.0.tar.gz", hash = "sha256:f6b874ca77f733222641e5c46e4711648c4037ea13646fd0cdc814c2eaec2528", size = 3579025, upload-time = "2025-11-28T15:49:01.26Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/98/8f/55fb488c2b7dabd76e3f30c10f7ab0f6190c1fcbc3e97b1e588ec625bbe2/mypy-1.19.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6148ede033982a8c5ca1143de34c71836a09f105068aaa8b7d5edab2b053e6c8", size = 13093239, upload-time = "2025-11-28T15:45:11.342Z" }, + { url = "https://files.pythonhosted.org/packages/72/1b/278beea978456c56b3262266274f335c3ba5ff2c8108b3b31bec1ffa4c1d/mypy-1.19.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a9ac09e52bb0f7fb912f5d2a783345c72441a08ef56ce3e17c1752af36340a39", size = 12156128, upload-time = "2025-11-28T15:46:02.566Z" }, + { url = "https://files.pythonhosted.org/packages/21/f8/e06f951902e136ff74fd7a4dc4ef9d884faeb2f8eb9c49461235714f079f/mypy-1.19.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:11f7254c15ab3f8ed68f8e8f5cbe88757848df793e31c36aaa4d4f9783fd08ab", size = 12753508, upload-time = "2025-11-28T15:44:47.538Z" }, + { url = "https://files.pythonhosted.org/packages/67/5a/d035c534ad86e09cee274d53cf0fd769c0b29ca6ed5b32e205be3c06878c/mypy-1.19.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:318ba74f75899b0e78b847d8c50821e4c9637c79d9a59680fc1259f29338cb3e", size = 13507553, upload-time = "2025-11-28T15:44:39.26Z" }, + { url = "https://files.pythonhosted.org/packages/6a/17/c4a5498e00071ef29e483a01558b285d086825b61cf1fb2629fbdd019d94/mypy-1.19.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = 
"sha256:cf7d84f497f78b682edd407f14a7b6e1a2212b433eedb054e2081380b7395aa3", size = 13792898, upload-time = "2025-11-28T15:44:31.102Z" }, + { url = "https://files.pythonhosted.org/packages/67/f6/bb542422b3ee4399ae1cdc463300d2d91515ab834c6233f2fd1d52fa21e0/mypy-1.19.0-cp310-cp310-win_amd64.whl", hash = "sha256:c3385246593ac2b97f155a0e9639be906e73534630f663747c71908dfbf26134", size = 10048835, upload-time = "2025-11-28T15:48:15.744Z" }, + { url = "https://files.pythonhosted.org/packages/0f/d2/010fb171ae5ac4a01cc34fbacd7544531e5ace95c35ca166dd8fd1b901d0/mypy-1.19.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a31e4c28e8ddb042c84c5e977e28a21195d086aaffaf08b016b78e19c9ef8106", size = 13010563, upload-time = "2025-11-28T15:48:23.975Z" }, + { url = "https://files.pythonhosted.org/packages/41/6b/63f095c9f1ce584fdeb595d663d49e0980c735a1d2004720ccec252c5d47/mypy-1.19.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:34ec1ac66d31644f194b7c163d7f8b8434f1b49719d403a5d26c87fff7e913f7", size = 12077037, upload-time = "2025-11-28T15:47:51.582Z" }, + { url = "https://files.pythonhosted.org/packages/d7/83/6cb93d289038d809023ec20eb0b48bbb1d80af40511fa077da78af6ff7c7/mypy-1.19.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cb64b0ba5980466a0f3f9990d1c582bcab8db12e29815ecb57f1408d99b4bff7", size = 12680255, upload-time = "2025-11-28T15:46:57.628Z" }, + { url = "https://files.pythonhosted.org/packages/99/db/d217815705987d2cbace2edd9100926196d6f85bcb9b5af05058d6e3c8ad/mypy-1.19.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:120cffe120cca5c23c03c77f84abc0c14c5d2e03736f6c312480020082f1994b", size = 13421472, upload-time = "2025-11-28T15:47:59.655Z" }, + { url = "https://files.pythonhosted.org/packages/4e/51/d2beaca7c497944b07594f3f8aad8d2f0e8fc53677059848ae5d6f4d193e/mypy-1.19.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = 
"sha256:7a500ab5c444268a70565e374fc803972bfd1f09545b13418a5174e29883dab7", size = 13651823, upload-time = "2025-11-28T15:45:29.318Z" }, + { url = "https://files.pythonhosted.org/packages/aa/d1/7883dcf7644db3b69490f37b51029e0870aac4a7ad34d09ceae709a3df44/mypy-1.19.0-cp311-cp311-win_amd64.whl", hash = "sha256:c14a98bc63fd867530e8ec82f217dae29d0550c86e70debc9667fff1ec83284e", size = 10049077, upload-time = "2025-11-28T15:45:39.818Z" }, + { url = "https://files.pythonhosted.org/packages/11/7e/1afa8fb188b876abeaa14460dc4983f909aaacaa4bf5718c00b2c7e0b3d5/mypy-1.19.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:0fb3115cb8fa7c5f887c8a8d81ccdcb94cff334684980d847e5a62e926910e1d", size = 13207728, upload-time = "2025-11-28T15:46:26.463Z" }, + { url = "https://files.pythonhosted.org/packages/b2/13/f103d04962bcbefb1644f5ccb235998b32c337d6c13145ea390b9da47f3e/mypy-1.19.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f3e19e3b897562276bb331074d64c076dbdd3e79213f36eed4e592272dabd760", size = 12202945, upload-time = "2025-11-28T15:48:49.143Z" }, + { url = "https://files.pythonhosted.org/packages/e4/93/a86a5608f74a22284a8ccea8592f6e270b61f95b8588951110ad797c2ddd/mypy-1.19.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b9d491295825182fba01b6ffe2c6fe4e5a49dbf4e2bb4d1217b6ced3b4797bc6", size = 12718673, upload-time = "2025-11-28T15:47:37.193Z" }, + { url = "https://files.pythonhosted.org/packages/3d/58/cf08fff9ced0423b858f2a7495001fda28dc058136818ee9dffc31534ea9/mypy-1.19.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6016c52ab209919b46169651b362068f632efcd5eb8ef9d1735f6f86da7853b2", size = 13608336, upload-time = "2025-11-28T15:48:32.625Z" }, + { url = "https://files.pythonhosted.org/packages/64/ed/9c509105c5a6d4b73bb08733102a3ea62c25bc02c51bca85e3134bf912d3/mypy-1.19.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:f188dcf16483b3e59f9278c4ed939ec0254aa8a60e8fc100648d9ab5ee95a431", size = 13833174, upload-time = "2025-11-28T15:45:48.091Z" }, + { url = "https://files.pythonhosted.org/packages/cd/71/01939b66e35c6f8cb3e6fdf0b657f0fd24de2f8ba5e523625c8e72328208/mypy-1.19.0-cp312-cp312-win_amd64.whl", hash = "sha256:0e3c3d1e1d62e678c339e7ade72746a9e0325de42cd2cccc51616c7b2ed1a018", size = 10112208, upload-time = "2025-11-28T15:46:41.702Z" }, + { url = "https://files.pythonhosted.org/packages/cb/0d/a1357e6bb49e37ce26fcf7e3cc55679ce9f4ebee0cd8b6ee3a0e301a9210/mypy-1.19.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:7686ed65dbabd24d20066f3115018d2dce030d8fa9db01aa9f0a59b6813e9f9e", size = 13191993, upload-time = "2025-11-28T15:47:22.336Z" }, + { url = "https://files.pythonhosted.org/packages/5d/75/8e5d492a879ec4490e6ba664b5154e48c46c85b5ac9785792a5ec6a4d58f/mypy-1.19.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:fd4a985b2e32f23bead72e2fb4bbe5d6aceee176be471243bd831d5b2644672d", size = 12174411, upload-time = "2025-11-28T15:44:55.492Z" }, + { url = "https://files.pythonhosted.org/packages/71/31/ad5dcee9bfe226e8eaba777e9d9d251c292650130f0450a280aec3485370/mypy-1.19.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fc51a5b864f73a3a182584b1ac75c404396a17eced54341629d8bdcb644a5bba", size = 12727751, upload-time = "2025-11-28T15:44:14.169Z" }, + { url = "https://files.pythonhosted.org/packages/77/06/b6b8994ce07405f6039701f4b66e9d23f499d0b41c6dd46ec28f96d57ec3/mypy-1.19.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:37af5166f9475872034b56c5efdcf65ee25394e9e1d172907b84577120714364", size = 13593323, upload-time = "2025-11-28T15:46:34.699Z" }, + { url = "https://files.pythonhosted.org/packages/68/b1/126e274484cccdf099a8e328d4fda1c7bdb98a5e888fa6010b00e1bbf330/mypy-1.19.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = 
"sha256:510c014b722308c9bd377993bcbf9a07d7e0692e5fa8fc70e639c1eb19fc6bee", size = 13818032, upload-time = "2025-11-28T15:46:18.286Z" }, + { url = "https://files.pythonhosted.org/packages/f8/56/53a8f70f562dfc466c766469133a8a4909f6c0012d83993143f2a9d48d2d/mypy-1.19.0-cp313-cp313-win_amd64.whl", hash = "sha256:cabbee74f29aa9cd3b444ec2f1e4fa5a9d0d746ce7567a6a609e224429781f53", size = 10120644, upload-time = "2025-11-28T15:47:43.99Z" }, + { url = "https://files.pythonhosted.org/packages/b0/f4/7751f32f56916f7f8c229fe902cbdba3e4dd3f3ea9e8b872be97e7fc546d/mypy-1.19.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:f2e36bed3c6d9b5f35d28b63ca4b727cb0228e480826ffc8953d1892ddc8999d", size = 13185236, upload-time = "2025-11-28T15:45:20.696Z" }, + { url = "https://files.pythonhosted.org/packages/35/31/871a9531f09e78e8d145032355890384f8a5b38c95a2c7732d226b93242e/mypy-1.19.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:a18d8abdda14035c5718acb748faec09571432811af129bf0d9e7b2d6699bf18", size = 12213902, upload-time = "2025-11-28T15:46:10.117Z" }, + { url = "https://files.pythonhosted.org/packages/58/b8/af221910dd40eeefa2077a59107e611550167b9994693fc5926a0b0f87c0/mypy-1.19.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f75e60aca3723a23511948539b0d7ed514dda194bc3755eae0bfc7a6b4887aa7", size = 12738600, upload-time = "2025-11-28T15:44:22.521Z" }, + { url = "https://files.pythonhosted.org/packages/11/9f/c39e89a3e319c1d9c734dedec1183b2cc3aefbab066ec611619002abb932/mypy-1.19.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8f44f2ae3c58421ee05fe609160343c25f70e3967f6e32792b5a78006a9d850f", size = 13592639, upload-time = "2025-11-28T15:48:08.55Z" }, + { url = "https://files.pythonhosted.org/packages/97/6d/ffaf5f01f5e284d9033de1267e6c1b8f3783f2cf784465378a86122e884b/mypy-1.19.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = 
"sha256:63ea6a00e4bd6822adbfc75b02ab3653a17c02c4347f5bb0cf1d5b9df3a05835", size = 13799132, upload-time = "2025-11-28T15:47:06.032Z" }, + { url = "https://files.pythonhosted.org/packages/fe/b0/c33921e73aaa0106224e5a34822411bea38046188eb781637f5a5b07e269/mypy-1.19.0-cp314-cp314-win_amd64.whl", hash = "sha256:3ad925b14a0bb99821ff6f734553294aa6a3440a8cb082fe1f5b84dfb662afb1", size = 10269832, upload-time = "2025-11-28T15:47:29.392Z" }, + { url = "https://files.pythonhosted.org/packages/b4/59/a7748ef43446163a93159d82bb270c6c4f3d94c1fcbdd2a29a7e439e74d7/mypy-1.19.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0dde5cb375cb94deff0d4b548b993bec52859d1651e073d63a1386d392a95495", size = 13094255, upload-time = "2025-11-28T15:47:14.282Z" }, + { url = "https://files.pythonhosted.org/packages/f5/0b/92ebf5abc83f559a35dcba3bd9227726b04b04178f1e521f38e647b930eb/mypy-1.19.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1cf9c59398db1c68a134b0b5354a09a1e124523f00bacd68e553b8bd16ff3299", size = 12161414, upload-time = "2025-11-28T15:45:03.302Z" }, + { url = "https://files.pythonhosted.org/packages/aa/03/19412f0a786722055a52c01b4c5d71e5b5443a89f6bbcdd445408240e217/mypy-1.19.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3210d87b30e6af9c8faed61be2642fcbe60ef77cec64fa1ef810a630a4cf671c", size = 12756782, upload-time = "2025-11-28T15:46:49.522Z" }, + { url = "https://files.pythonhosted.org/packages/cb/85/395d53c9098b251414b0448cdadcd3277523ff36f5abda6d26ff945dbdb3/mypy-1.19.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e2c1101ab41d01303103ab6ef82cbbfedb81c1a060c868fa7cc013d573d37ab5", size = 13503492, upload-time = "2025-11-28T15:48:57.339Z" }, + { url = "https://files.pythonhosted.org/packages/dd/33/1ab1113e3778617ae7aba66b4b537f90512bd279ff65b6c984fb91fbb2d3/mypy-1.19.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = 
"sha256:0ea4fd21bb48f0da49e6d3b37ef6bd7e8228b9fe41bbf4d80d9364d11adbd43c", size = 13787703, upload-time = "2025-11-28T15:48:41.286Z" }, + { url = "https://files.pythonhosted.org/packages/4f/2d/8b0821b3e0d538de1ad96c86502256c7326274d5cb74e0b373efaada273f/mypy-1.19.0-cp39-cp39-win_amd64.whl", hash = "sha256:16f76ff3f3fd8137aadf593cb4607d82634fca675e8211ad75c43d86033ee6c6", size = 10049225, upload-time = "2025-11-28T15:45:55.089Z" }, + { url = "https://files.pythonhosted.org/packages/09/0e/fe228ed5aeab470c6f4eb82481837fadb642a5aa95cc8215fd2214822c10/mypy-1.19.0-py3-none-any.whl", hash = "sha256:0c01c99d626380752e527d5ce8e69ffbba2046eb8a060db0329690849cf9b6f9", size = 2469714, upload-time = "2025-11-28T15:45:33.22Z" }, +] + +[[package]] +name = "mypy-extensions" +version = "1.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/6e/371856a3fb9d31ca8dac321cda606860fa4548858c0cc45d9d1d4ca2628b/mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558", size = 6343, upload-time = "2025-04-22T14:54:24.164Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/79/7b/2c79738432f5c924bef5071f933bcc9efd0473bac3b4aa584a6f7c1c8df8/mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505", size = 4963, upload-time = "2025-04-22T14:54:22.983Z" }, +] + +[[package]] +name = "packaging" +version = "25.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f", size = 165727, upload-time = "2025-04-19T11:48:59.673Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" }, +] + +[[package]] +name = "pathspec" +version = "0.12.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ca/bc/f35b8446f4531a7cb215605d100cd88b7ac6f44ab3fc94870c120ab3adbf/pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712", size = 51043, upload-time = "2023-12-10T22:30:45Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cc/20/ff623b09d963f88bfde16306a54e12ee5ea43e9b597108672ff3a408aad6/pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08", size = 31191, upload-time = "2023-12-10T22:30:43.14Z" }, +] + +[[package]] +name = "pluggy" +version = "1.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" }, +] + +[[package]] +name = "pygments" +version = "2.19.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = 
"sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" }, +] + +[[package]] +name = "pytest" +version = "8.4.2" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.10'", +] +dependencies = [ + { name = "colorama", marker = "python_full_version < '3.10' and sys_platform == 'win32'" }, + { name = "exceptiongroup", marker = "python_full_version < '3.10'" }, + { name = "iniconfig", version = "2.1.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "packaging", marker = "python_full_version < '3.10'" }, + { name = "pluggy", marker = "python_full_version < '3.10'" }, + { name = "pygments", marker = "python_full_version < '3.10'" }, + { name = "tomli", marker = "python_full_version < '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a3/5c/00a0e072241553e1a7496d638deababa67c5058571567b92a7eaa258397c/pytest-8.4.2.tar.gz", hash = "sha256:86c0d0b93306b961d58d62a4db4879f27fe25513d4b969df351abdddb3c30e01", size = 1519618, upload-time = "2025-09-04T14:34:22.711Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a8/a4/20da314d277121d6534b3a980b29035dcd51e6744bd79075a6ce8fa4eb8d/pytest-8.4.2-py3-none-any.whl", hash = "sha256:872f880de3fc3a5bdc88a11b39c9710c3497a547cfa9320bc3c5e62fbf272e79", size = 365750, upload-time = "2025-09-04T14:34:20.226Z" }, +] + +[[package]] +name = "pytest" +version = "9.0.2" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.10'", +] +dependencies = [ + { name = "colorama", marker = 
"python_full_version >= '3.10' and sys_platform == 'win32'" }, + { name = "exceptiongroup", marker = "python_full_version == '3.10.*'" }, + { name = "iniconfig", version = "2.3.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "packaging", marker = "python_full_version >= '3.10'" }, + { name = "pluggy", marker = "python_full_version >= '3.10'" }, + { name = "pygments", marker = "python_full_version >= '3.10'" }, + { name = "tomli", marker = "python_full_version == '3.10.*'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d1/db/7ef3487e0fb0049ddb5ce41d3a49c235bf9ad299b6a25d5780a89f19230f/pytest-9.0.2.tar.gz", hash = "sha256:75186651a92bd89611d1d9fc20f0b4345fd827c41ccd5c299a868a05d70edf11", size = 1568901, upload-time = "2025-12-06T21:30:51.014Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3b/ab/b3226f0bd7cdcf710fbede2b3548584366da3b19b5021e74f5bde2a8fa3f/pytest-9.0.2-py3-none-any.whl", hash = "sha256:711ffd45bf766d5264d487b917733b453d917afd2b0ad65223959f59089f875b", size = 374801, upload-time = "2025-12-06T21:30:49.154Z" }, +] + +[[package]] +name = "pytest-asyncio" +version = "1.2.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.10'", +] +dependencies = [ + { name = "backports-asyncio-runner", marker = "python_full_version < '3.10'" }, + { name = "pytest", version = "8.4.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "typing-extensions", marker = "python_full_version < '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/42/86/9e3c5f48f7b7b638b216e4b9e645f54d199d7abbbab7a64a13b4e12ba10f/pytest_asyncio-1.2.0.tar.gz", hash = "sha256:c609a64a2a8768462d0c99811ddb8bd2583c33fd33cf7f21af1c142e824ffb57", size = 50119, upload-time = "2025-09-12T07:33:53.816Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/04/93/2fa34714b7a4ae72f2f8dad66ba17dd9a2c793220719e736dda28b7aec27/pytest_asyncio-1.2.0-py3-none-any.whl", hash = "sha256:8e17ae5e46d8e7efe51ab6494dd2010f4ca8dae51652aa3c8d55acf50bfb2e99", size = 15095, upload-time = "2025-09-12T07:33:52.639Z" }, +] + +[[package]] +name = "pytest-asyncio" +version = "1.3.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.10'", +] +dependencies = [ + { name = "backports-asyncio-runner", marker = "python_full_version == '3.10.*'" }, + { name = "pytest", version = "9.0.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "typing-extensions", marker = "python_full_version >= '3.10' and python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/90/2c/8af215c0f776415f3590cac4f9086ccefd6fd463befeae41cd4d3f193e5a/pytest_asyncio-1.3.0.tar.gz", hash = "sha256:d7f52f36d231b80ee124cd216ffb19369aa168fc10095013c6b014a34d3ee9e5", size = 50087, upload-time = "2025-11-10T16:07:47.256Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e5/35/f8b19922b6a25bc0880171a2f1a003eaeb93657475193ab516fd87cac9da/pytest_asyncio-1.3.0-py3-none-any.whl", hash = "sha256:611e26147c7f77640e6d0a92a38ed17c3e9848063698d5c93d5aa7aa11cebff5", size = 15075, upload-time = "2025-11-10T16:07:45.537Z" }, +] + +[[package]] +name = "pytest-mock" +version = "3.15.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pytest", version = "8.4.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "pytest", version = "9.0.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/68/14/eb014d26be205d38ad5ad20d9a80f7d201472e08167f0bb4361e251084a9/pytest_mock-3.15.1.tar.gz", hash = 
"sha256:1849a238f6f396da19762269de72cb1814ab44416fa73a8686deac10b0d87a0f", size = 34036, upload-time = "2025-09-16T16:37:27.081Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5a/cc/06253936f4a7fa2e0f48dfe6d851d9c56df896a9ab09ac019d70b760619c/pytest_mock-3.15.1-py3-none-any.whl", hash = "sha256:0a25e2eb88fe5168d535041d09a4529a188176ae608a6d249ee65abc0949630d", size = 10095, upload-time = "2025-09-16T16:37:25.734Z" }, +] + +[[package]] +name = "python-dotenv" +version = "1.2.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f0/26/19cadc79a718c5edbec86fd4919a6b6d3f681039a2f6d66d14be94e75fb9/python_dotenv-1.2.1.tar.gz", hash = "sha256:42667e897e16ab0d66954af0e60a9caa94f0fd4ecf3aaf6d2d260eec1aa36ad6", size = 44221, upload-time = "2025-10-26T15:12:10.434Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/14/1b/a298b06749107c305e1fe0f814c6c74aea7b2f1e10989cb30f544a1b3253/python_dotenv-1.2.1-py3-none-any.whl", hash = "sha256:b81ee9561e9ca4004139c6cbba3a238c32b03e4894671e181b671e8cb8425d61", size = 21230, upload-time = "2025-10-26T15:12:09.109Z" }, +] + +[[package]] +name = "requests" +version = "2.32.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "charset-normalizer" }, + { name = "idna" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c9/74/b3ff8e6c8446842c3f5c837e9c3dfcfe2018ea6ecef224c710c85ef728f4/requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf", size = 134517, upload-time = "2025-08-18T20:46:02.573Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738, upload-time = "2025-08-18T20:46:00.542Z" }, +] + +[[package]] 
+name = "respx" +version = "0.22.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "httpx" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f4/7c/96bd0bc759cf009675ad1ee1f96535edcb11e9666b985717eb8c87192a95/respx-0.22.0.tar.gz", hash = "sha256:3c8924caa2a50bd71aefc07aa812f2466ff489f1848c96e954a5362d17095d91", size = 28439, upload-time = "2024-12-19T22:33:59.374Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8e/67/afbb0978d5399bc9ea200f1d4489a23c9a1dad4eee6376242b8182389c79/respx-0.22.0-py2.py3-none-any.whl", hash = "sha256:631128d4c9aba15e56903fb5f66fb1eff412ce28dd387ca3a81339e52dbd3ad0", size = 25127, upload-time = "2024-12-19T22:33:57.837Z" }, +] + +[[package]] +name = "ruff" +version = "0.14.8" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ed/d9/f7a0c4b3a2bf2556cd5d99b05372c29980249ef71e8e32669ba77428c82c/ruff-0.14.8.tar.gz", hash = "sha256:774ed0dd87d6ce925e3b8496feb3a00ac564bea52b9feb551ecd17e0a23d1eed", size = 5765385, upload-time = "2025-12-04T15:06:17.669Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/48/b8/9537b52010134b1d2b72870cc3f92d5fb759394094741b09ceccae183fbe/ruff-0.14.8-py3-none-linux_armv6l.whl", hash = "sha256:ec071e9c82eca417f6111fd39f7043acb53cd3fde9b1f95bbed745962e345afb", size = 13441540, upload-time = "2025-12-04T15:06:14.896Z" }, + { url = "https://files.pythonhosted.org/packages/24/00/99031684efb025829713682012b6dd37279b1f695ed1b01725f85fd94b38/ruff-0.14.8-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:8cdb162a7159f4ca36ce980a18c43d8f036966e7f73f866ac8f493b75e0c27e9", size = 13669384, upload-time = "2025-12-04T15:06:51.809Z" }, + { url = "https://files.pythonhosted.org/packages/72/64/3eb5949169fc19c50c04f28ece2c189d3b6edd57e5b533649dae6ca484fe/ruff-0.14.8-py3-none-macosx_11_0_arm64.whl", hash = "sha256:2e2fcbefe91f9fad0916850edf0854530c15bd1926b6b779de47e9ab619ea38f", size = 12806917, 
upload-time = "2025-12-04T15:06:08.925Z" }, + { url = "https://files.pythonhosted.org/packages/c4/08/5250babb0b1b11910f470370ec0cbc67470231f7cdc033cee57d4976f941/ruff-0.14.8-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9d70721066a296f45786ec31916dc287b44040f553da21564de0ab4d45a869b", size = 13256112, upload-time = "2025-12-04T15:06:23.498Z" }, + { url = "https://files.pythonhosted.org/packages/78/4c/6c588e97a8e8c2d4b522c31a579e1df2b4d003eddfbe23d1f262b1a431ff/ruff-0.14.8-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2c87e09b3cd9d126fc67a9ecd3b5b1d3ded2b9c7fce3f16e315346b9d05cfb52", size = 13227559, upload-time = "2025-12-04T15:06:33.432Z" }, + { url = "https://files.pythonhosted.org/packages/23/ce/5f78cea13eda8eceac71b5f6fa6e9223df9b87bb2c1891c166d1f0dce9f1/ruff-0.14.8-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d62cb310c4fbcb9ee4ac023fe17f984ae1e12b8a4a02e3d21489f9a2a5f730c", size = 13896379, upload-time = "2025-12-04T15:06:02.687Z" }, + { url = "https://files.pythonhosted.org/packages/cf/79/13de4517c4dadce9218a20035b21212a4c180e009507731f0d3b3f5df85a/ruff-0.14.8-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:1af35c2d62633d4da0521178e8a2641c636d2a7153da0bac1b30cfd4ccd91344", size = 15372786, upload-time = "2025-12-04T15:06:29.828Z" }, + { url = "https://files.pythonhosted.org/packages/00/06/33df72b3bb42be8a1c3815fd4fae83fa2945fc725a25d87ba3e42d1cc108/ruff-0.14.8-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:25add4575ffecc53d60eed3f24b1e934493631b48ebbc6ebaf9d8517924aca4b", size = 14990029, upload-time = "2025-12-04T15:06:36.812Z" }, + { url = "https://files.pythonhosted.org/packages/64/61/0f34927bd90925880394de0e081ce1afab66d7b3525336f5771dcf0cb46c/ruff-0.14.8-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4c943d847b7f02f7db4201a0600ea7d244d8a404fbb639b439e987edcf2baf9a", size = 14407037, upload-time = 
"2025-12-04T15:06:39.979Z" }, + { url = "https://files.pythonhosted.org/packages/96/bc/058fe0aefc0fbf0d19614cb6d1a3e2c048f7dc77ca64957f33b12cfdc5ef/ruff-0.14.8-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb6e8bf7b4f627548daa1b69283dac5a296bfe9ce856703b03130732e20ddfe2", size = 14102390, upload-time = "2025-12-04T15:06:46.372Z" }, + { url = "https://files.pythonhosted.org/packages/af/a4/e4f77b02b804546f4c17e8b37a524c27012dd6ff05855d2243b49a7d3cb9/ruff-0.14.8-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:7aaf2974f378e6b01d1e257c6948207aec6a9b5ba53fab23d0182efb887a0e4a", size = 14230793, upload-time = "2025-12-04T15:06:20.497Z" }, + { url = "https://files.pythonhosted.org/packages/3f/52/bb8c02373f79552e8d087cedaffad76b8892033d2876c2498a2582f09dcf/ruff-0.14.8-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:e5758ca513c43ad8a4ef13f0f081f80f08008f410790f3611a21a92421ab045b", size = 13160039, upload-time = "2025-12-04T15:06:49.06Z" }, + { url = "https://files.pythonhosted.org/packages/1f/ad/b69d6962e477842e25c0b11622548df746290cc6d76f9e0f4ed7456c2c31/ruff-0.14.8-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:f74f7ba163b6e85a8d81a590363bf71618847e5078d90827749bfda1d88c9cdf", size = 13205158, upload-time = "2025-12-04T15:06:54.574Z" }, + { url = "https://files.pythonhosted.org/packages/06/63/54f23da1315c0b3dfc1bc03fbc34e10378918a20c0b0f086418734e57e74/ruff-0.14.8-py3-none-musllinux_1_2_i686.whl", hash = "sha256:eed28f6fafcc9591994c42254f5a5c5ca40e69a30721d2ab18bb0bb3baac3ab6", size = 13469550, upload-time = "2025-12-04T15:05:59.209Z" }, + { url = "https://files.pythonhosted.org/packages/70/7d/a4d7b1961e4903bc37fffb7ddcfaa7beb250f67d97cfd1ee1d5cddb1ec90/ruff-0.14.8-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:21d48fa744c9d1cb8d71eb0a740c4dd02751a5de9db9a730a8ef75ca34cf138e", size = 14211332, upload-time = "2025-12-04T15:06:06.027Z" }, + { url = 
"https://files.pythonhosted.org/packages/5d/93/2a5063341fa17054e5c86582136e9895db773e3c2ffb770dde50a09f35f0/ruff-0.14.8-py3-none-win32.whl", hash = "sha256:15f04cb45c051159baebb0f0037f404f1dc2f15a927418f29730f411a79bc4e7", size = 13151890, upload-time = "2025-12-04T15:06:11.668Z" }, + { url = "https://files.pythonhosted.org/packages/02/1c/65c61a0859c0add13a3e1cbb6024b42de587456a43006ca2d4fd3d1618fe/ruff-0.14.8-py3-none-win_amd64.whl", hash = "sha256:9eeb0b24242b5bbff3011409a739929f497f3fb5fe3b5698aba5e77e8c833097", size = 14537826, upload-time = "2025-12-04T15:06:26.409Z" }, + { url = "https://files.pythonhosted.org/packages/6d/63/8b41cea3afd7f58eb64ac9251668ee0073789a3bc9ac6f816c8c6fef986d/ruff-0.14.8-py3-none-win_arm64.whl", hash = "sha256:965a582c93c63fe715fd3e3f8aa37c4b776777203d8e1d8aa3cc0c14424a4b99", size = 13634522, upload-time = "2025-12-04T15:06:43.212Z" }, +] + +[[package]] +name = "setuptools" +version = "80.10.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/76/95/faf61eb8363f26aa7e1d762267a8d602a1b26d4f3a1e758e92cb3cb8b054/setuptools-80.10.2.tar.gz", hash = "sha256:8b0e9d10c784bf7d262c4e5ec5d4ec94127ce206e8738f29a437945fbc219b70", size = 1200343, upload-time = "2026-01-25T22:38:17.252Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/94/b8/f1f62a5e3c0ad2ff1d189590bfa4c46b4f3b6e49cef6f26c6ee4e575394d/setuptools-80.10.2-py3-none-any.whl", hash = "sha256:95b30ddfb717250edb492926c92b5221f7ef3fbcc2b07579bcd4a27da21d0173", size = 1064234, upload-time = "2026-01-25T22:38:15.216Z" }, +] + +[[package]] +name = "tokenize-rt" +version = "6.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/69/ed/8f07e893132d5051d86a553e749d5c89b2a4776eb3a579b72ed61f8559ca/tokenize_rt-6.2.0.tar.gz", hash = "sha256:8439c042b330c553fdbe1758e4a05c0ed460dbbbb24a606f11f0dee75da4cad6", size = 5476, upload-time = "2025-05-23T23:48:00.035Z" } 
+wheels = [ + { url = "https://files.pythonhosted.org/packages/33/f0/3fe8c6e69135a845f4106f2ff8b6805638d4e85c264e70114e8126689587/tokenize_rt-6.2.0-py2.py3-none-any.whl", hash = "sha256:a152bf4f249c847a66497a4a95f63376ed68ac6abf092a2f7cfb29d044ecff44", size = 6004, upload-time = "2025-05-23T23:47:58.812Z" }, +] + +[[package]] +name = "tomli" +version = "2.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/52/ed/3f73f72945444548f33eba9a87fc7a6e969915e7b1acc8260b30e1f76a2f/tomli-2.3.0.tar.gz", hash = "sha256:64be704a875d2a59753d80ee8a533c3fe183e3f06807ff7dc2232938ccb01549", size = 17392, upload-time = "2025-10-08T22:01:47.119Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b3/2e/299f62b401438d5fe1624119c723f5d877acc86a4c2492da405626665f12/tomli-2.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:88bd15eb972f3664f5ed4b57c1634a97153b4bac4479dcb6a495f41921eb7f45", size = 153236, upload-time = "2025-10-08T22:01:00.137Z" }, + { url = "https://files.pythonhosted.org/packages/86/7f/d8fffe6a7aefdb61bced88fcb5e280cfd71e08939da5894161bd71bea022/tomli-2.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:883b1c0d6398a6a9d29b508c331fa56adbcdff647f6ace4dfca0f50e90dfd0ba", size = 148084, upload-time = "2025-10-08T22:01:01.63Z" }, + { url = "https://files.pythonhosted.org/packages/47/5c/24935fb6a2ee63e86d80e4d3b58b222dafaf438c416752c8b58537c8b89a/tomli-2.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d1381caf13ab9f300e30dd8feadb3de072aeb86f1d34a8569453ff32a7dea4bf", size = 234832, upload-time = "2025-10-08T22:01:02.543Z" }, + { url = "https://files.pythonhosted.org/packages/89/da/75dfd804fc11e6612846758a23f13271b76d577e299592b4371a4ca4cd09/tomli-2.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a0e285d2649b78c0d9027570d4da3425bdb49830a6156121360b3f8511ea3441", size = 242052, 
upload-time = "2025-10-08T22:01:03.836Z" }, + { url = "https://files.pythonhosted.org/packages/70/8c/f48ac899f7b3ca7eb13af73bacbc93aec37f9c954df3c08ad96991c8c373/tomli-2.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0a154a9ae14bfcf5d8917a59b51ffd5a3ac1fd149b71b47a3a104ca4edcfa845", size = 239555, upload-time = "2025-10-08T22:01:04.834Z" }, + { url = "https://files.pythonhosted.org/packages/ba/28/72f8afd73f1d0e7829bfc093f4cb98ce0a40ffc0cc997009ee1ed94ba705/tomli-2.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:74bf8464ff93e413514fefd2be591c3b0b23231a77f901db1eb30d6f712fc42c", size = 245128, upload-time = "2025-10-08T22:01:05.84Z" }, + { url = "https://files.pythonhosted.org/packages/b6/eb/a7679c8ac85208706d27436e8d421dfa39d4c914dcf5fa8083a9305f58d9/tomli-2.3.0-cp311-cp311-win32.whl", hash = "sha256:00b5f5d95bbfc7d12f91ad8c593a1659b6387b43f054104cda404be6bda62456", size = 96445, upload-time = "2025-10-08T22:01:06.896Z" }, + { url = "https://files.pythonhosted.org/packages/0a/fe/3d3420c4cb1ad9cb462fb52967080575f15898da97e21cb6f1361d505383/tomli-2.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:4dc4ce8483a5d429ab602f111a93a6ab1ed425eae3122032db7e9acf449451be", size = 107165, upload-time = "2025-10-08T22:01:08.107Z" }, + { url = "https://files.pythonhosted.org/packages/ff/b7/40f36368fcabc518bb11c8f06379a0fd631985046c038aca08c6d6a43c6e/tomli-2.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d7d86942e56ded512a594786a5ba0a5e521d02529b3826e7761a05138341a2ac", size = 154891, upload-time = "2025-10-08T22:01:09.082Z" }, + { url = "https://files.pythonhosted.org/packages/f9/3f/d9dd692199e3b3aab2e4e4dd948abd0f790d9ded8cd10cbaae276a898434/tomli-2.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:73ee0b47d4dad1c5e996e3cd33b8a76a50167ae5f96a2607cbe8cc773506ab22", size = 148796, upload-time = "2025-10-08T22:01:10.266Z" }, + { url = 
"https://files.pythonhosted.org/packages/60/83/59bff4996c2cf9f9387a0f5a3394629c7efa5ef16142076a23a90f1955fa/tomli-2.3.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:792262b94d5d0a466afb5bc63c7daa9d75520110971ee269152083270998316f", size = 242121, upload-time = "2025-10-08T22:01:11.332Z" }, + { url = "https://files.pythonhosted.org/packages/45/e5/7c5119ff39de8693d6baab6c0b6dcb556d192c165596e9fc231ea1052041/tomli-2.3.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4f195fe57ecceac95a66a75ac24d9d5fbc98ef0962e09b2eddec5d39375aae52", size = 250070, upload-time = "2025-10-08T22:01:12.498Z" }, + { url = "https://files.pythonhosted.org/packages/45/12/ad5126d3a278f27e6701abde51d342aa78d06e27ce2bb596a01f7709a5a2/tomli-2.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e31d432427dcbf4d86958c184b9bfd1e96b5b71f8eb17e6d02531f434fd335b8", size = 245859, upload-time = "2025-10-08T22:01:13.551Z" }, + { url = "https://files.pythonhosted.org/packages/fb/a1/4d6865da6a71c603cfe6ad0e6556c73c76548557a8d658f9e3b142df245f/tomli-2.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7b0882799624980785240ab732537fcfc372601015c00f7fc367c55308c186f6", size = 250296, upload-time = "2025-10-08T22:01:14.614Z" }, + { url = "https://files.pythonhosted.org/packages/a0/b7/a7a7042715d55c9ba6e8b196d65d2cb662578b4d8cd17d882d45322b0d78/tomli-2.3.0-cp312-cp312-win32.whl", hash = "sha256:ff72b71b5d10d22ecb084d345fc26f42b5143c5533db5e2eaba7d2d335358876", size = 97124, upload-time = "2025-10-08T22:01:15.629Z" }, + { url = "https://files.pythonhosted.org/packages/06/1e/f22f100db15a68b520664eb3328fb0ae4e90530887928558112c8d1f4515/tomli-2.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:1cb4ed918939151a03f33d4242ccd0aa5f11b3547d0cf30f7c74a408a5b99878", size = 107698, upload-time = "2025-10-08T22:01:16.51Z" }, + { url = 
"https://files.pythonhosted.org/packages/89/48/06ee6eabe4fdd9ecd48bf488f4ac783844fd777f547b8d1b61c11939974e/tomli-2.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5192f562738228945d7b13d4930baffda67b69425a7f0da96d360b0a3888136b", size = 154819, upload-time = "2025-10-08T22:01:17.964Z" }, + { url = "https://files.pythonhosted.org/packages/f1/01/88793757d54d8937015c75dcdfb673c65471945f6be98e6a0410fba167ed/tomli-2.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:be71c93a63d738597996be9528f4abe628d1adf5e6eb11607bc8fe1a510b5dae", size = 148766, upload-time = "2025-10-08T22:01:18.959Z" }, + { url = "https://files.pythonhosted.org/packages/42/17/5e2c956f0144b812e7e107f94f1cc54af734eb17b5191c0bbfb72de5e93e/tomli-2.3.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c4665508bcbac83a31ff8ab08f424b665200c0e1e645d2bd9ab3d3e557b6185b", size = 240771, upload-time = "2025-10-08T22:01:20.106Z" }, + { url = "https://files.pythonhosted.org/packages/d5/f4/0fbd014909748706c01d16824eadb0307115f9562a15cbb012cd9b3512c5/tomli-2.3.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4021923f97266babc6ccab9f5068642a0095faa0a51a246a6a02fccbb3514eaf", size = 248586, upload-time = "2025-10-08T22:01:21.164Z" }, + { url = "https://files.pythonhosted.org/packages/30/77/fed85e114bde5e81ecf9bc5da0cc69f2914b38f4708c80ae67d0c10180c5/tomli-2.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4ea38c40145a357d513bffad0ed869f13c1773716cf71ccaa83b0fa0cc4e42f", size = 244792, upload-time = "2025-10-08T22:01:22.417Z" }, + { url = "https://files.pythonhosted.org/packages/55/92/afed3d497f7c186dc71e6ee6d4fcb0acfa5f7d0a1a2878f8beae379ae0cc/tomli-2.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ad805ea85eda330dbad64c7ea7a4556259665bdf9d2672f5dccc740eb9d3ca05", size = 248909, upload-time = "2025-10-08T22:01:23.859Z" }, + { url = 
"https://files.pythonhosted.org/packages/f8/84/ef50c51b5a9472e7265ce1ffc7f24cd4023d289e109f669bdb1553f6a7c2/tomli-2.3.0-cp313-cp313-win32.whl", hash = "sha256:97d5eec30149fd3294270e889b4234023f2c69747e555a27bd708828353ab606", size = 96946, upload-time = "2025-10-08T22:01:24.893Z" }, + { url = "https://files.pythonhosted.org/packages/b2/b7/718cd1da0884f281f95ccfa3a6cc572d30053cba64603f79d431d3c9b61b/tomli-2.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:0c95ca56fbe89e065c6ead5b593ee64b84a26fca063b5d71a1122bf26e533999", size = 107705, upload-time = "2025-10-08T22:01:26.153Z" }, + { url = "https://files.pythonhosted.org/packages/19/94/aeafa14a52e16163008060506fcb6aa1949d13548d13752171a755c65611/tomli-2.3.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:cebc6fe843e0733ee827a282aca4999b596241195f43b4cc371d64fc6639da9e", size = 154244, upload-time = "2025-10-08T22:01:27.06Z" }, + { url = "https://files.pythonhosted.org/packages/db/e4/1e58409aa78eefa47ccd19779fc6f36787edbe7d4cd330eeeedb33a4515b/tomli-2.3.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:4c2ef0244c75aba9355561272009d934953817c49f47d768070c3c94355c2aa3", size = 148637, upload-time = "2025-10-08T22:01:28.059Z" }, + { url = "https://files.pythonhosted.org/packages/26/b6/d1eccb62f665e44359226811064596dd6a366ea1f985839c566cd61525ae/tomli-2.3.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c22a8bf253bacc0cf11f35ad9808b6cb75ada2631c2d97c971122583b129afbc", size = 241925, upload-time = "2025-10-08T22:01:29.066Z" }, + { url = "https://files.pythonhosted.org/packages/70/91/7cdab9a03e6d3d2bb11beae108da5bdc1c34bdeb06e21163482544ddcc90/tomli-2.3.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0eea8cc5c5e9f89c9b90c4896a8deefc74f518db5927d0e0e8d4a80953d774d0", size = 249045, upload-time = "2025-10-08T22:01:31.98Z" }, + { url = 
"https://files.pythonhosted.org/packages/15/1b/8c26874ed1f6e4f1fcfeb868db8a794cbe9f227299402db58cfcc858766c/tomli-2.3.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:b74a0e59ec5d15127acdabd75ea17726ac4c5178ae51b85bfe39c4f8a278e879", size = 245835, upload-time = "2025-10-08T22:01:32.989Z" }, + { url = "https://files.pythonhosted.org/packages/fd/42/8e3c6a9a4b1a1360c1a2a39f0b972cef2cc9ebd56025168c4137192a9321/tomli-2.3.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:b5870b50c9db823c595983571d1296a6ff3e1b88f734a4c8f6fc6188397de005", size = 253109, upload-time = "2025-10-08T22:01:34.052Z" }, + { url = "https://files.pythonhosted.org/packages/22/0c/b4da635000a71b5f80130937eeac12e686eefb376b8dee113b4a582bba42/tomli-2.3.0-cp314-cp314-win32.whl", hash = "sha256:feb0dacc61170ed7ab602d3d972a58f14ee3ee60494292d384649a3dc38ef463", size = 97930, upload-time = "2025-10-08T22:01:35.082Z" }, + { url = "https://files.pythonhosted.org/packages/b9/74/cb1abc870a418ae99cd5c9547d6bce30701a954e0e721821df483ef7223c/tomli-2.3.0-cp314-cp314-win_amd64.whl", hash = "sha256:b273fcbd7fc64dc3600c098e39136522650c49bca95df2d11cf3b626422392c8", size = 107964, upload-time = "2025-10-08T22:01:36.057Z" }, + { url = "https://files.pythonhosted.org/packages/54/78/5c46fff6432a712af9f792944f4fcd7067d8823157949f4e40c56b8b3c83/tomli-2.3.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:940d56ee0410fa17ee1f12b817b37a4d4e4dc4d27340863cc67236c74f582e77", size = 163065, upload-time = "2025-10-08T22:01:37.27Z" }, + { url = "https://files.pythonhosted.org/packages/39/67/f85d9bd23182f45eca8939cd2bc7050e1f90c41f4a2ecbbd5963a1d1c486/tomli-2.3.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:f85209946d1fe94416debbb88d00eb92ce9cd5266775424ff81bc959e001acaf", size = 159088, upload-time = "2025-10-08T22:01:38.235Z" }, + { url = 
"https://files.pythonhosted.org/packages/26/5a/4b546a0405b9cc0659b399f12b6adb750757baf04250b148d3c5059fc4eb/tomli-2.3.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a56212bdcce682e56b0aaf79e869ba5d15a6163f88d5451cbde388d48b13f530", size = 268193, upload-time = "2025-10-08T22:01:39.712Z" }, + { url = "https://files.pythonhosted.org/packages/42/4f/2c12a72ae22cf7b59a7fe75b3465b7aba40ea9145d026ba41cb382075b0e/tomli-2.3.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c5f3ffd1e098dfc032d4d3af5c0ac64f6d286d98bc148698356847b80fa4de1b", size = 275488, upload-time = "2025-10-08T22:01:40.773Z" }, + { url = "https://files.pythonhosted.org/packages/92/04/a038d65dbe160c3aa5a624e93ad98111090f6804027d474ba9c37c8ae186/tomli-2.3.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:5e01decd096b1530d97d5d85cb4dff4af2d8347bd35686654a004f8dea20fc67", size = 272669, upload-time = "2025-10-08T22:01:41.824Z" }, + { url = "https://files.pythonhosted.org/packages/be/2f/8b7c60a9d1612a7cbc39ffcca4f21a73bf368a80fc25bccf8253e2563267/tomli-2.3.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:8a35dd0e643bb2610f156cca8db95d213a90015c11fee76c946aa62b7ae7e02f", size = 279709, upload-time = "2025-10-08T22:01:43.177Z" }, + { url = "https://files.pythonhosted.org/packages/7e/46/cc36c679f09f27ded940281c38607716c86cf8ba4a518d524e349c8b4874/tomli-2.3.0-cp314-cp314t-win32.whl", hash = "sha256:a1f7f282fe248311650081faafa5f4732bdbfef5d45fe3f2e702fbc6f2d496e0", size = 107563, upload-time = "2025-10-08T22:01:44.233Z" }, + { url = "https://files.pythonhosted.org/packages/84/ff/426ca8683cf7b753614480484f6437f568fd2fda2edbdf57a2d3d8b27a0b/tomli-2.3.0-cp314-cp314t-win_amd64.whl", hash = "sha256:70a251f8d4ba2d9ac2542eecf008b3c8a9fc5c3f9f02c56a9d7952612be2fdba", size = 119756, upload-time = "2025-10-08T22:01:45.234Z" }, + { url = 
"https://files.pythonhosted.org/packages/77/b8/0135fadc89e73be292b473cb820b4f5a08197779206b33191e801feeae40/tomli-2.3.0-py3-none-any.whl", hash = "sha256:e95b1af3c5b07d9e643909b5abbec77cd9f1217e6d0bca72b0234736b9fb1f1b", size = 14408, upload-time = "2025-10-08T22:01:46.04Z" }, +] + +[[package]] +name = "typesense" +source = { virtual = "." } +dependencies = [ + { name = "httpx" }, + { name = "typing-extensions" }, +] + +[package.dev-dependencies] +dev = [ + { name = "coverage", version = "7.10.7", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "coverage", version = "7.13.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "faker", version = "37.12.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "faker", version = "38.2.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "isort", version = "6.1.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "isort", version = "7.0.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "mypy" }, + { name = "pytest", version = "8.4.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "pytest", version = "9.0.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "pytest-asyncio", version = "1.2.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "pytest-asyncio", version = "1.3.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "pytest-mock" }, + { name = "python-dotenv" }, + { name = "requests" }, + { name = "respx" }, + { name = "ruff" }, + { name = "unasync" }, +] 
+ +[package.metadata] +requires-dist = [ + { name = "httpx", specifier = ">=0.28.1" }, + { name = "typing-extensions" }, +] + +[package.metadata.requires-dev] +dev = [ + { name = "coverage" }, + { name = "faker" }, + { name = "isort", specifier = ">=6.0.1" }, + { name = "mypy", specifier = ">=1.19.0" }, + { name = "pytest" }, + { name = "pytest-asyncio" }, + { name = "pytest-mock" }, + { name = "python-dotenv" }, + { name = "requests" }, + { name = "respx", specifier = ">=0.22.0" }, + { name = "ruff", specifier = ">=0.11.11" }, + { name = "unasync", specifier = ">=0.6.0" }, +] + +[[package]] +name = "typing-extensions" +version = "4.15.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" }, +] + +[[package]] +name = "tzdata" +version = "2025.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/95/32/1a225d6164441be760d75c2c42e2780dc0873fe382da3e98a2e1e48361e5/tzdata-2025.2.tar.gz", hash = "sha256:b60a638fcc0daffadf82fe0f57e53d06bdec2f36c4df66280ae79bce6bd6f2b9", size = 196380, upload-time = "2025-03-23T13:54:43.652Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5c/23/c7abc0ca0a1526a0774eca151daeb8de62ec457e77262b66b359c3c7679e/tzdata-2025.2-py2.py3-none-any.whl", hash = "sha256:1a403fada01ff9221ca8044d701868fa132215d84beb92242d9acd2147f667a8", size = 347839, upload-time = 
"2025-03-23T13:54:41.845Z" }, +] + +[[package]] +name = "unasync" +version = "0.6.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "setuptools" }, + { name = "tokenize-rt" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/28/4e/735dbc0885ca197bcd80a2479ca24035627e2e768c784261fc7f1b8d7600/unasync-0.6.0.tar.gz", hash = "sha256:a9d01ace3e1068b20550ab15b7f9723b15b8bcde728bc1770bcb578374c7ee58", size = 18755, upload-time = "2024-05-03T11:14:58.312Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b8/b5/d2842541718ffa12060854735587543120a31ebc339435e0bd0faf368541/unasync-0.6.0-py3-none-any.whl", hash = "sha256:9cf7aaaea9737e417d8949bf9be55dc25fdb4ef1f4edc21b58f76ff0d2b9d73f", size = 9959, upload-time = "2024-05-03T11:14:56.17Z" }, +] + +[[package]] +name = "urllib3" +version = "2.6.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/5e/1d/0f3a93cca1ac5e8287842ed4eebbd0f7a991315089b1a0b01c7788aa7b63/urllib3-2.6.1.tar.gz", hash = "sha256:5379eb6e1aba4088bae84f8242960017ec8d8e3decf30480b3a1abdaa9671a3f", size = 432678, upload-time = "2025-12-08T15:25:26.773Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bc/56/190ceb8cb10511b730b564fb1e0293fa468363dbad26145c34928a60cb0c/urllib3-2.6.1-py3-none-any.whl", hash = "sha256:e67d06fe947c36a7ca39f4994b08d73922d40e6cca949907be05efa6fd75110b", size = 131138, upload-time = "2025-12-08T15:25:25.51Z" }, +] + +[[package]] +name = "zipp" +version = "3.23.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e3/02/0f2892c661036d50ede074e376733dca2ae7c6eb617489437771209d4180/zipp-3.23.0.tar.gz", hash = "sha256:a07157588a12518c9d4034df3fbbee09c814741a33ff63c05fa29d26a2404166", size = 25547, upload-time = "2025-06-08T17:06:39.4Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/2e/54/647ade08bf0db230bfea292f893923872fd20be6ac6f53b2b936ba839d75/zipp-3.23.0-py3-none-any.whl", hash = "sha256:071652d6115ed432f5ce1d34c336c0adfd6a884660d1e9712a256d3d3bd4b14e", size = 10276, upload-time = "2025-06-08T17:06:38.034Z" }, +]