From 1d526a018060d158e7548366216491c413756872 Mon Sep 17 00:00:00 2001 From: Marcel Telka Date: Thu, 21 Dec 2023 11:51:11 +0100 Subject: [PATCH 01/26] `types-certifi` and `types-chardet` are no longer needed (#3015) --- requirements.txt | 2 -- 1 file changed, 2 deletions(-) diff --git a/requirements.txt b/requirements.txt index 3597bc37f5..3fb85ca726 100644 --- a/requirements.txt +++ b/requirements.txt @@ -7,7 +7,6 @@ # Optional charset auto-detection # Used in our test cases chardet==5.2.0 -types-chardet==5.0.4.5 # Documentation mkdocs==1.5.3 @@ -22,7 +21,6 @@ twine==4.0.2 coverage[toml]==7.3.0 cryptography==41.0.7 mypy==1.5.1 -types-certifi==2021.10.8.2 pytest==7.4.3 ruff==0.1.6 trio==0.22.2 From 1a660147edf1d32af440fdfb115b71238eda7498 Mon Sep 17 00:00:00 2001 From: Kar Petrosyan <92274156+karpetrosyan@users.noreply.github.com> Date: Thu, 28 Dec 2023 16:50:43 +0400 Subject: [PATCH 02/26] Add missing argument (#3023) * Add missing argument * chaneglog * changelog --- CHANGELOG.md | 6 ++++++ httpx/_client.py | 1 + 2 files changed, 7 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 84b8709cbe..c895f68814 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,12 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). +## Unreleased + +### Fixed + +* Respect the `http1` argument while configuring proxy transports. 
(#3023) + ## 0.26.0 (20th December, 2023) ### Added diff --git a/httpx/_client.py b/httpx/_client.py index 2813a84f01..422bf0fad8 100644 --- a/httpx/_client.py +++ b/httpx/_client.py @@ -1493,6 +1493,7 @@ def _init_proxy_transport( return AsyncHTTPTransport( verify=verify, cert=cert, + http1=http1, http2=http2, limits=limits, trust_env=trust_env, From dd5304d3eb97f0aad7126a45f3fd7041dfee0ac2 Mon Sep 17 00:00:00 2001 From: Tom Christie Date: Thu, 28 Dec 2023 13:10:37 +0000 Subject: [PATCH 03/26] Tidy up import (#3020) Co-authored-by: Kar Petrosyan <92274156+karpetrosyan@users.noreply.github.com> --- tests/test_asgi.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/tests/test_asgi.py b/tests/test_asgi.py index 8bb6dcb7bc..2971506097 100644 --- a/tests/test_asgi.py +++ b/tests/test_asgi.py @@ -3,7 +3,6 @@ import pytest import httpx -from httpx import ASGITransport async def hello_world(scope, receive, send): @@ -209,7 +208,7 @@ async def read_body(scope, receive, send): @pytest.mark.anyio async def test_asgi_exc_no_raise(): - transport = ASGITransport(app=raise_exc, raise_app_exceptions=False) + transport = httpx.ASGITransport(app=raise_exc, raise_app_exceptions=False) async with httpx.AsyncClient(transport=transport) as client: response = await client.get("http://www.example.org/") From b871b4b8b29aca2e675645fae0c9f8e7d2a5e7d5 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 3 Jan 2024 09:11:45 +0400 Subject: [PATCH 04/26] Bump mkdocs-material from 9.4.14 to 9.5.3 (#3035) Bumps [mkdocs-material](https://github.com/squidfunk/mkdocs-material) from 9.4.14 to 9.5.3. 
- [Release notes](https://github.com/squidfunk/mkdocs-material/releases) - [Changelog](https://github.com/squidfunk/mkdocs-material/blob/master/CHANGELOG) - [Commits](https://github.com/squidfunk/mkdocs-material/compare/9.4.14...9.5.3) --- updated-dependencies: - dependency-name: mkdocs-material dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 3fb85ca726..c3619d2d8b 100644 --- a/requirements.txt +++ b/requirements.txt @@ -11,7 +11,7 @@ chardet==5.2.0 # Documentation mkdocs==1.5.3 mkautodoc==0.2.0 -mkdocs-material==9.4.14 +mkdocs-material==9.5.3 # Packaging build==1.0.3 From ea3071642d12ed546d901861a5901da6fad37073 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 3 Jan 2024 09:14:26 +0400 Subject: [PATCH 05/26] Bump ruff from 0.1.6 to 0.1.9 (#3031) Bumps [ruff](https://github.com/astral-sh/ruff) from 0.1.6 to 0.1.9. - [Release notes](https://github.com/astral-sh/ruff/releases) - [Changelog](https://github.com/astral-sh/ruff/blob/main/CHANGELOG.md) - [Commits](https://github.com/astral-sh/ruff/compare/v0.1.6...v0.1.9) --- updated-dependencies: - dependency-name: ruff dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Kar Petrosyan <92274156+karpetrosyan@users.noreply.github.com> --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index c3619d2d8b..af1caa5c5a 100644 --- a/requirements.txt +++ b/requirements.txt @@ -22,7 +22,7 @@ coverage[toml]==7.3.0 cryptography==41.0.7 mypy==1.5.1 pytest==7.4.3 -ruff==0.1.6 +ruff==0.1.9 trio==0.22.2 trio-typing==0.10.0 trustme==1.1.0 From f1ed7463086f0aed97afa61a1197c51840d3bb2e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 3 Jan 2024 09:36:16 +0400 Subject: [PATCH 06/26] Bump actions/setup-python from 4 to 5 (#3036) Bumps [actions/setup-python](https://github.com/actions/setup-python) from 4 to 5. - [Release notes](https://github.com/actions/setup-python/releases) - [Commits](https://github.com/actions/setup-python/compare/v4...v5) --- updated-dependencies: - dependency-name: actions/setup-python dependency-type: direct:production update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Kar Petrosyan <92274156+karpetrosyan@users.noreply.github.com> --- .github/workflows/publish.yml | 2 +- .github/workflows/test-suite.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 4ceb8c69e0..2c7cfb9f9b 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -15,7 +15,7 @@ jobs: steps: - uses: "actions/checkout@v4" - - uses: "actions/setup-python@v4" + - uses: "actions/setup-python@v5" with: python-version: 3.8 - name: "Install dependencies" diff --git a/.github/workflows/test-suite.yml b/.github/workflows/test-suite.yml index c3ad08f145..0bb570cedb 100644 --- a/.github/workflows/test-suite.yml +++ b/.github/workflows/test-suite.yml @@ -18,7 +18,7 @@ jobs: steps: - uses: "actions/checkout@v4" - - uses: "actions/setup-python@v4" + - uses: "actions/setup-python@v5" with: python-version: "${{ matrix.python-version }}" allow-prereleases: true From 4ddff16bbe41c0bc4b8a5ea3a2c204d8e99478f3 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 3 Jan 2024 09:44:26 +0400 Subject: [PATCH 07/26] Bump coverage[toml] from 7.3.0 to 7.4.0 (#3034) Bumps [coverage[toml]](https://github.com/nedbat/coveragepy) from 7.3.0 to 7.4.0. - [Release notes](https://github.com/nedbat/coveragepy/releases) - [Changelog](https://github.com/nedbat/coveragepy/blob/master/CHANGES.rst) - [Commits](https://github.com/nedbat/coveragepy/compare/7.3.0...7.4.0) --- updated-dependencies: - dependency-name: coverage[toml] dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Kar Petrosyan <92274156+karpetrosyan@users.noreply.github.com> --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index af1caa5c5a..5b737a2c18 100644 --- a/requirements.txt +++ b/requirements.txt @@ -18,7 +18,7 @@ build==1.0.3 twine==4.0.2 # Tests & Linting -coverage[toml]==7.3.0 +coverage[toml]==7.4.0 cryptography==41.0.7 mypy==1.5.1 pytest==7.4.3 From ebc1393c5cf6430e3027bdb724368a9d6e3429ef Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 3 Jan 2024 11:18:02 +0400 Subject: [PATCH 08/26] Bump pytest from 7.4.3 to 7.4.4 (#3032) Bumps [pytest](https://github.com/pytest-dev/pytest) from 7.4.3 to 7.4.4. - [Release notes](https://github.com/pytest-dev/pytest/releases) - [Changelog](https://github.com/pytest-dev/pytest/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pytest-dev/pytest/compare/7.4.3...7.4.4) --- updated-dependencies: - dependency-name: pytest dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Kar Petrosyan <92274156+karpetrosyan@users.noreply.github.com> --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 5b737a2c18..c7b24a896c 100644 --- a/requirements.txt +++ b/requirements.txt @@ -21,7 +21,7 @@ twine==4.0.2 coverage[toml]==7.4.0 cryptography==41.0.7 mypy==1.5.1 -pytest==7.4.3 +pytest==7.4.4 ruff==0.1.9 trio==0.22.2 trio-typing==0.10.0 From c6907c22034e2739c4c1af89908e3c9f90602788 Mon Sep 17 00:00:00 2001 From: Kar Petrosyan <92274156+karpetrosyan@users.noreply.github.com> Date: Thu, 4 Jan 2024 19:03:09 +0400 Subject: [PATCH 09/26] Remove unused type: ignore (#3038) * Remove unused type: ignore * Bump mypy version * Revert "Bump mypy version" This reverts commit 55b44b5d2f6f6b6417c197c653a43e3db3bf8804. * Bump mypy --------- Co-authored-by: Tom Christie --- httpx/_config.py | 2 +- httpx/_multipart.py | 4 ++-- requirements.txt | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/httpx/_config.py b/httpx/_config.py index 05c096dfc3..0cfd552e49 100644 --- a/httpx/_config.py +++ b/httpx/_config.py @@ -185,7 +185,7 @@ def _load_client_certs(self, ssl_context: ssl.SSLContext) -> None: ssl_context.load_cert_chain( certfile=self.cert[0], keyfile=self.cert[1], - password=self.cert[2], # type: ignore + password=self.cert[2], ) diff --git a/httpx/_multipart.py b/httpx/_multipart.py index 5122d5114f..1d451c382b 100644 --- a/httpx/_multipart.py +++ b/httpx/_multipart.py @@ -104,9 +104,9 @@ def __init__(self, name: str, value: FileTypes) -> None: if len(value) == 2: # neither the 3rd parameter (content_type) nor the 4th (headers) # was included - filename, fileobj = value # type: ignore + filename, fileobj = value elif len(value) == 3: - filename, fileobj, content_type = value # type: ignore + filename, fileobj, content_type = value else: # all 
4 parameters included filename, fileobj, content_type, headers = value # type: ignore diff --git a/requirements.txt b/requirements.txt index c7b24a896c..218f06c6e5 100644 --- a/requirements.txt +++ b/requirements.txt @@ -20,7 +20,7 @@ twine==4.0.2 # Tests & Linting coverage[toml]==7.4.0 cryptography==41.0.7 -mypy==1.5.1 +mypy==1.8.0 pytest==7.4.4 ruff==0.1.9 trio==0.22.2 From ca51b4532a4495c38c8abf1a393dbd80099deae3 Mon Sep 17 00:00:00 2001 From: Kar Petrosyan <92274156+karpetrosyan@users.noreply.github.com> Date: Mon, 8 Jan 2024 15:09:14 +0400 Subject: [PATCH 10/26] Keep clients in sync (#3042) * Keep clients in sync * Update httpx/_client.py * Update httpx/_client.py --- httpx/_client.py | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) diff --git a/httpx/_client.py b/httpx/_client.py index 422bf0fad8..a0b4209c46 100644 --- a/httpx/_client.py +++ b/httpx/_client.py @@ -598,6 +598,8 @@ class Client(BaseClient): to authenticate the client. Either a path to an SSL certificate file, or two-tuple of (certificate file, key file), or a three-tuple of (certificate file, key file, password). + * **http2** - *(optional)* A boolean indicating if HTTP/2 support should be + enabled. Defaults to `False`. * **proxy** - *(optional)* A proxy URL where all the traffic should be routed. * **proxies** - *(optional)* A dictionary mapping proxy keys to proxy URLs. @@ -1311,6 +1313,8 @@ class AsyncClient(BaseClient): An asynchronous HTTP client, with connection pooling, HTTP/2, redirects, cookie persistence, etc. + It can be shared between tasks. + Usage: ```python @@ -1544,6 +1548,15 @@ async def request( [0]: /advanced/#merging-of-configuration """ + + if cookies is not None: # pragma: no cover + message = ( + "Setting per-request cookies=<...> is being deprecated, because " + "the expected behaviour on cookie persistence is ambiguous. Set " + "cookies directly on the client instance instead." 
+ ) + warnings.warn(message, DeprecationWarning) + request = self.build_request( method=method, url=url, @@ -1656,7 +1669,7 @@ async def send( return response - except BaseException as exc: # pragma: no cover + except BaseException as exc: await response.aclose() raise exc From 99cba6ac642b12f0331d069ae324402c54c4c600 Mon Sep 17 00:00:00 2001 From: Tereza Tomcova Date: Wed, 10 Jan 2024 11:08:42 +0100 Subject: [PATCH 11/26] Fix RFC 2069 mode digest authentication (#3045) * Fix RFC 2069 mode digest authentication * Update CHANGELOG.md --- CHANGELOG.md | 1 + httpx/_auth.py | 7 +- tests/test_auth.py | 165 +++++++++++++++++++++++++++++++++++++++++++++ 3 files changed, 170 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index c895f68814..47ac88c834 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -9,6 +9,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). ### Fixed * Respect the `http1` argument while configuring proxy transports. (#3023) +* Fix RFC 2069 mode digest authentication. 
(#3045) ## 0.26.0 (20th December, 2023) diff --git a/httpx/_auth.py b/httpx/_auth.py index 66132500ff..e8bc0cd961 100644 --- a/httpx/_auth.py +++ b/httpx/_auth.py @@ -280,17 +280,18 @@ def digest(data: bytes) -> bytes: qop = self._resolve_qop(challenge.qop, request=request) if qop is None: + # Following RFC 2069 digest_data = [HA1, challenge.nonce, HA2] else: - digest_data = [challenge.nonce, nc_value, cnonce, qop, HA2] - key_digest = b":".join(digest_data) + # Following RFC 2617/7616 + digest_data = [HA1, challenge.nonce, nc_value, cnonce, qop, HA2] format_args = { "username": self._username, "realm": challenge.realm, "nonce": challenge.nonce, "uri": path, - "response": digest(b":".join((HA1, key_digest))), + "response": digest(b":".join(digest_data)), "algorithm": challenge.algorithm.encode(), } if challenge.opaque: diff --git a/tests/test_auth.py b/tests/test_auth.py index 563256954d..7bb45de566 100644 --- a/tests/test_auth.py +++ b/tests/test_auth.py @@ -140,3 +140,168 @@ def test_digest_auth_setting_cookie_in_request(): ) with pytest.raises(StopIteration): flow.send(response) + + +def test_digest_auth_rfc_2069(): + # Example from https://datatracker.ietf.org/doc/html/rfc2069#section-2.4 + # with corrected response from https://www.rfc-editor.org/errata/eid749 + + auth = httpx.DigestAuth(username="Mufasa", password="CircleOfLife") + request = httpx.Request("GET", "https://www.example.com/dir/index.html") + + # The initial request should not include an auth header. + flow = auth.sync_auth_flow(request) + request = next(flow) + assert "Authorization" not in request.headers + + # If a 401 response is returned, then a digest auth request is made. 
+ headers = { + "WWW-Authenticate": ( + 'Digest realm="testrealm@host.com", ' + 'nonce="dcd98b7102dd2f0e8b11d0f600bfb0c093", ' + 'opaque="5ccc069c403ebaf9f0171e9517f40e41"' + ) + } + response = httpx.Response( + content=b"Auth required", status_code=401, headers=headers, request=request + ) + request = flow.send(response) + assert request.headers["Authorization"].startswith("Digest") + assert 'username="Mufasa"' in request.headers["Authorization"] + assert 'realm="testrealm@host.com"' in request.headers["Authorization"] + assert ( + 'nonce="dcd98b7102dd2f0e8b11d0f600bfb0c093"' in request.headers["Authorization"] + ) + assert 'uri="/dir/index.html"' in request.headers["Authorization"] + assert ( + 'opaque="5ccc069c403ebaf9f0171e9517f40e41"' in request.headers["Authorization"] + ) + assert ( + 'response="1949323746fe6a43ef61f9606e7febea"' + in request.headers["Authorization"] + ) + + # No other requests are made. + response = httpx.Response(content=b"Hello, world!", status_code=200) + with pytest.raises(StopIteration): + flow.send(response) + + +def test_digest_auth_rfc_7616_md5(monkeypatch): + # Example from https://datatracker.ietf.org/doc/html/rfc7616#section-3.9.1 + + def mock_get_client_nonce(nonce_count: int, nonce: bytes) -> bytes: + return "f2/wE4q74E6zIJEtWaHKaf5wv/H5QzzpXusqGemxURZJ".encode() + + auth = httpx.DigestAuth(username="Mufasa", password="Circle of Life") + monkeypatch.setattr(auth, "_get_client_nonce", mock_get_client_nonce) + + request = httpx.Request("GET", "https://www.example.com/dir/index.html") + + # The initial request should not include an auth header. + flow = auth.sync_auth_flow(request) + request = next(flow) + assert "Authorization" not in request.headers + + # If a 401 response is returned, then a digest auth request is made. 
+ headers = { + "WWW-Authenticate": ( + 'Digest realm="http-auth@example.org", ' + 'qop="auth, auth-int", ' + "algorithm=MD5, " + 'nonce="7ypf/xlj9XXwfDPEoM4URrv/xwf94BcCAzFZH4GiTo0v", ' + 'opaque="FQhe/qaU925kfnzjCev0ciny7QMkPqMAFRtzCUYo5tdS"' + ) + } + response = httpx.Response( + content=b"Auth required", status_code=401, headers=headers, request=request + ) + request = flow.send(response) + assert request.headers["Authorization"].startswith("Digest") + assert 'username="Mufasa"' in request.headers["Authorization"] + assert 'realm="http-auth@example.org"' in request.headers["Authorization"] + assert 'uri="/dir/index.html"' in request.headers["Authorization"] + assert "algorithm=MD5" in request.headers["Authorization"] + assert ( + 'nonce="7ypf/xlj9XXwfDPEoM4URrv/xwf94BcCAzFZH4GiTo0v"' + in request.headers["Authorization"] + ) + assert "nc=00000001" in request.headers["Authorization"] + assert ( + 'cnonce="f2/wE4q74E6zIJEtWaHKaf5wv/H5QzzpXusqGemxURZJ"' + in request.headers["Authorization"] + ) + assert "qop=auth" in request.headers["Authorization"] + assert ( + 'opaque="FQhe/qaU925kfnzjCev0ciny7QMkPqMAFRtzCUYo5tdS"' + in request.headers["Authorization"] + ) + assert ( + 'response="8ca523f5e9506fed4657c9700eebdbec"' + in request.headers["Authorization"] + ) + + # No other requests are made. + response = httpx.Response(content=b"Hello, world!", status_code=200) + with pytest.raises(StopIteration): + flow.send(response) + + +def test_digest_auth_rfc_7616_sha_256(monkeypatch): + # Example from https://datatracker.ietf.org/doc/html/rfc7616#section-3.9.1 + + def mock_get_client_nonce(nonce_count: int, nonce: bytes) -> bytes: + return "f2/wE4q74E6zIJEtWaHKaf5wv/H5QzzpXusqGemxURZJ".encode() + + auth = httpx.DigestAuth(username="Mufasa", password="Circle of Life") + monkeypatch.setattr(auth, "_get_client_nonce", mock_get_client_nonce) + + request = httpx.Request("GET", "https://www.example.com/dir/index.html") + + # The initial request should not include an auth header. 
+ flow = auth.sync_auth_flow(request) + request = next(flow) + assert "Authorization" not in request.headers + + # If a 401 response is returned, then a digest auth request is made. + headers = { + "WWW-Authenticate": ( + 'Digest realm="http-auth@example.org", ' + 'qop="auth, auth-int", ' + "algorithm=SHA-256, " + 'nonce="7ypf/xlj9XXwfDPEoM4URrv/xwf94BcCAzFZH4GiTo0v", ' + 'opaque="FQhe/qaU925kfnzjCev0ciny7QMkPqMAFRtzCUYo5tdS"' + ) + } + response = httpx.Response( + content=b"Auth required", status_code=401, headers=headers, request=request + ) + request = flow.send(response) + assert request.headers["Authorization"].startswith("Digest") + assert 'username="Mufasa"' in request.headers["Authorization"] + assert 'realm="http-auth@example.org"' in request.headers["Authorization"] + assert 'uri="/dir/index.html"' in request.headers["Authorization"] + assert "algorithm=SHA-256" in request.headers["Authorization"] + assert ( + 'nonce="7ypf/xlj9XXwfDPEoM4URrv/xwf94BcCAzFZH4GiTo0v"' + in request.headers["Authorization"] + ) + assert "nc=00000001" in request.headers["Authorization"] + assert ( + 'cnonce="f2/wE4q74E6zIJEtWaHKaf5wv/H5QzzpXusqGemxURZJ"' + in request.headers["Authorization"] + ) + assert "qop=auth" in request.headers["Authorization"] + assert ( + 'opaque="FQhe/qaU925kfnzjCev0ciny7QMkPqMAFRtzCUYo5tdS"' + in request.headers["Authorization"] + ) + assert ( + 'response="753927fa0e85d155564e2e272a28d1802ca10daf4496794697cf8db5856cb6c1"' + in request.headers["Authorization"] + ) + + # No other requests are made. 
+ response = httpx.Response(content=b"Hello, world!", status_code=200) + with pytest.raises(StopIteration): + flow.send(response) From ab720d325808e1d092787995c2023c9aeb24aff5 Mon Sep 17 00:00:00 2001 From: Marcelo Trylesinski Date: Fri, 12 Jan 2024 19:48:03 +0100 Subject: [PATCH 12/26] Group dependencies on dependabot updates (#3054) --- .github/dependabot.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 32fd80e7ba..ec7ea763db 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -4,6 +4,10 @@ updates: directory: "/" schedule: interval: "monthly" + groups: + python-packages: + patterns: + - "*" - package-ecosystem: "github-actions" directory: "/" schedule: From 8cd952c88fc2d0469cee8f08c2a580652b084375 Mon Sep 17 00:00:00 2001 From: Tom Christie Date: Mon, 15 Jan 2024 09:48:56 +0000 Subject: [PATCH 13/26] Docs restructuring. (#3049) * Tweak docs layout * Move client docs into folder * Add clients/authentication section * Client authentication docs * Fix authentication example * SSL Context * Timeouts * Event hooks * Proxies, Transports * Text encodings * Resource limits * 'Clients' -> 'Advanced' * 'Clients' -> 'Advanced' * Add client docs --------- Co-authored-by: Kar Petrosyan <92274156+karpetrosyan@users.noreply.github.com> --- docs/advanced.md | 1296 ------------------------------ docs/advanced/authentication.md | 232 ++++++ docs/advanced/clients.md | 324 ++++++++ docs/advanced/event-hooks.md | 65 ++ docs/advanced/proxies.md | 83 ++ docs/advanced/resource-limits.md | 13 + docs/advanced/ssl.md | 100 +++ docs/advanced/text-encodings.md | 75 ++ docs/advanced/timeouts.md | 71 ++ docs/advanced/transports.md | 344 ++++++++ mkdocs.yml | 16 +- 11 files changed, 1318 insertions(+), 1301 deletions(-) delete mode 100644 docs/advanced.md create mode 100644 docs/advanced/authentication.md create mode 100644 docs/advanced/clients.md create mode 100644 docs/advanced/event-hooks.md create mode 100644 
docs/advanced/proxies.md create mode 100644 docs/advanced/resource-limits.md create mode 100644 docs/advanced/ssl.md create mode 100644 docs/advanced/text-encodings.md create mode 100644 docs/advanced/timeouts.md create mode 100644 docs/advanced/transports.md diff --git a/docs/advanced.md b/docs/advanced.md deleted file mode 100644 index bb003a1a52..0000000000 --- a/docs/advanced.md +++ /dev/null @@ -1,1296 +0,0 @@ -# Advanced Usage - -## Client Instances - -!!! hint - If you are coming from Requests, `httpx.Client()` is what you can use instead of `requests.Session()`. - -### Why use a Client? - -!!! note "TL;DR" - If you do anything more than experimentation, one-off scripts, or prototypes, then you should use a `Client` instance. - -#### More efficient usage of network resources - -When you make requests using the top-level API as documented in the [Quickstart](quickstart.md) guide, HTTPX has to establish a new connection _for every single request_ (connections are not reused). As the number of requests to a host increases, this quickly becomes inefficient. - -On the other hand, a `Client` instance uses [HTTP connection pooling](https://en.wikipedia.org/wiki/HTTP_persistent_connection). This means that when you make several requests to the same host, the `Client` will reuse the underlying TCP connection, instead of recreating one for every single request. - -This can bring **significant performance improvements** compared to using the top-level API, including: - -- Reduced latency across requests (no handshaking). -- Reduced CPU usage and round-trips. -- Reduced network congestion. - -#### Extra features - -`Client` instances also support features that aren't available at the top-level API, such as: - -- Cookie persistence across requests. -- Applying configuration across all outgoing requests. -- Sending requests through HTTP proxies. -- Using [HTTP/2](http2.md). 
- -The other sections on this page go into further detail about what you can do with a `Client` instance. - -### Usage - -The recommended way to use a `Client` is as a context manager. This will ensure that connections are properly cleaned up when leaving the `with` block: - -```python -with httpx.Client() as client: - ... -``` - -Alternatively, you can explicitly close the connection pool without block-usage using `.close()`: - -```python -client = httpx.Client() -try: - ... -finally: - client.close() -``` - -### Making requests - -Once you have a `Client`, you can send requests using `.get()`, `.post()`, etc. For example: - -```pycon ->>> with httpx.Client() as client: -... r = client.get('https://example.com') -... ->>> r - -``` - -These methods accept the same arguments as `httpx.get()`, `httpx.post()`, etc. This means that all features documented in the [Quickstart](quickstart.md) guide are also available at the client level. - -For example, to send a request with custom headers: - -```pycon ->>> with httpx.Client() as client: -... headers = {'X-Custom': 'value'} -... r = client.get('https://example.com', headers=headers) -... ->>> r.request.headers['X-Custom'] -'value' -``` - -### Sharing configuration across requests - -Clients allow you to apply configuration to all outgoing requests by passing parameters to the `Client` constructor. - -For example, to apply a set of custom headers _on every request_: - -```pycon ->>> url = 'http://httpbin.org/headers' ->>> headers = {'user-agent': 'my-app/0.0.1'} ->>> with httpx.Client(headers=headers) as client: -... r = client.get(url) -... ->>> r.json()['headers']['User-Agent'] -'my-app/0.0.1' -``` - -### Merging of configuration - -When a configuration option is provided at both the client-level and request-level, one of two things can happen: - -- For headers, query parameters and cookies, the values are combined together. 
For example: - -```pycon ->>> headers = {'X-Auth': 'from-client'} ->>> params = {'client_id': 'client1'} ->>> with httpx.Client(headers=headers, params=params) as client: -... headers = {'X-Custom': 'from-request'} -... params = {'request_id': 'request1'} -... r = client.get('https://example.com', headers=headers, params=params) -... ->>> r.request.url -URL('https://example.com?client_id=client1&request_id=request1') ->>> r.request.headers['X-Auth'] -'from-client' ->>> r.request.headers['X-Custom'] -'from-request' -``` - -- For all other parameters, the request-level value takes priority. For example: - -```pycon ->>> with httpx.Client(auth=('tom', 'mot123')) as client: -... r = client.get('https://example.com', auth=('alice', 'ecila123')) -... ->>> _, _, auth = r.request.headers['Authorization'].partition(' ') ->>> import base64 ->>> base64.b64decode(auth) -b'alice:ecila123' -``` - -If you need finer-grained control on the merging of client-level and request-level parameters, see [Request instances](#request-instances). - -### Other Client-only configuration options - -Additionally, `Client` accepts some configuration options that aren't available at the request level. - -For example, `base_url` allows you to prepend an URL to all outgoing requests: - -```pycon ->>> with httpx.Client(base_url='http://httpbin.org') as client: -... r = client.get('/headers') -... ->>> r.request.url -URL('http://httpbin.org/headers') -``` - -For a list of all available client parameters, see the [`Client`](api.md#client) API reference. - ---- - -## Character set encodings and auto-detection - -When accessing `response.text`, we need to decode the response bytes into a unicode text representation. - -By default `httpx` will use `"charset"` information included in the response `Content-Type` header to determine how the response bytes should be decoded into text. 
- -In cases where no charset information is included on the response, the default behaviour is to assume "utf-8" encoding, which is by far the most widely used text encoding on the internet. - -### Using the default encoding - -To understand this better let's start by looking at the default behaviour for text decoding... - -```python -import httpx -# Instantiate a client with the default configuration. -client = httpx.Client() -# Using the client... -response = client.get(...) -print(response.encoding) # This will either print the charset given in - # the Content-Type charset, or else "utf-8". -print(response.text) # The text will either be decoded with the Content-Type - # charset, or using "utf-8". -``` - -This is normally absolutely fine. Most servers will respond with a properly formatted Content-Type header, including a charset encoding. And in most cases where no charset encoding is included, UTF-8 is very likely to be used, since it is so widely adopted. - -### Using an explicit encoding - -In some cases we might be making requests to a site where no character set information is being set explicitly by the server, but we know what the encoding is. In this case it's best to set the default encoding explicitly on the client. - -```python -import httpx -# Instantiate a client with a Japanese character set as the default encoding. -client = httpx.Client(default_encoding="shift-jis") -# Using the client... -response = client.get(...) -print(response.encoding) # This will either print the charset given in - # the Content-Type charset, or else "shift-jis". -print(response.text) # The text will either be decoded with the Content-Type - # charset, or using "shift-jis". -``` - -### Using character set auto-detection - -In cases where the server is not reliably including character set information, and where we don't know what encoding is being used, we can enable auto-detection to make a best-guess attempt when decoding from bytes to text. 
- -To use auto-detection you need to set the `default_encoding` argument to a callable instead of a string. This callable should be a function which takes the input bytes as an argument and returns the character set to use for decoding those bytes to text. - -There are two widely used Python packages which both handle this functionality: - -* [`chardet`](https://chardet.readthedocs.io/) - This is a well established package, and is a port of [the auto-detection code in Mozilla](https://www-archive.mozilla.org/projects/intl/chardet.html). -* [`charset-normalizer`](https://charset-normalizer.readthedocs.io/) - A newer package, motivated by `chardet`, with a different approach. - -Let's take a look at installing autodetection using one of these packages... - - ```shell -$ pip install httpx -$ pip install chardet - ``` - -Once `chardet` is installed, we can configure a client to use character-set autodetection. - -```python -import httpx -import chardet - -def autodetect(content): - return chardet.detect(content).get("encoding") - -# Using a client with character-set autodetection enabled. -client = httpx.Client(default_encoding=autodetect) -response = client.get(...) -print(response.encoding) # This will either print the charset given in - # the Content-Type charset, or else the auto-detected - # character set. -print(response.text) -``` - ---- - -## Calling into Python Web Apps - -You can configure an `httpx` client to call directly into a Python web application using the WSGI protocol. - -This is particularly useful for two main use-cases: - -* Using `httpx` as a client inside test cases. -* Mocking out external services during tests or in dev/staging environments. - -Here's an example of integrating against a Flask application: - -```python -from flask import Flask -import httpx - - -app = Flask(__name__) - -@app.route("/") -def hello(): - return "Hello World!" 
- -with httpx.Client(app=app, base_url="http://testserver") as client: - r = client.get("/") - assert r.status_code == 200 - assert r.text == "Hello World!" -``` - -For some more complex cases you might need to customize the WSGI transport. This allows you to: - -* Inspect 500 error responses rather than raise exceptions by setting `raise_app_exceptions=False`. -* Mount the WSGI application at a subpath by setting `script_name` (WSGI). -* Use a given client address for requests by setting `remote_addr` (WSGI). - -For example: - -```python -# Instantiate a client that makes WSGI requests with a client IP of "1.2.3.4". -transport = httpx.WSGITransport(app=app, remote_addr="1.2.3.4") -with httpx.Client(transport=transport, base_url="http://testserver") as client: - ... -``` - -## Request instances - -For maximum control on what gets sent over the wire, HTTPX supports building explicit [`Request`](api.md#request) instances: - -```python -request = httpx.Request("GET", "https://example.com") -``` - -To dispatch a `Request` instance across to the network, create a [`Client` instance](#client-instances) and use `.send()`: - -```python -with httpx.Client() as client: - response = client.send(request) - ... -``` - -If you need to mix client-level and request-level options in a way that is not supported by the default [Merging of parameters](#merging-of-parameters), you can use `.build_request()` and then make arbitrary modifications to the `Request` instance. For example: - -```python -headers = {"X-Api-Key": "...", "X-Client-ID": "ABC123"} - -with httpx.Client(headers=headers) as client: - request = client.build_request("GET", "https://api.example.com") - - print(request.headers["X-Client-ID"]) # "ABC123" - - # Don't send the API key for this particular request. - del request.headers["X-Api-Key"] - - response = client.send(request) - ... 
-``` - -## Event Hooks - -HTTPX allows you to register "event hooks" with the client, that are called -every time a particular type of event takes place. - -There are currently two event hooks: - -* `request` - Called after a request is fully prepared, but before it is sent to the network. Passed the `request` instance. -* `response` - Called after the response has been fetched from the network, but before it is returned to the caller. Passed the `response` instance. - -These allow you to install client-wide functionality such as logging, monitoring or tracing. - -```python -def log_request(request): - print(f"Request event hook: {request.method} {request.url} - Waiting for response") - -def log_response(response): - request = response.request - print(f"Response event hook: {request.method} {request.url} - Status {response.status_code}") - -client = httpx.Client(event_hooks={'request': [log_request], 'response': [log_response]}) -``` - -You can also use these hooks to install response processing code, such as this -example, which creates a client instance that always raises `httpx.HTTPStatusError` -on 4xx and 5xx responses. - -```python -def raise_on_4xx_5xx(response): - response.raise_for_status() - -client = httpx.Client(event_hooks={'response': [raise_on_4xx_5xx]}) -``` - -!!! note - Response event hooks are called before determining if the response body - should be read or not. - - If you need access to the response body inside an event hook, you'll - need to call `response.read()`, or for AsyncClients, `response.aread()`. - -The hooks are also allowed to modify `request` and `response` objects. - -```python -def add_timestamp(request): - request.headers['x-request-timestamp'] = datetime.now(tz=datetime.utc).isoformat() - -client = httpx.Client(event_hooks={'request': [add_timestamp]}) -``` - -Event hooks must always be set as a **list of callables**, and you may register -multiple event hooks for each type of event. 
- -As well as being able to set event hooks on instantiating the client, there -is also an `.event_hooks` property, that allows you to inspect and modify -the installed hooks. - -```python -client = httpx.Client() -client.event_hooks['request'] = [log_request] -client.event_hooks['response'] = [log_response, raise_on_4xx_5xx] -``` - -!!! note - If you are using HTTPX's async support, then you need to be aware that - hooks registered with `httpx.AsyncClient` MUST be async functions, - rather than plain functions. - -## Monitoring download progress - -If you need to monitor download progress of large responses, you can use response streaming and inspect the `response.num_bytes_downloaded` property. - -This interface is required for properly determining download progress, because the total number of bytes returned by `response.content` or `response.iter_content()` will not always correspond with the raw content length of the response if HTTP response compression is being used. - -For example, showing a progress bar using the [`tqdm`](https://github.com/tqdm/tqdm) library while a response is being downloaded could be done like this… - -```python -import tempfile - -import httpx -from tqdm import tqdm - -with tempfile.NamedTemporaryFile() as download_file: - url = "https://speed.hetzner.de/100MB.bin" - with httpx.stream("GET", url) as response: - total = int(response.headers["Content-Length"]) - - with tqdm(total=total, unit_scale=True, unit_divisor=1024, unit="B") as progress: - num_bytes_downloaded = response.num_bytes_downloaded - for chunk in response.iter_bytes(): - download_file.write(chunk) - progress.update(response.num_bytes_downloaded - num_bytes_downloaded) - num_bytes_downloaded = response.num_bytes_downloaded -``` - -![tqdm progress bar](img/tqdm-progress.gif) - -Or an alternate example, this time using the [`rich`](https://github.com/willmcgugan/rich) library… - -```python -import tempfile -import httpx -import rich.progress - -with 
tempfile.NamedTemporaryFile() as download_file: - url = "https://speed.hetzner.de/100MB.bin" - with httpx.stream("GET", url) as response: - total = int(response.headers["Content-Length"]) - - with rich.progress.Progress( - "[progress.percentage]{task.percentage:>3.0f}%", - rich.progress.BarColumn(bar_width=None), - rich.progress.DownloadColumn(), - rich.progress.TransferSpeedColumn(), - ) as progress: - download_task = progress.add_task("Download", total=total) - for chunk in response.iter_bytes(): - download_file.write(chunk) - progress.update(download_task, completed=response.num_bytes_downloaded) -``` - -![rich progress bar](img/rich-progress.gif) - -## Monitoring upload progress - -If you need to monitor upload progress of large responses, you can use request content generator streaming. - -For example, showing a progress bar using the [`tqdm`](https://github.com/tqdm/tqdm) library. - -```python -import io -import random - -import httpx -from tqdm import tqdm - - -def gen(): - """ - this is a complete example with generated random bytes. - you can replace `io.BytesIO` with real file object. - """ - total = 32 * 1024 * 1024 # 32m - with tqdm(ascii=True, unit_scale=True, unit='B', unit_divisor=1024, total=total) as bar: - with io.BytesIO(random.randbytes(total)) as f: - while data := f.read(1024): - yield data - bar.update(len(data)) - - -httpx.post("https://httpbin.org/post", content=gen()) -``` - -![tqdm progress bar](img/tqdm-progress.gif) - -## .netrc Support - -HTTPX can be configured to use [a `.netrc` config file](https://everything.curl.dev/usingcurl/netrc) for authentication. - -The `.netrc` config file allows authentication credentials to be associated with specified hosts. When a request is made to a host that is found in the netrc file, the username and password will be included using HTTP basic auth. 
- -Example `.netrc` file: - -``` -machine example.org -login example-username -password example-password - -machine python-httpx.org -login other-username -password other-password -``` - -Some examples of configuring `.netrc` authentication with `httpx`. - -Use the default `.netrc` file in the users home directory: - -```pycon ->>> auth = httpx.NetRCAuth() ->>> client = httpx.Client(auth=auth) -``` - -Use an explicit path to a `.netrc` file: - -```pycon ->>> auth = httpx.NetRCAuth(file="/path/to/.netrc") ->>> client = httpx.Client(auth=auth) -``` - -Use the `NETRC` environment variable to configure a path to the `.netrc` file, -or fallback to the default. - -```pycon ->>> auth = httpx.NetRCAuth(file=os.environ.get("NETRC")) ->>> client = httpx.Client(auth=auth) -``` - -The `NetRCAuth()` class uses [the `netrc.netrc()` function from the Python standard library](https://docs.python.org/3/library/netrc.html). See the documentation there for more details on exceptions that may be raised if the netrc file is not found, or cannot be parsed. - -## HTTP Proxying - -HTTPX supports setting up [HTTP proxies](https://en.wikipedia.org/wiki/Proxy_server#Web_proxy_servers) via the `proxy` parameter to be passed on client initialization or top-level API functions like `httpx.get(..., proxy=...)`. - -
- -
Diagram of how a proxy works (source: Wikipedia). The left hand side "Internet" blob may be your HTTPX client requesting example.com through a proxy.
-
- -### Example - -To route all traffic (HTTP and HTTPS) to a proxy located at `http://localhost:8030`, pass the proxy URL to the client... - -```python -with httpx.Client(proxy="http://localhost:8030") as client: - ... -``` - -For more advanced use cases, pass a mounts `dict`. For example, to route HTTP and HTTPS requests to 2 different proxies, respectively located at `http://localhost:8030`, and `http://localhost:8031`, pass a `dict` of proxy URLs: - -```python -proxy_mounts = { - "http://": httpx.HTTPTransport(proxy="http://localhost:8030"), - "https://": httpx.HTTPTransport(proxy="http://localhost:8031"), -} - -with httpx.Client(mounts=proxy_mounts) as client: - ... -``` - -For detailed information about proxy routing, see the [Routing](#routing) section. - -!!! tip "Gotcha" - In most cases, the proxy URL for the `https://` key _should_ use the `http://` scheme (that's not a typo!). - - This is because HTTP proxying requires initiating a connection with the proxy server. While it's possible that your proxy supports doing it via HTTPS, most proxies only support doing it via HTTP. - - For more information, see [FORWARD vs TUNNEL](#forward-vs-tunnel). - -### Authentication - -Proxy credentials can be passed as the `userinfo` section of the proxy URL. For example: - -```python -with httpx.Client(proxy="http://username:password@localhost:8030") as client: - ... -``` - -### Proxy mechanisms - -!!! note - This section describes **advanced** proxy concepts and functionality. - -#### FORWARD vs TUNNEL - -In general, the flow for making an HTTP request through a proxy is as follows: - -1. The client connects to the proxy (initial connection request). -2. The proxy transfers data to the server on your behalf. - -How exactly step 2/ is performed depends on which of two proxying mechanisms is used: - -* **Forwarding**: the proxy makes the request for you, and sends back the response it obtained from the server. 
-* **Tunnelling**: the proxy establishes a TCP connection to the server on your behalf, and the client reuses this connection to send the request and receive the response. This is known as an [HTTP Tunnel](https://en.wikipedia.org/wiki/HTTP_tunnel). This mechanism is how you can access websites that use HTTPS from an HTTP proxy (the client "upgrades" the connection to HTTPS by performing the TLS handshake with the server over the TCP connection provided by the proxy). - -### Troubleshooting proxies - -If you encounter issues when setting up proxies, please refer to our [Troubleshooting guide](troubleshooting.md#proxies). - -## SOCKS - -In addition to HTTP proxies, `httpcore` also supports proxies using the SOCKS protocol. -This is an optional feature that requires an additional third-party library be installed before use. - -You can install SOCKS support using `pip`: - -```shell -$ pip install httpx[socks] -``` - -You can now configure a client to make requests via a proxy using the SOCKS protocol: - -```python -httpx.Client(proxy='socks5://user:pass@host:port') -``` - -## Timeout Configuration - -HTTPX is careful to enforce timeouts everywhere by default. - -The default behavior is to raise a `TimeoutException` after 5 seconds of -network inactivity. 
- -### Setting and disabling timeouts - -You can set timeouts for an individual request: - -```python -# Using the top-level API: -httpx.get('http://example.com/api/v1/example', timeout=10.0) - -# Using a client instance: -with httpx.Client() as client: - client.get("http://example.com/api/v1/example", timeout=10.0) -``` - -Or disable timeouts for an individual request: - -```python -# Using the top-level API: -httpx.get('http://example.com/api/v1/example', timeout=None) - -# Using a client instance: -with httpx.Client() as client: - client.get("http://example.com/api/v1/example", timeout=None) -``` - -### Setting a default timeout on a client - -You can set a timeout on a client instance, which results in the given -`timeout` being used as the default for requests made with this client: - -```python -client = httpx.Client() # Use a default 5s timeout everywhere. -client = httpx.Client(timeout=10.0) # Use a default 10s timeout everywhere. -client = httpx.Client(timeout=None) # Disable all timeouts by default. -``` - -### Fine tuning the configuration - -HTTPX also allows you to specify the timeout behavior in more fine grained detail. - -There are four different types of timeouts that may occur. These are **connect**, -**read**, **write**, and **pool** timeouts. - -* The **connect** timeout specifies the maximum amount of time to wait until -a socket connection to the requested host is established. If HTTPX is unable to connect -within this time frame, a `ConnectTimeout` exception is raised. -* The **read** timeout specifies the maximum duration to wait for a chunk of -data to be received (for example, a chunk of the response body). If HTTPX is -unable to receive data within this time frame, a `ReadTimeout` exception is raised. -* The **write** timeout specifies the maximum duration to wait for a chunk of -data to be sent (for example, a chunk of the request body). If HTTPX is unable -to send data within this time frame, a `WriteTimeout` exception is raised. 
-* The **pool** timeout specifies the maximum duration to wait for acquiring -a connection from the connection pool. If HTTPX is unable to acquire a connection -within this time frame, a `PoolTimeout` exception is raised. A related -configuration here is the maximum number of allowable connections in the -connection pool, which is configured by the `limits` argument. - -You can configure the timeout behavior for any of these values... - -```python -# A client with a 60s timeout for connecting, and a 10s timeout elsewhere. -timeout = httpx.Timeout(10.0, connect=60.0) -client = httpx.Client(timeout=timeout) - -response = client.get('http://example.com/') -``` - -## Pool limit configuration - -You can control the connection pool size using the `limits` keyword -argument on the client. It takes instances of `httpx.Limits` which define: - -- `max_keepalive_connections`, number of allowable keep-alive connections, or `None` to always -allow. (Defaults 20) -- `max_connections`, maximum number of allowable connections, or `None` for no limits. -(Default 100) -- `keepalive_expiry`, time limit on idle keep-alive connections in seconds, or `None` for no limits. (Default 5) - -```python -limits = httpx.Limits(max_keepalive_connections=5, max_connections=10) -client = httpx.Client(limits=limits) -``` - -## Multipart file encoding - -As mentioned in the [quickstart](quickstart.md#sending-multipart-file-uploads) -multipart file encoding is available by passing a dictionary with the -name of the payloads as keys and either tuple of elements or a file-like object or a string as values. - -```pycon ->>> files = {'upload-file': ('report.xls', open('report.xls', 'rb'), 'application/vnd.ms-excel')} ->>> r = httpx.post("https://httpbin.org/post", files=files) ->>> print(r.text) -{ - ... - "files": { - "upload-file": "<... binary content ...>" - }, - ... 
-} -``` - -More specifically, if a tuple is used as a value, it must have between 2 and 3 elements: - -- The first element is an optional file name which can be set to `None`. -- The second element may be a file-like object or a string which will be automatically -encoded in UTF-8. -- An optional third element can be used to specify the -[MIME type](https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/MIME_Types) -of the file being uploaded. If not specified HTTPX will attempt to guess the MIME type based -on the file name, with unknown file extensions defaulting to "application/octet-stream". -If the file name is explicitly set to `None` then HTTPX will not include a content-type -MIME header field. - -```pycon ->>> files = {'upload-file': (None, 'text content', 'text/plain')} ->>> r = httpx.post("https://httpbin.org/post", files=files) ->>> print(r.text) -{ - ... - "files": {}, - "form": { - "upload-file": "text-content" - }, - ... -} -``` - -!!! tip - It is safe to upload large files this way. File uploads are streaming by default, meaning that only one chunk will be loaded into memory at a time. - - Non-file data fields can be included in the multipart form using by passing them to `data=...`. - -You can also send multiple files in one go with a multiple file field form. -To do that, pass a list of `(field, )` items instead of a dictionary, allowing you to pass multiple items with the same `field`. -For instance this request sends 2 files, `foo.png` and `bar.png` in one request on the `images` form field: - -```pycon ->>> files = [('images', ('foo.png', open('foo.png', 'rb'), 'image/png')), - ('images', ('bar.png', open('bar.png', 'rb'), 'image/png'))] ->>> r = httpx.post("https://httpbin.org/post", files=files) -``` - -## Customizing authentication - -When issuing requests or instantiating a client, the `auth` argument can be used to pass an authentication scheme to use. The `auth` argument may be one of the following... 
- -* A two-tuple of `username`/`password`, to be used with basic authentication. -* An instance of `httpx.BasicAuth()`, `httpx.DigestAuth()`, or `httpx.NetRCAuth()`. -* A callable, accepting a request and returning an authenticated request instance. -* An instance of subclasses of `httpx.Auth`. - -The most involved of these is the last, which allows you to create authentication flows involving one or more requests. A subclass of `httpx.Auth` should implement `def auth_flow(request)`, and yield any requests that need to be made... - -```python -class MyCustomAuth(httpx.Auth): - def __init__(self, token): - self.token = token - - def auth_flow(self, request): - # Send the request, with a custom `X-Authentication` header. - request.headers['X-Authentication'] = self.token - yield request -``` - -If the auth flow requires more than one request, you can issue multiple yields, and obtain the response in each case... - -```python -class MyCustomAuth(httpx.Auth): - def __init__(self, token): - self.token = token - - def auth_flow(self, request): - response = yield request - if response.status_code == 401: - # If the server issues a 401 response then resend the request, - # with a custom `X-Authentication` header. - request.headers['X-Authentication'] = self.token - yield request -``` - -Custom authentication classes are designed to not perform any I/O, so that they may be used with both sync and async client instances. If you are implementing an authentication scheme that requires the request body, then you need to indicate this on the class using a `requires_request_body` property. - -You will then be able to access `request.content` inside the `.auth_flow()` method. 
- -```python -class MyCustomAuth(httpx.Auth): - requires_request_body = True - - def __init__(self, token): - self.token = token - - def auth_flow(self, request): - response = yield request - if response.status_code == 401: - # If the server issues a 401 response then resend the request, - # with a custom `X-Authentication` header. - request.headers['X-Authentication'] = self.sign_request(...) - yield request - - def sign_request(self, request): - # Create a request signature, based on `request.method`, `request.url`, - # `request.headers`, and `request.content`. - ... -``` - -Similarly, if you are implementing a scheme that requires access to the response body, then use the `requires_response_body` property. You will then be able to access response body properties and methods such as `response.content`, `response.text`, `response.json()`, etc. - -```python -class MyCustomAuth(httpx.Auth): - requires_response_body = True - - def __init__(self, access_token, refresh_token, refresh_url): - self.access_token = access_token - self.refresh_token = refresh_token - self.refresh_url = refresh_url - - def auth_flow(self, request): - request.headers["X-Authentication"] = self.access_token - response = yield request - - if response.status_code == 401: - # If the server issues a 401 response, then issue a request to - # refresh tokens, and resend the request. - refresh_response = yield self.build_refresh_request() - self.update_tokens(refresh_response) - - request.headers["X-Authentication"] = self.access_token - yield request - - def build_refresh_request(self): - # Return an `httpx.Request` for refreshing tokens. - ... - - def update_tokens(self, response): - # Update the `.access_token` and `.refresh_token` tokens - # based on a refresh response. - data = response.json() - ... 
-``` - -If you _do_ need to perform I/O other than HTTP requests, such as accessing a disk-based cache, or you need to use concurrency primitives, such as locks, then you should override `.sync_auth_flow()` and `.async_auth_flow()` (instead of `.auth_flow()`). The former will be used by `httpx.Client`, while the latter will be used by `httpx.AsyncClient`. - -```python -import asyncio -import threading -import httpx - - -class MyCustomAuth(httpx.Auth): - def __init__(self): - self._sync_lock = threading.RLock() - self._async_lock = asyncio.Lock() - - def sync_get_token(self): - with self._sync_lock: - ... - - def sync_auth_flow(self, request): - token = self.sync_get_token() - request.headers["Authorization"] = f"Token {token}" - yield request - - async def async_get_token(self): - async with self._async_lock: - ... - - async def async_auth_flow(self, request): - token = await self.async_get_token() - request.headers["Authorization"] = f"Token {token}" - yield request -``` - -If you only want to support one of the two methods, then you should still override it, but raise an explicit `RuntimeError`. - -```python -import httpx -import sync_only_library - - -class MyCustomAuth(httpx.Auth): - def sync_auth_flow(self, request): - token = sync_only_library.get_token(...) - request.headers["Authorization"] = f"Token {token}" - yield request - - async def async_auth_flow(self, request): - raise RuntimeError("Cannot use a sync authentication class with httpx.AsyncClient") -``` - -## SSL certificates - -When making a request over HTTPS, HTTPX needs to verify the identity of the requested host. To do this, it uses a bundle of SSL certificates (a.k.a. CA bundle) delivered by a trusted certificate authority (CA). - -### Changing the verification defaults - -By default, HTTPX uses the CA bundle provided by [Certifi](https://pypi.org/project/certifi/). This is what you want in most cases, even though some advanced situations may require you to use a different set of certificates. 
- -If you'd like to use a custom CA bundle, you can use the `verify` parameter. - -```python -import httpx - -r = httpx.get("https://example.org", verify="path/to/client.pem") -``` - -Alternatively, you can pass a standard library `ssl.SSLContext`. - -```pycon ->>> import ssl ->>> import httpx ->>> context = ssl.create_default_context() ->>> context.load_verify_locations(cafile="/tmp/client.pem") ->>> httpx.get('https://example.org', verify=context) - -``` - -We also include a helper function for creating properly configured `SSLContext` instances. - -```pycon ->>> context = httpx.create_ssl_context() -``` - -The `create_ssl_context` function accepts the same set of SSL configuration arguments -(`trust_env`, `verify`, `cert` and `http2` arguments) -as `httpx.Client` or `httpx.AsyncClient` - -```pycon ->>> import httpx ->>> context = httpx.create_ssl_context(verify="/tmp/client.pem") ->>> httpx.get('https://example.org', verify=context) - -``` - -Or you can also disable the SSL verification entirely, which is _not_ recommended. - -```python -import httpx - -r = httpx.get("https://example.org", verify=False) -``` - -### SSL configuration on client instances - -If you're using a `Client()` instance, then you should pass any SSL settings when instantiating the client. - -```python -client = httpx.Client(verify=False) -``` - -The `client.get(...)` method and other request methods *do not* support changing the SSL settings on a per-request basis. If you need different SSL settings in different cases you should use more that one client instance, with different settings on each. Each client will then be using an isolated connection pool with a specific fixed SSL configuration on all connections within that pool. 
- -### Client Side Certificates - -You can also specify a local cert to use as a client-side certificate, either a path to an SSL certificate file, or two-tuple of (certificate file, key file), or a three-tuple of (certificate file, key file, password) - -```python -import httpx - -r = httpx.get("https://example.org", cert="path/to/client.pem") -``` - -Alternatively, - -```pycon ->>> cert = ("path/to/client.pem", "path/to/client.key") ->>> httpx.get("https://example.org", cert=cert) - -``` - -or - -```pycon ->>> cert = ("path/to/client.pem", "path/to/client.key", "password") ->>> httpx.get("https://example.org", cert=cert) - -``` - -### Making HTTPS requests to a local server - -When making requests to local servers, such as a development server running on `localhost`, you will typically be using unencrypted HTTP connections. - -If you do need to make HTTPS connections to a local server, for example to test an HTTPS-only service, you will need to create and use your own certificates. Here's one way to do it: - -1. Use [trustme](https://github.com/python-trio/trustme) to generate a pair of server key/cert files, and a client cert file. -1. Pass the server key/cert files when starting your local server. (This depends on the particular web server you're using. For example, [Uvicorn](https://www.uvicorn.org) provides the `--ssl-keyfile` and `--ssl-certfile` options.) -1. Tell HTTPX to use the certificates stored in `client.pem`: - -```pycon ->>> import httpx ->>> r = httpx.get("https://localhost:8000", verify="/tmp/client.pem") ->>> r -Response <200 OK> -``` - -## Custom Transports - -HTTPX's `Client` also accepts a `transport` argument. This argument allows you -to provide a custom Transport object that will be used to perform the actual -sending of the requests. - -### Usage - -For some advanced configuration you might need to instantiate a transport -class directly, and pass it to the client instance. 
One example is the -`local_address` configuration which is only available via this low-level API. - -```pycon ->>> import httpx ->>> transport = httpx.HTTPTransport(local_address="0.0.0.0") ->>> client = httpx.Client(transport=transport) -``` - -Connection retries are also available via this interface. Requests will be retried the given number of times in case an `httpx.ConnectError` or an `httpx.ConnectTimeout` occurs, allowing smoother operation under flaky networks. If you need other forms of retry behaviors, such as handling read/write errors or reacting to `503 Service Unavailable`, consider general-purpose tools such as [tenacity](https://github.com/jd/tenacity). - -```pycon ->>> import httpx ->>> transport = httpx.HTTPTransport(retries=1) ->>> client = httpx.Client(transport=transport) -``` - -Similarly, instantiating a transport directly provides a `uds` option for -connecting via a Unix Domain Socket that is only available via this low-level API: - -```pycon ->>> import httpx ->>> # Connect to the Docker API via a Unix Socket. ->>> transport = httpx.HTTPTransport(uds="/var/run/docker.sock") ->>> client = httpx.Client(transport=transport) ->>> response = client.get("http://docker/info") ->>> response.json() -{"ID": "...", "Containers": 4, "Images": 74, ...} -``` - -### urllib3 transport - -This [public gist](https://gist.github.com/florimondmanca/d56764d78d748eb9f73165da388e546e) provides a transport that uses the excellent [`urllib3` library](https://urllib3.readthedocs.io/en/latest/), and can be used with the sync `Client`... - -```pycon ->>> import httpx ->>> from urllib3_transport import URLLib3Transport ->>> client = httpx.Client(transport=URLLib3Transport()) ->>> client.get("https://example.org") - -``` - -### Writing custom transports - -A transport instance must implement the low-level Transport API, which deals -with sending a single request, and returning a response. 
You should either -subclass `httpx.BaseTransport` to implement a transport to use with `Client`, -or subclass `httpx.AsyncBaseTransport` to implement a transport to -use with `AsyncClient`. - -At the layer of the transport API we're using the familiar `Request` and -`Response` models. - -See the `handle_request` and `handle_async_request` docstrings for more details -on the specifics of the Transport API. - -A complete example of a custom transport implementation would be: - -```python -import json -import httpx - - -class HelloWorldTransport(httpx.BaseTransport): - """ - A mock transport that always returns a JSON "Hello, world!" response. - """ - - def handle_request(self, request): - message = {"text": "Hello, world!"} - content = json.dumps(message).encode("utf-8") - stream = httpx.ByteStream(content) - headers = [(b"content-type", b"application/json")] - return httpx.Response(200, headers=headers, stream=stream) -``` - -Which we can use in the same way: - -```pycon ->>> import httpx ->>> client = httpx.Client(transport=HelloWorldTransport()) ->>> response = client.get("https://example.org/") ->>> response.json() -{"text": "Hello, world!"} -``` - -### Mock transports - -During testing it can often be useful to be able to mock out a transport, -and return pre-determined responses, rather than making actual network requests. - -The `httpx.MockTransport` class accepts a handler function, which can be used -to map requests onto pre-determined responses: - -```python -def handler(request): - return httpx.Response(200, json={"text": "Hello, world!"}) - - -# Switch to a mock transport, if the TESTING environment variable is set. 
-if os.environ.get('TESTING', '').upper() == "TRUE": - transport = httpx.MockTransport(handler) -else: - transport = httpx.HTTPTransport() - -client = httpx.Client(transport=transport) -``` - -For more advanced use-cases you might want to take a look at either [the third-party -mocking library, RESPX](https://lundberg.github.io/respx/), or the [pytest-httpx library](https://github.com/Colin-b/pytest_httpx). - -### Mounting transports - -You can also mount transports against given schemes or domains, to control -which transport an outgoing request should be routed via, with [the same style -used for specifying proxy routing](#routing). - -```python -import httpx - -class HTTPSRedirectTransport(httpx.BaseTransport): - """ - A transport that always redirects to HTTPS. - """ - - def handle_request(self, method, url, headers, stream, extensions): - scheme, host, port, path = url - if port is None: - location = b"https://%s%s" % (host, path) - else: - location = b"https://%s:%d%s" % (host, port, path) - stream = httpx.ByteStream(b"") - headers = [(b"location", location)] - extensions = {} - return 303, headers, stream, extensions - - -# A client where any `http` requests are always redirected to `https` -mounts = {'http://': HTTPSRedirectTransport()} -client = httpx.Client(mounts=mounts) -``` - -A couple of other sketches of how you might take advantage of mounted transports... - -Disabling HTTP/2 on a single given domain... - -```python -mounts = { - "all://": httpx.HTTPTransport(http2=True), - "all://*example.org": httpx.HTTPTransport() -} -client = httpx.Client(mounts=mounts) -``` - -Mocking requests to a given domain: - -```python -# All requests to "example.org" should be mocked out. -# Other requests occur as usual. 
-
-def handler(request):
-    return httpx.Response(200, json={"text": "Hello, World!"})
-
-mounts = {"all://example.org": httpx.MockTransport(handler)}
-client = httpx.Client(mounts=mounts)
-```
-
-Adding support for custom schemes:
-
-```python
-# Support URLs like "file:///Users/sylvia_green/websites/new_client/index.html"
-mounts = {"file://": FileSystemTransport()}
-client = httpx.Client(mounts=mounts)
-```
-
-### Routing
-
-HTTPX provides a powerful mechanism for routing requests, allowing you to write complex rules that specify which transport should be used for each request.
-
-The `mounts` dictionary maps URL patterns to HTTP transports. HTTPX matches requested URLs against URL patterns to decide which transport should be used, if any. Matching is done from most specific URL patterns (e.g. `https://<domain>:<port>`) to least specific ones (e.g. `https://`).
-
-HTTPX supports routing requests based on **scheme**, **domain**, **port**, or a combination of these.
-
-#### Wildcard routing
-
-Route everything through a transport...
-
-```python
-mounts = {
-    "all://": httpx.HTTPTransport(proxy="http://localhost:8030"),
-}
-```
-
-#### Scheme routing
-
-Route HTTP requests through one transport, and HTTPS requests through another...
-
-```python
-mounts = {
-    "http://": httpx.HTTPTransport(proxy="http://localhost:8030"),
-    "https://": httpx.HTTPTransport(proxy="http://localhost:8031"),
-}
-```
-
-#### Domain routing
-
-Proxy all requests on domain "example.com", let other requests pass through...
-
-```python
-mounts = {
-    "all://example.com": httpx.HTTPTransport(proxy="http://localhost:8030"),
-}
-```
-
-Proxy HTTP requests on domain "example.com", let HTTPS and other requests pass through...
-
-```python
-mounts = {
-    "http://example.com": httpx.HTTPTransport(proxy="http://localhost:8030"),
-}
-```
-
-Proxy all requests to "example.com" and its subdomains, let other requests pass through...
- -```python -mounts = { - "all://*example.com": httpx.HTTPTransport(proxy="http://localhost:8030"), -} -``` - -Proxy all requests to strict subdomains of "example.com", let "example.com" and other requests pass through... - -```python -mounts = { - "all://*.example.com": httpx.HTTPTransport(proxy="http://localhost:8030"), -} -``` - -#### Port routing - -Proxy HTTPS requests on port 1234 to "example.com"... - -```python -mounts = { - "https://example.com:1234": httpx.HTTPTransport(proxy="http://localhost:8030"), -} -``` - -Proxy all requests on port 1234... - -```python -mounts = { - "all://*:1234": httpx.HTTPTransport(proxy="http://localhost:8030"), -} -``` - -#### No-proxy support - -It is also possible to define requests that _shouldn't_ be routed through the transport. - -To do so, pass `None` as the proxy URL. For example... - -```python -mounts = { - # Route requests through a proxy by default... - "all://": httpx.HTTPTransport(proxy="http://localhost:8031"), - # Except those for "example.com". - "all://example.com": None, -} -``` - -#### Complex configuration example - -You can combine the routing features outlined above to build complex proxy routing configurations. For example... - -```python -mounts = { - # Route all traffic through a proxy by default... - "all://": httpx.HTTPTransport(proxy="http://localhost:8030"), - # But don't use proxies for HTTPS requests to "domain.io"... - "https://domain.io": None, - # And use another proxy for requests to "example.com" and its subdomains... - "all://*example.com": httpx.HTTPTransport(proxy="http://localhost:8031"), - # And yet another proxy if HTTP is used, - # and the "internal" subdomain on port 5550 is requested... - "http://internal.example.com:5550": httpx.HTTPTransport(proxy="http://localhost:8032"), -} -``` - -#### Environment variables - -There are also environment variables that can be used to control the dictionary of the client mounts. -They can be used to configure HTTP proxying for clients. 
-
-See documentation on [`HTTP_PROXY`, `HTTPS_PROXY`, `ALL_PROXY`](environment_variables.md#http_proxy-https_proxy-all_proxy) for more information.
-
diff --git a/docs/advanced/authentication.md b/docs/advanced/authentication.md
new file mode 100644
index 0000000000..edcc15f815
--- /dev/null
+++ b/docs/advanced/authentication.md
@@ -0,0 +1,232 @@
+Authentication can either be included on a per-request basis...
+
+```pycon
+>>> auth = httpx.BasicAuth(username="username", password="secret")
+>>> client = httpx.Client()
+>>> response = client.get("https://www.example.com/", auth=auth)
+```
+
+Or configured on the client instance, ensuring that all outgoing requests will include authentication credentials...
+
+```pycon
+>>> auth = httpx.BasicAuth(username="username", password="secret")
+>>> client = httpx.Client(auth=auth)
+>>> response = client.get("https://www.example.com/")
+```
+
+## Basic authentication
+
+HTTP basic authentication is an unencrypted authentication scheme that uses a simple encoding of the username and password in the request `Authorization` header. Since it is unencrypted it should typically only be used over `https`, although this is not strictly enforced.
+
+```pycon
+>>> auth = httpx.BasicAuth(username="finley", password="secret")
+>>> client = httpx.Client(auth=auth)
+>>> response = client.get("https://httpbin.org/basic-auth/finley/secret")
+>>> response
+<Response [200 OK]>
+```
+
+## Digest authentication
+
+HTTP digest authentication is a challenge-response authentication scheme. Unlike basic authentication it never sends the password in cleartext — a hashed digest is sent instead — so it can also be used over unencrypted `http` connections. It requires an additional round-trip in order to negotiate the authentication.
+
+```pycon
+>>> auth = httpx.DigestAuth(username="olivia", password="secret")
+>>> client = httpx.Client(auth=auth)
+>>> response = client.get("https://httpbin.org/digest-auth/auth/olivia/secret")
+>>> response
+<Response [200 OK]>
+>>> response.history
+[<Response [401 Unauthorized]>]
+```
+
+## NetRC authentication
+
+HTTPX can be configured to use [a `.netrc` config file](https://everything.curl.dev/usingcurl/netrc) for authentication.
+
+The `.netrc` config file allows authentication credentials to be associated with specified hosts. When a request is made to a host that is found in the netrc file, the username and password will be included using HTTP basic authentication.
+
+Example `.netrc` file:
+
+```
+machine example.org
+login example-username
+password example-password
+
+machine python-httpx.org
+login other-username
+password other-password
+```
+
+Some examples of configuring `.netrc` authentication with `httpx`.
+
+Use the default `.netrc` file in the user's home directory:
+
+```pycon
+>>> auth = httpx.NetRCAuth()
+>>> client = httpx.Client(auth=auth)
+```
+
+Use an explicit path to a `.netrc` file:
+
+```pycon
+>>> auth = httpx.NetRCAuth(file="/path/to/.netrc")
+>>> client = httpx.Client(auth=auth)
+```
+
+Use the `NETRC` environment variable to configure a path to the `.netrc` file,
+or fall back to the default.
+
+```pycon
+>>> auth = httpx.NetRCAuth(file=os.environ.get("NETRC"))
+>>> client = httpx.Client(auth=auth)
+```
+
+The `NetRCAuth()` class uses [the `netrc.netrc()` function from the Python standard library](https://docs.python.org/3/library/netrc.html). See the documentation there for more details on exceptions that may be raised if the `.netrc` file is not found, or cannot be parsed.
+
+## Custom authentication schemes
+
+When issuing requests or instantiating a client, the `auth` argument can be used to pass an authentication scheme to use. The `auth` argument may be one of the following...
+
+* A two-tuple of `username`/`password`, to be used with basic authentication.
+* An instance of `httpx.BasicAuth()`, `httpx.DigestAuth()`, or `httpx.NetRCAuth()`. +* A callable, accepting a request and returning an authenticated request instance. +* An instance of subclasses of `httpx.Auth`. + +The most involved of these is the last, which allows you to create authentication flows involving one or more requests. A subclass of `httpx.Auth` should implement `def auth_flow(request)`, and yield any requests that need to be made... + +```python +class MyCustomAuth(httpx.Auth): + def __init__(self, token): + self.token = token + + def auth_flow(self, request): + # Send the request, with a custom `X-Authentication` header. + request.headers['X-Authentication'] = self.token + yield request +``` + +If the auth flow requires more than one request, you can issue multiple yields, and obtain the response in each case... + +```python +class MyCustomAuth(httpx.Auth): + def __init__(self, token): + self.token = token + + def auth_flow(self, request): + response = yield request + if response.status_code == 401: + # If the server issues a 401 response then resend the request, + # with a custom `X-Authentication` header. + request.headers['X-Authentication'] = self.token + yield request +``` + +Custom authentication classes are designed to not perform any I/O, so that they may be used with both sync and async client instances. If you are implementing an authentication scheme that requires the request body, then you need to indicate this on the class using a `requires_request_body` property. + +You will then be able to access `request.content` inside the `.auth_flow()` method. + +```python +class MyCustomAuth(httpx.Auth): + requires_request_body = True + + def __init__(self, token): + self.token = token + + def auth_flow(self, request): + response = yield request + if response.status_code == 401: + # If the server issues a 401 response then resend the request, + # with a custom `X-Authentication` header. 
+ request.headers['X-Authentication'] = self.sign_request(...) + yield request + + def sign_request(self, request): + # Create a request signature, based on `request.method`, `request.url`, + # `request.headers`, and `request.content`. + ... +``` + +Similarly, if you are implementing a scheme that requires access to the response body, then use the `requires_response_body` property. You will then be able to access response body properties and methods such as `response.content`, `response.text`, `response.json()`, etc. + +```python +class MyCustomAuth(httpx.Auth): + requires_response_body = True + + def __init__(self, access_token, refresh_token, refresh_url): + self.access_token = access_token + self.refresh_token = refresh_token + self.refresh_url = refresh_url + + def auth_flow(self, request): + request.headers["X-Authentication"] = self.access_token + response = yield request + + if response.status_code == 401: + # If the server issues a 401 response, then issue a request to + # refresh tokens, and resend the request. + refresh_response = yield self.build_refresh_request() + self.update_tokens(refresh_response) + + request.headers["X-Authentication"] = self.access_token + yield request + + def build_refresh_request(self): + # Return an `httpx.Request` for refreshing tokens. + ... + + def update_tokens(self, response): + # Update the `.access_token` and `.refresh_token` tokens + # based on a refresh response. + data = response.json() + ... +``` + +If you _do_ need to perform I/O other than HTTP requests, such as accessing a disk-based cache, or you need to use concurrency primitives, such as locks, then you should override `.sync_auth_flow()` and `.async_auth_flow()` (instead of `.auth_flow()`). The former will be used by `httpx.Client`, while the latter will be used by `httpx.AsyncClient`. 
+ +```python +import asyncio +import threading +import httpx + + +class MyCustomAuth(httpx.Auth): + def __init__(self): + self._sync_lock = threading.RLock() + self._async_lock = asyncio.Lock() + + def sync_get_token(self): + with self._sync_lock: + ... + + def sync_auth_flow(self, request): + token = self.sync_get_token() + request.headers["Authorization"] = f"Token {token}" + yield request + + async def async_get_token(self): + async with self._async_lock: + ... + + async def async_auth_flow(self, request): + token = await self.async_get_token() + request.headers["Authorization"] = f"Token {token}" + yield request +``` + +If you only want to support one of the two methods, then you should still override it, but raise an explicit `RuntimeError`. + +```python +import httpx +import sync_only_library + + +class MyCustomAuth(httpx.Auth): + def sync_auth_flow(self, request): + token = sync_only_library.get_token(...) + request.headers["Authorization"] = f"Token {token}" + yield request + + async def async_auth_flow(self, request): + raise RuntimeError("Cannot use a sync authentication class with httpx.AsyncClient") +``` \ No newline at end of file diff --git a/docs/advanced/clients.md b/docs/advanced/clients.md new file mode 100644 index 0000000000..6905724dcd --- /dev/null +++ b/docs/advanced/clients.md @@ -0,0 +1,324 @@ +!!! hint + If you are coming from Requests, `httpx.Client()` is what you can use instead of `requests.Session()`. + +## Why use a Client? + +!!! note "TL;DR" + If you do anything more than experimentation, one-off scripts, or prototypes, then you should use a `Client` instance. + +**More efficient usage of network resources** + +When you make requests using the top-level API as documented in the [Quickstart](quickstart.md) guide, HTTPX has to establish a new connection _for every single request_ (connections are not reused). As the number of requests to a host increases, this quickly becomes inefficient. 
+ +On the other hand, a `Client` instance uses [HTTP connection pooling](https://en.wikipedia.org/wiki/HTTP_persistent_connection). This means that when you make several requests to the same host, the `Client` will reuse the underlying TCP connection, instead of recreating one for every single request. + +This can bring **significant performance improvements** compared to using the top-level API, including: + +- Reduced latency across requests (no handshaking). +- Reduced CPU usage and round-trips. +- Reduced network congestion. + +**Extra features** + +`Client` instances also support features that aren't available at the top-level API, such as: + +- Cookie persistence across requests. +- Applying configuration across all outgoing requests. +- Sending requests through HTTP proxies. +- Using [HTTP/2](http2.md). + +The other sections on this page go into further detail about what you can do with a `Client` instance. + +## Usage + +The recommended way to use a `Client` is as a context manager. This will ensure that connections are properly cleaned up when leaving the `with` block: + +```python +with httpx.Client() as client: + ... +``` + +Alternatively, you can explicitly close the connection pool without block-usage using `.close()`: + +```python +client = httpx.Client() +try: + ... +finally: + client.close() +``` + +## Making requests + +Once you have a `Client`, you can send requests using `.get()`, `.post()`, etc. For example: + +```pycon +>>> with httpx.Client() as client: +... r = client.get('https://example.com') +... +>>> r + +``` + +These methods accept the same arguments as `httpx.get()`, `httpx.post()`, etc. This means that all features documented in the [Quickstart](quickstart.md) guide are also available at the client level. + +For example, to send a request with custom headers: + +```pycon +>>> with httpx.Client() as client: +... headers = {'X-Custom': 'value'} +... r = client.get('https://example.com', headers=headers) +... 
+>>> r.request.headers['X-Custom'] +'value' +``` + +## Sharing configuration across requests + +Clients allow you to apply configuration to all outgoing requests by passing parameters to the `Client` constructor. + +For example, to apply a set of custom headers _on every request_: + +```pycon +>>> url = 'http://httpbin.org/headers' +>>> headers = {'user-agent': 'my-app/0.0.1'} +>>> with httpx.Client(headers=headers) as client: +... r = client.get(url) +... +>>> r.json()['headers']['User-Agent'] +'my-app/0.0.1' +``` + +## Merging of configuration + +When a configuration option is provided at both the client-level and request-level, one of two things can happen: + +- For headers, query parameters and cookies, the values are combined together. For example: + +```pycon +>>> headers = {'X-Auth': 'from-client'} +>>> params = {'client_id': 'client1'} +>>> with httpx.Client(headers=headers, params=params) as client: +... headers = {'X-Custom': 'from-request'} +... params = {'request_id': 'request1'} +... r = client.get('https://example.com', headers=headers, params=params) +... +>>> r.request.url +URL('https://example.com?client_id=client1&request_id=request1') +>>> r.request.headers['X-Auth'] +'from-client' +>>> r.request.headers['X-Custom'] +'from-request' +``` + +- For all other parameters, the request-level value takes priority. For example: + +```pycon +>>> with httpx.Client(auth=('tom', 'mot123')) as client: +... r = client.get('https://example.com', auth=('alice', 'ecila123')) +... +>>> _, _, auth = r.request.headers['Authorization'].partition(' ') +>>> import base64 +>>> base64.b64decode(auth) +b'alice:ecila123' +``` + +If you need finer-grained control on the merging of client-level and request-level parameters, see [Request instances](#request-instances). + +## Other Client-only configuration options + +Additionally, `Client` accepts some configuration options that aren't available at the request level. 
+
+For example, `base_url` allows you to prepend a URL to all outgoing requests:
+
+```pycon
+>>> with httpx.Client(base_url='http://httpbin.org') as client:
+...     r = client.get('/headers')
+...
+>>> r.request.url
+URL('http://httpbin.org/headers')
+```
+
+For a list of all available client parameters, see the [`Client`](api.md#client) API reference.
+
+---
+
+## Request instances
+
+For maximum control over what gets sent over the wire, HTTPX supports building explicit [`Request`](api.md#request) instances:
+
+```python
+request = httpx.Request("GET", "https://example.com")
+```
+
+To dispatch a `Request` instance across to the network, create a [`Client` instance](#client-instances) and use `.send()`:
+
+```python
+with httpx.Client() as client:
+    response = client.send(request)
+    ...
+```
+
+If you need to mix client-level and request-level options in a way that is not supported by the default [Merging of configuration](#merging-of-configuration), you can use `.build_request()` and then make arbitrary modifications to the `Request` instance. For example:
+
+```python
+headers = {"X-Api-Key": "...", "X-Client-ID": "ABC123"}
+
+with httpx.Client(headers=headers) as client:
+    request = client.build_request("GET", "https://api.example.com")
+
+    print(request.headers["X-Client-ID"]) # "ABC123"
+
+    # Don't send the API key for this particular request.
+    del request.headers["X-Api-Key"]
+
+    response = client.send(request)
+    ...
+```
+
+## Monitoring download progress
+
+If you need to monitor download progress of large responses, you can use response streaming and inspect the `response.num_bytes_downloaded` property.
+
+This interface is required for properly determining download progress, because the total number of bytes returned by `response.content` or `response.iter_bytes()` will not always correspond with the raw content length of the response if HTTP response compression is being used.
+
+For example, showing a progress bar using the [`tqdm`](https://github.com/tqdm/tqdm) library while a response is being downloaded could be done like this…
+
+```python
+import tempfile
+
+import httpx
+from tqdm import tqdm
+
+with tempfile.NamedTemporaryFile() as download_file:
+    url = "https://speed.hetzner.de/100MB.bin"
+    with httpx.stream("GET", url) as response:
+        total = int(response.headers["Content-Length"])
+
+        with tqdm(total=total, unit_scale=True, unit_divisor=1024, unit="B") as progress:
+            num_bytes_downloaded = response.num_bytes_downloaded
+            for chunk in response.iter_bytes():
+                download_file.write(chunk)
+                progress.update(response.num_bytes_downloaded - num_bytes_downloaded)
+                num_bytes_downloaded = response.num_bytes_downloaded
+```
+
+![tqdm progress bar](../img/tqdm-progress.gif)
+
+Or an alternate example, this time using the [`rich`](https://github.com/willmcgugan/rich) library…
+
+```python
+import tempfile
+import httpx
+import rich.progress
+
+with tempfile.NamedTemporaryFile() as download_file:
+    url = "https://speed.hetzner.de/100MB.bin"
+    with httpx.stream("GET", url) as response:
+        total = int(response.headers["Content-Length"])
+
+        with rich.progress.Progress(
+            "[progress.percentage]{task.percentage:>3.0f}%",
+            rich.progress.BarColumn(bar_width=None),
+            rich.progress.DownloadColumn(),
+            rich.progress.TransferSpeedColumn(),
+        ) as progress:
+            download_task = progress.add_task("Download", total=total)
+            for chunk in response.iter_bytes():
+                download_file.write(chunk)
+                progress.update(download_task, completed=response.num_bytes_downloaded)
+```
+
+![rich progress bar](../img/rich-progress.gif)
+
+## Monitoring upload progress
+
+If you need to monitor upload progress of large requests, you can use request content generator streaming.
+
+For example, showing a progress bar using the [`tqdm`](https://github.com/tqdm/tqdm) library.
+ +```python +import io +import random + +import httpx +from tqdm import tqdm + + +def gen(): + """ + this is a complete example with generated random bytes. + you can replace `io.BytesIO` with real file object. + """ + total = 32 * 1024 * 1024 # 32m + with tqdm(ascii=True, unit_scale=True, unit='B', unit_divisor=1024, total=total) as bar: + with io.BytesIO(random.randbytes(total)) as f: + while data := f.read(1024): + yield data + bar.update(len(data)) + + +httpx.post("https://httpbin.org/post", content=gen()) +``` + +![tqdm progress bar](../img/tqdm-progress.gif) + +## Multipart file encoding + +As mentioned in the [quickstart](quickstart.md#sending-multipart-file-uploads) +multipart file encoding is available by passing a dictionary with the +name of the payloads as keys and either tuple of elements or a file-like object or a string as values. + +```pycon +>>> files = {'upload-file': ('report.xls', open('report.xls', 'rb'), 'application/vnd.ms-excel')} +>>> r = httpx.post("https://httpbin.org/post", files=files) +>>> print(r.text) +{ + ... + "files": { + "upload-file": "<... binary content ...>" + }, + ... +} +``` + +More specifically, if a tuple is used as a value, it must have between 2 and 3 elements: + +- The first element is an optional file name which can be set to `None`. +- The second element may be a file-like object or a string which will be automatically +encoded in UTF-8. +- An optional third element can be used to specify the +[MIME type](https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/MIME_Types) +of the file being uploaded. If not specified HTTPX will attempt to guess the MIME type based +on the file name, with unknown file extensions defaulting to "application/octet-stream". +If the file name is explicitly set to `None` then HTTPX will not include a content-type +MIME header field. 
+
+```pycon
+>>> files = {'upload-file': (None, 'text content', 'text/plain')}
+>>> r = httpx.post("https://httpbin.org/post", files=files)
+>>> print(r.text)
+{
+  ...
+  "files": {},
+  "form": {
+    "upload-file": "text content"
+  },
+  ...
+}
+```
+
+!!! tip
+    It is safe to upload large files this way. File uploads are streaming by default, meaning that only one chunk will be loaded into memory at a time.

+    Non-file data fields can be included in the multipart form by passing them to `data=...`.
+
+You can also send multiple files in one go with a multiple file field form.
+To do that, pass a list of `(field, <file>)` items instead of a dictionary, allowing you to pass multiple items with the same `field`.
+For instance this request sends 2 files, `foo.png` and `bar.png` in one request on the `images` form field:
+
+```pycon
+>>> files = [('images', ('foo.png', open('foo.png', 'rb'), 'image/png')),
+             ('images', ('bar.png', open('bar.png', 'rb'), 'image/png'))]
+>>> r = httpx.post("https://httpbin.org/post", files=files)
+```
diff --git a/docs/advanced/event-hooks.md b/docs/advanced/event-hooks.md
new file mode 100644
index 0000000000..28cf353d96
--- /dev/null
+++ b/docs/advanced/event-hooks.md
@@ -0,0 +1,65 @@
+HTTPX allows you to register "event hooks" with the client, that are called
+every time a particular type of event takes place.
+
+There are currently two event hooks:
+
+* `request` - Called after a request is fully prepared, but before it is sent to the network. Passed the `request` instance.
+* `response` - Called after the response has been fetched from the network, but before it is returned to the caller. Passed the `response` instance.
+
+These allow you to install client-wide functionality such as logging, monitoring or tracing.
+
+```python
+def log_request(request):
+    print(f"Request event hook: {request.method} {request.url} - Waiting for response")
+
+def log_response(response):
+    request = response.request
+    print(f"Response event hook: {request.method} {request.url} - Status {response.status_code}")
+
+client = httpx.Client(event_hooks={'request': [log_request], 'response': [log_response]})
+```
+
+You can also use these hooks to install response processing code, such as this
+example, which creates a client instance that always raises `httpx.HTTPStatusError`
+on 4xx and 5xx responses.
+
+```python
+def raise_on_4xx_5xx(response):
+    response.raise_for_status()
+
+client = httpx.Client(event_hooks={'response': [raise_on_4xx_5xx]})
+```
+
+!!! note
+    Response event hooks are called before determining if the response body
+    should be read or not.
+
+    If you need access to the response body inside an event hook, you'll
+    need to call `response.read()`, or for AsyncClients, `response.aread()`.
+
+The hooks are also allowed to modify `request` and `response` objects.
+
+```python
+def add_timestamp(request):
+    request.headers['x-request-timestamp'] = datetime.now(tz=timezone.utc).isoformat()
+
+client = httpx.Client(event_hooks={'request': [add_timestamp]})
+```
+
+Event hooks must always be set as a **list of callables**, and you may register
+multiple event hooks for each type of event.
+
+As well as being able to set event hooks on instantiating the client, there
+is also an `.event_hooks` property, that allows you to inspect and modify
+the installed hooks.
+
+```python
+client = httpx.Client()
+client.event_hooks['request'] = [log_request]
+client.event_hooks['response'] = [log_response, raise_on_4xx_5xx]
+```
+
+!!! note
+    If you are using HTTPX's async support, then you need to be aware that
+    hooks registered with `httpx.AsyncClient` MUST be async functions,
+    rather than plain functions.
diff --git a/docs/advanced/proxies.md b/docs/advanced/proxies.md new file mode 100644 index 0000000000..d51b241a2c --- /dev/null +++ b/docs/advanced/proxies.md @@ -0,0 +1,83 @@ +HTTPX supports setting up [HTTP proxies](https://en.wikipedia.org/wiki/Proxy_server#Web_proxy_servers) via the `proxy` parameter to be passed on client initialization or top-level API functions like `httpx.get(..., proxy=...)`. + +
+ +
Diagram of how a proxy works (source: Wikipedia). The left hand side "Internet" blob may be your HTTPX client requesting example.com through a proxy.
+
+ +## HTTP Proxies + +To route all traffic (HTTP and HTTPS) to a proxy located at `http://localhost:8030`, pass the proxy URL to the client... + +```python +with httpx.Client(proxy="http://localhost:8030") as client: + ... +``` + +For more advanced use cases, pass a mounts `dict`. For example, to route HTTP and HTTPS requests to 2 different proxies, respectively located at `http://localhost:8030`, and `http://localhost:8031`, pass a `dict` of proxy URLs: + +```python +proxy_mounts = { + "http://": httpx.HTTPTransport(proxy="http://localhost:8030"), + "https://": httpx.HTTPTransport(proxy="http://localhost:8031"), +} + +with httpx.Client(mounts=proxy_mounts) as client: + ... +``` + +For detailed information about proxy routing, see the [Routing](#routing) section. + +!!! tip "Gotcha" + In most cases, the proxy URL for the `https://` key _should_ use the `http://` scheme (that's not a typo!). + + This is because HTTP proxying requires initiating a connection with the proxy server. While it's possible that your proxy supports doing it via HTTPS, most proxies only support doing it via HTTP. + + For more information, see [FORWARD vs TUNNEL](#forward-vs-tunnel). + +## Authentication + +Proxy credentials can be passed as the `userinfo` section of the proxy URL. For example: + +```python +with httpx.Client(proxy="http://username:password@localhost:8030") as client: + ... +``` + +## Proxy mechanisms + +!!! note + This section describes **advanced** proxy concepts and functionality. + +### FORWARD vs TUNNEL + +In general, the flow for making an HTTP request through a proxy is as follows: + +1. The client connects to the proxy (initial connection request). +2. The proxy transfers data to the server on your behalf. + +How exactly step 2/ is performed depends on which of two proxying mechanisms is used: + +* **Forwarding**: the proxy makes the request for you, and sends back the response it obtained from the server. 
+* **Tunnelling**: the proxy establishes a TCP connection to the server on your behalf, and the client reuses this connection to send the request and receive the response. This is known as an [HTTP Tunnel](https://en.wikipedia.org/wiki/HTTP_tunnel). This mechanism is how you can access websites that use HTTPS from an HTTP proxy (the client "upgrades" the connection to HTTPS by performing the TLS handshake with the server over the TCP connection provided by the proxy). + +### Troubleshooting proxies + +If you encounter issues when setting up proxies, please refer to our [Troubleshooting guide](troubleshooting.md#proxies). + +## SOCKS + +In addition to HTTP proxies, `httpcore` also supports proxies using the SOCKS protocol. +This is an optional feature that requires an additional third-party library be installed before use. + +You can install SOCKS support using `pip`: + +```shell +$ pip install httpx[socks] +``` + +You can now configure a client to make requests via a proxy using the SOCKS protocol: + +```python +httpx.Client(proxy='socks5://user:pass@host:port') +``` diff --git a/docs/advanced/resource-limits.md b/docs/advanced/resource-limits.md new file mode 100644 index 0000000000..2002428326 --- /dev/null +++ b/docs/advanced/resource-limits.md @@ -0,0 +1,13 @@ +You can control the connection pool size using the `limits` keyword +argument on the client. It takes instances of `httpx.Limits` which define: + +- `max_keepalive_connections`, number of allowable keep-alive connections, or `None` to always +allow. (Defaults 20) +- `max_connections`, maximum number of allowable connections, or `None` for no limits. +(Default 100) +- `keepalive_expiry`, time limit on idle keep-alive connections in seconds, or `None` for no limits. 
(Default 5) + +```python +limits = httpx.Limits(max_keepalive_connections=5, max_connections=10) +client = httpx.Client(limits=limits) +``` \ No newline at end of file diff --git a/docs/advanced/ssl.md b/docs/advanced/ssl.md new file mode 100644 index 0000000000..d96bbe1979 --- /dev/null +++ b/docs/advanced/ssl.md @@ -0,0 +1,100 @@ +When making a request over HTTPS, HTTPX needs to verify the identity of the requested host. To do this, it uses a bundle of SSL certificates (a.k.a. CA bundle) delivered by a trusted certificate authority (CA). + +## Changing the verification defaults + +By default, HTTPX uses the CA bundle provided by [Certifi](https://pypi.org/project/certifi/). This is what you want in most cases, even though some advanced situations may require you to use a different set of certificates. + +If you'd like to use a custom CA bundle, you can use the `verify` parameter. + +```python +import httpx + +r = httpx.get("https://example.org", verify="path/to/client.pem") +``` + +Alternatively, you can pass a standard library `ssl.SSLContext`. + +```pycon +>>> import ssl +>>> import httpx +>>> context = ssl.create_default_context() +>>> context.load_verify_locations(cafile="/tmp/client.pem") +>>> httpx.get('https://example.org', verify=context) + +``` + +We also include a helper function for creating properly configured `SSLContext` instances. + +```pycon +>>> context = httpx.create_ssl_context() +``` + +The `create_ssl_context` function accepts the same set of SSL configuration arguments +(`trust_env`, `verify`, `cert` and `http2` arguments) +as `httpx.Client` or `httpx.AsyncClient` + +```pycon +>>> import httpx +>>> context = httpx.create_ssl_context(verify="/tmp/client.pem") +>>> httpx.get('https://example.org', verify=context) + +``` + +Or you can also disable the SSL verification entirely, which is _not_ recommended. 
+
+```python
+import httpx
+
+r = httpx.get("https://example.org", verify=False)
+```
+
+## SSL configuration on client instances
+
+If you're using a `Client()` instance, then you should pass any SSL settings when instantiating the client.
+
+```python
+client = httpx.Client(verify=False)
+```
+
+The `client.get(...)` method and other request methods *do not* support changing the SSL settings on a per-request basis. If you need different SSL settings in different cases you should use more than one client instance, with different settings on each. Each client will then be using an isolated connection pool with a specific fixed SSL configuration on all connections within that pool.
+
+## Client Side Certificates
+
+You can also specify a local cert to use as a client-side certificate, either a path to an SSL certificate file, a two-tuple of (certificate file, key file), or a three-tuple of (certificate file, key file, password).
+
+```python
+cert = "path/to/client.pem"
+client = httpx.Client(cert=cert)
+response = client.get("https://example.org")
+```
+
+Alternatively...
+
+```python
+cert = ("path/to/client.pem", "path/to/client.key")
+client = httpx.Client(cert=cert)
+response = client.get("https://example.org")
+```
+
+Or...
+
+```python
+cert = ("path/to/client.pem", "path/to/client.key", "password")
+client = httpx.Client(cert=cert)
+response = client.get("https://example.org")
+```
+
+## Making HTTPS requests to a local server
+
+When making requests to local servers, such as a development server running on `localhost`, you will typically be using unencrypted HTTP connections.
+
+If you do need to make HTTPS connections to a local server, for example to test an HTTPS-only service, you will need to create and use your own certificates. Here's one way to do it:
+
+1. Use [trustme](https://github.com/python-trio/trustme) to generate a pair of server key/cert files, and a client cert file.
+1. Pass the server key/cert files when starting your local server. 
(This depends on the particular web server you're using. For example, [Uvicorn](https://www.uvicorn.org) provides the `--ssl-keyfile` and `--ssl-certfile` options.) +1. Tell HTTPX to use the certificates stored in `client.pem`: + +```python +client = httpx.Client(verify="/tmp/client.pem") +response = client.get("https://localhost:8000") +``` diff --git a/docs/advanced/text-encodings.md b/docs/advanced/text-encodings.md new file mode 100644 index 0000000000..5565f02695 --- /dev/null +++ b/docs/advanced/text-encodings.md @@ -0,0 +1,75 @@ +When accessing `response.text`, we need to decode the response bytes into a unicode text representation. + +By default `httpx` will use `"charset"` information included in the response `Content-Type` header to determine how the response bytes should be decoded into text. + +In cases where no charset information is included on the response, the default behaviour is to assume "utf-8" encoding, which is by far the most widely used text encoding on the internet. + +## Using the default encoding + +To understand this better let's start by looking at the default behaviour for text decoding... + +```python +import httpx +# Instantiate a client with the default configuration. +client = httpx.Client() +# Using the client... +response = client.get(...) +print(response.encoding) # This will either print the charset given in + # the Content-Type charset, or else "utf-8". +print(response.text) # The text will either be decoded with the Content-Type + # charset, or using "utf-8". +``` + +This is normally absolutely fine. Most servers will respond with a properly formatted Content-Type header, including a charset encoding. And in most cases where no charset encoding is included, UTF-8 is very likely to be used, since it is so widely adopted. + +## Using an explicit encoding + +In some cases we might be making requests to a site where no character set information is being set explicitly by the server, but we know what the encoding is. 
In this case it's best to set the default encoding explicitly on the client. + +```python +import httpx +# Instantiate a client with a Japanese character set as the default encoding. +client = httpx.Client(default_encoding="shift-jis") +# Using the client... +response = client.get(...) +print(response.encoding) # This will either print the charset given in + # the Content-Type charset, or else "shift-jis". +print(response.text) # The text will either be decoded with the Content-Type + # charset, or using "shift-jis". +``` + +## Using auto-detection + +In cases where the server is not reliably including character set information, and where we don't know what encoding is being used, we can enable auto-detection to make a best-guess attempt when decoding from bytes to text. + +To use auto-detection you need to set the `default_encoding` argument to a callable instead of a string. This callable should be a function which takes the input bytes as an argument and returns the character set to use for decoding those bytes to text. + +There are two widely used Python packages which both handle this functionality: + +* [`chardet`](https://chardet.readthedocs.io/) - This is a well established package, and is a port of [the auto-detection code in Mozilla](https://www-archive.mozilla.org/projects/intl/chardet.html). +* [`charset-normalizer`](https://charset-normalizer.readthedocs.io/) - A newer package, motivated by `chardet`, with a different approach. + +Let's take a look at installing autodetection using one of these packages... + +```shell +$ pip install httpx +$ pip install chardet +``` + +Once `chardet` is installed, we can configure a client to use character-set autodetection. + +```python +import httpx +import chardet + +def autodetect(content): + return chardet.detect(content).get("encoding") + +# Using a client with character-set autodetection enabled. +client = httpx.Client(default_encoding=autodetect) +response = client.get(...) 
+print(response.encoding) # This will either print the charset given in + # the Content-Type charset, or else the auto-detected + # character set. +print(response.text) +``` diff --git a/docs/advanced/timeouts.md b/docs/advanced/timeouts.md new file mode 100644 index 0000000000..aedcfb627f --- /dev/null +++ b/docs/advanced/timeouts.md @@ -0,0 +1,71 @@ +HTTPX is careful to enforce timeouts everywhere by default. + +The default behavior is to raise a `TimeoutException` after 5 seconds of +network inactivity. + +## Setting and disabling timeouts + +You can set timeouts for an individual request: + +```python +# Using the top-level API: +httpx.get('http://example.com/api/v1/example', timeout=10.0) + +# Using a client instance: +with httpx.Client() as client: + client.get("http://example.com/api/v1/example", timeout=10.0) +``` + +Or disable timeouts for an individual request: + +```python +# Using the top-level API: +httpx.get('http://example.com/api/v1/example', timeout=None) + +# Using a client instance: +with httpx.Client() as client: + client.get("http://example.com/api/v1/example", timeout=None) +``` + +## Setting a default timeout on a client + +You can set a timeout on a client instance, which results in the given +`timeout` being used as the default for requests made with this client: + +```python +client = httpx.Client() # Use a default 5s timeout everywhere. +client = httpx.Client(timeout=10.0) # Use a default 10s timeout everywhere. +client = httpx.Client(timeout=None) # Disable all timeouts by default. +``` + +## Fine tuning the configuration + +HTTPX also allows you to specify the timeout behavior in more fine grained detail. + +There are four different types of timeouts that may occur. These are **connect**, +**read**, **write**, and **pool** timeouts. + +* The **connect** timeout specifies the maximum amount of time to wait until +a socket connection to the requested host is established. 
If HTTPX is unable to connect +within this time frame, a `ConnectTimeout` exception is raised. +* The **read** timeout specifies the maximum duration to wait for a chunk of +data to be received (for example, a chunk of the response body). If HTTPX is +unable to receive data within this time frame, a `ReadTimeout` exception is raised. +* The **write** timeout specifies the maximum duration to wait for a chunk of +data to be sent (for example, a chunk of the request body). If HTTPX is unable +to send data within this time frame, a `WriteTimeout` exception is raised. +* The **pool** timeout specifies the maximum duration to wait for acquiring +a connection from the connection pool. If HTTPX is unable to acquire a connection +within this time frame, a `PoolTimeout` exception is raised. A related +configuration here is the maximum number of allowable connections in the +connection pool, which is configured by the `limits` argument. + +You can configure the timeout behavior for any of these values... + +```python +# A client with a 60s timeout for connecting, and a 10s timeout elsewhere. +timeout = httpx.Timeout(10.0, connect=60.0) +client = httpx.Client(timeout=timeout) + +response = client.get('http://example.com/') +``` \ No newline at end of file diff --git a/docs/advanced/transports.md b/docs/advanced/transports.md new file mode 100644 index 0000000000..100b562469 --- /dev/null +++ b/docs/advanced/transports.md @@ -0,0 +1,344 @@ +HTTPX's `Client` also accepts a `transport` argument. This argument allows you +to provide a custom Transport object that will be used to perform the actual +sending of the requests. + +## HTTPTransport + +For some advanced configuration you might need to instantiate a transport +class directly, and pass it to the client instance. One example is the +`local_address` configuration which is only available via this low-level API. 
+ +```pycon +>>> import httpx +>>> transport = httpx.HTTPTransport(local_address="0.0.0.0") +>>> client = httpx.Client(transport=transport) +``` + +Connection retries are also available via this interface. Requests will be retried the given number of times in case an `httpx.ConnectError` or an `httpx.ConnectTimeout` occurs, allowing smoother operation under flaky networks. If you need other forms of retry behaviors, such as handling read/write errors or reacting to `503 Service Unavailable`, consider general-purpose tools such as [tenacity](https://github.com/jd/tenacity). + +```pycon +>>> import httpx +>>> transport = httpx.HTTPTransport(retries=1) +>>> client = httpx.Client(transport=transport) +``` + +Similarly, instantiating a transport directly provides a `uds` option for +connecting via a Unix Domain Socket that is only available via this low-level API: + +```pycon +>>> import httpx +>>> # Connect to the Docker API via a Unix Socket. +>>> transport = httpx.HTTPTransport(uds="/var/run/docker.sock") +>>> client = httpx.Client(transport=transport) +>>> response = client.get("http://docker/info") +>>> response.json() +{"ID": "...", "Containers": 4, "Images": 74, ...} +``` + +## WSGI Transport + +You can configure an `httpx` client to call directly into a Python web application using the WSGI protocol. + +This is particularly useful for two main use-cases: + +* Using `httpx` as a client inside test cases. +* Mocking out external services during tests or in dev/staging environments. + +Here's an example of integrating against a Flask application: + +```python +from flask import Flask +import httpx + + +app = Flask(__name__) + +@app.route("/") +def hello(): + return "Hello World!" + +with httpx.Client(app=app, base_url="http://testserver") as client: + r = client.get("/") + assert r.status_code == 200 + assert r.text == "Hello World!" +``` + +For some more complex cases you might need to customize the WSGI transport. 
This allows you to: + +* Inspect 500 error responses rather than raise exceptions by setting `raise_app_exceptions=False`. +* Mount the WSGI application at a subpath by setting `script_name` (WSGI). +* Use a given client address for requests by setting `remote_addr` (WSGI). + +For example: + +```python +# Instantiate a client that makes WSGI requests with a client IP of "1.2.3.4". +transport = httpx.WSGITransport(app=app, remote_addr="1.2.3.4") +with httpx.Client(transport=transport, base_url="http://testserver") as client: + ... +``` + +## urllib3 transport + +This [public gist](https://gist.github.com/florimondmanca/d56764d78d748eb9f73165da388e546e) provides a transport that uses the excellent [`urllib3` library](https://urllib3.readthedocs.io/en/latest/), and can be used with the sync `Client`... + +```pycon +>>> import httpx +>>> from urllib3_transport import URLLib3Transport +>>> client = httpx.Client(transport=URLLib3Transport()) +>>> client.get("https://example.org") + +``` + +## Custom transports + +A transport instance must implement the low-level Transport API, which deals +with sending a single request, and returning a response. You should either +subclass `httpx.BaseTransport` to implement a transport to use with `Client`, +or subclass `httpx.AsyncBaseTransport` to implement a transport to +use with `AsyncClient`. + +At the layer of the transport API we're using the familiar `Request` and +`Response` models. + +See the `handle_request` and `handle_async_request` docstrings for more details +on the specifics of the Transport API. + +A complete example of a custom transport implementation would be: + +```python +import json +import httpx + + +class HelloWorldTransport(httpx.BaseTransport): + """ + A mock transport that always returns a JSON "Hello, world!" response. 
+ """ + + def handle_request(self, request): + message = {"text": "Hello, world!"} + content = json.dumps(message).encode("utf-8") + stream = httpx.ByteStream(content) + headers = [(b"content-type", b"application/json")] + return httpx.Response(200, headers=headers, stream=stream) +``` + +Which we can use in the same way: + +```pycon +>>> import httpx +>>> client = httpx.Client(transport=HelloWorldTransport()) +>>> response = client.get("https://example.org/") +>>> response.json() +{"text": "Hello, world!"} +``` + +## Mock transports + +During testing it can often be useful to be able to mock out a transport, +and return pre-determined responses, rather than making actual network requests. + +The `httpx.MockTransport` class accepts a handler function, which can be used +to map requests onto pre-determined responses: + +```python +def handler(request): + return httpx.Response(200, json={"text": "Hello, world!"}) + + +# Switch to a mock transport, if the TESTING environment variable is set. +if os.environ.get('TESTING', '').upper() == "TRUE": + transport = httpx.MockTransport(handler) +else: + transport = httpx.HTTPTransport() + +client = httpx.Client(transport=transport) +``` + +For more advanced use-cases you might want to take a look at either [the third-party +mocking library, RESPX](https://lundberg.github.io/respx/), or the [pytest-httpx library](https://github.com/Colin-b/pytest_httpx). + +## Mounting transports + +You can also mount transports against given schemes or domains, to control +which transport an outgoing request should be routed via, with [the same style +used for specifying proxy routing](#routing). + +```python +import httpx + +class HTTPSRedirectTransport(httpx.BaseTransport): + """ + A transport that always redirects to HTTPS. 
+    """
+
+    def handle_request(self, method, url, headers, stream, extensions):
+        scheme, host, port, path = url
+        if port is None:
+            location = b"https://%s%s" % (host, path)
+        else:
+            location = b"https://%s:%d%s" % (host, port, path)
+        stream = httpx.ByteStream(b"")
+        headers = [(b"location", location)]
+        extensions = {}
+        return 303, headers, stream, extensions
+
+
+# A client where any `http` requests are always redirected to `https`
+mounts = {'http://': HTTPSRedirectTransport()}
+client = httpx.Client(mounts=mounts)
+```
+
+A couple of other sketches of how you might take advantage of mounted transports...
+
+Disabling HTTP/2 on a single given domain...
+
+```python
+mounts = {
+    "all://": httpx.HTTPTransport(http2=True),
+    "all://*example.org": httpx.HTTPTransport()
+}
+client = httpx.Client(mounts=mounts)
+```
+
+Mocking requests to a given domain:
+
+```python
+# All requests to "example.org" should be mocked out.
+# Other requests occur as usual.
+def handler(request):
+    return httpx.Response(200, json={"text": "Hello, World!"})
+
+mounts = {"all://example.org": httpx.MockTransport(handler)}
+client = httpx.Client(mounts=mounts)
+```
+
+Adding support for custom schemes:
+
+```python
+# Support URLs like "file:///Users/sylvia_green/websites/new_client/index.html"
+mounts = {"file://": FileSystemTransport()}
+client = httpx.Client(mounts=mounts)
+```
+
+### Routing
+
+HTTPX provides a powerful mechanism for routing requests, allowing you to write complex rules that specify which transport should be used for each request.
+
+The `mounts` dictionary maps URL patterns to HTTP transports. HTTPX matches requested URLs against URL patterns to decide which transport should be used, if any. Matching is done from most specific URL patterns (e.g. `https://<domain>:<port>`) to least specific ones (e.g. `https://`).
+
+HTTPX supports routing requests based on **scheme**, **domain**, **port**, or a combination of these. 
+ +### Wildcard routing + +Route everything through a transport... + +```python +mounts = { + "all://": httpx.HTTPTransport(proxy="http://localhost:8030"), +} +``` + +### Scheme routing + +Route HTTP requests through one transport, and HTTPS requests through another... + +```python +mounts = { + "http://": httpx.HTTPTransport(proxy="http://localhost:8030"), + "https://": httpx.HTTPTransport(proxy="http://localhost:8031"), +} +``` + +### Domain routing + +Proxy all requests on domain "example.com", let other requests pass through... + +```python +mounts = { + "all://example.com": httpx.HTTPTransport(proxy="http://localhost:8030"), +} +``` + +Proxy HTTP requests on domain "example.com", let HTTPS and other requests pass through... + +```python +mounts = { + "http://example.com": httpx.HTTPTransport(proxy="http://localhost:8030"), +} +``` + +Proxy all requests to "example.com" and its subdomains, let other requests pass through... + +```python +mounts = { + "all://*example.com": httpx.HTTPTransport(proxy="http://localhost:8030"), +} +``` + +Proxy all requests to strict subdomains of "example.com", let "example.com" and other requests pass through... + +```python +mounts = { + "all://*.example.com": httpx.HTTPTransport(proxy="http://localhost:8030"), +} +``` + +### Port routing + +Proxy HTTPS requests on port 1234 to "example.com"... + +```python +mounts = { + "https://example.com:1234": httpx.HTTPTransport(proxy="http://localhost:8030"), +} +``` + +Proxy all requests on port 1234... + +```python +mounts = { + "all://*:1234": httpx.HTTPTransport(proxy="http://localhost:8030"), +} +``` + +### No-proxy support + +It is also possible to define requests that _shouldn't_ be routed through the transport. + +To do so, pass `None` as the proxy URL. For example... + +```python +mounts = { + # Route requests through a proxy by default... + "all://": httpx.HTTPTransport(proxy="http://localhost:8031"), + # Except those for "example.com". 
+ "all://example.com": None, +} +``` + +### Complex configuration example + +You can combine the routing features outlined above to build complex proxy routing configurations. For example... + +```python +mounts = { + # Route all traffic through a proxy by default... + "all://": httpx.HTTPTransport(proxy="http://localhost:8030"), + # But don't use proxies for HTTPS requests to "domain.io"... + "https://domain.io": None, + # And use another proxy for requests to "example.com" and its subdomains... + "all://*example.com": httpx.HTTPTransport(proxy="http://localhost:8031"), + # And yet another proxy if HTTP is used, + # and the "internal" subdomain on port 5550 is requested... + "http://internal.example.com:5550": httpx.HTTPTransport(proxy="http://localhost:8032"), +} +``` + +### Environment variables + +There are also environment variables that can be used to control the dictionary of the client mounts. +They can be used to configure HTTP proxying for clients. + +See documentation on [`HTTP_PROXY`, `HTTPS_PROXY`, `ALL_PROXY`](environment_variables.md#http_proxy-https_proxy-all_proxy) for more information. 
diff --git a/mkdocs.yml b/mkdocs.yml index c0ccd80566..7c6fcbd56d 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -16,8 +16,6 @@ theme: toggle: icon: 'material/lightbulb-outline' name: 'Switch to light mode' - features: - - navigation.sections repo_name: encode/httpx repo_url: https://github.com/encode/httpx/ @@ -25,9 +23,17 @@ edit_uri: "" nav: - Introduction: 'index.md' - - Usage: - - QuickStart: 'quickstart.md' - - Advanced Usage: 'advanced.md' + - QuickStart: 'quickstart.md' + - Advanced: + - Clients: 'advanced/clients.md' + - Authentication: 'advanced/authentication.md' + - SSL: 'advanced/ssl.md' + - Proxies: 'advanced/proxies.md' + - Timeouts: 'advanced/timeouts.md' + - Resource Limits: 'advanced/resource-limits.md' + - Event Hooks: 'advanced/event-hooks.md' + - Transports: 'advanced/transports.md' + - Text Encodings: 'advanced/text-encodings.md' - Guides: - Async Support: 'async.md' - HTTP/2 Support: 'http2.md' From 419d3a9d80d0c4072f6cb58eeb306148ae89e2e9 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 15 Jan 2024 10:04:09 +0000 Subject: [PATCH 14/26] Bump the python-packages group with 3 updates (#3055) Bumps the python-packages group with 3 updates: [ruff](https://github.com/astral-sh/ruff), [trio](https://github.com/python-trio/trio) and [uvicorn](https://github.com/encode/uvicorn). 
Updates `ruff` from 0.1.9 to 0.1.13 - [Release notes](https://github.com/astral-sh/ruff/releases) - [Changelog](https://github.com/astral-sh/ruff/blob/main/CHANGELOG.md) - [Commits](https://github.com/astral-sh/ruff/compare/v0.1.9...v0.1.13) Updates `trio` from 0.22.2 to 0.24.0 - [Release notes](https://github.com/python-trio/trio/releases) - [Commits](https://github.com/python-trio/trio/compare/v0.22.2...v0.24.0) Updates `uvicorn` from 0.24.0.post1 to 0.25.0 - [Release notes](https://github.com/encode/uvicorn/releases) - [Changelog](https://github.com/encode/uvicorn/blob/master/CHANGELOG.md) - [Commits](https://github.com/encode/uvicorn/compare/0.24.0.post1...0.25.0) --- updated-dependencies: - dependency-name: ruff dependency-type: direct:production update-type: version-update:semver-patch dependency-group: python-packages - dependency-name: trio dependency-type: direct:production update-type: version-update:semver-minor dependency-group: python-packages - dependency-name: uvicorn dependency-type: direct:production update-type: version-update:semver-minor dependency-group: python-packages ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Tom Christie --- requirements.txt | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements.txt b/requirements.txt index 218f06c6e5..5582acda15 100644 --- a/requirements.txt +++ b/requirements.txt @@ -22,8 +22,8 @@ coverage[toml]==7.4.0 cryptography==41.0.7 mypy==1.8.0 pytest==7.4.4 -ruff==0.1.9 -trio==0.22.2 +ruff==0.1.13 +trio==0.24.0 trio-typing==0.10.0 trustme==1.1.0 -uvicorn==0.24.0.post1 +uvicorn==0.25.0 From 73e688875a3579b67eee5795839b9660a1cfe5d8 Mon Sep 17 00:00:00 2001 From: Kar Petrosyan <92274156+karpetrosyan@users.noreply.github.com> Date: Mon, 15 Jan 2024 15:15:31 +0400 Subject: [PATCH 15/26] Fix sections references (#3058) --- docs/advanced/clients.md | 14 +++++++------- docs/advanced/proxies.md | 2 +- docs/advanced/transports.md | 2 +- docs/async.md | 2 +- docs/compatibility.md | 8 ++++---- docs/environment_variables.md | 2 +- docs/index.md | 4 ++-- docs/quickstart.md | 2 +- docs/third_party_packages.md | 4 ++-- docs/troubleshooting.md | 4 ++-- 10 files changed, 22 insertions(+), 22 deletions(-) diff --git a/docs/advanced/clients.md b/docs/advanced/clients.md index 6905724dcd..a55fc596fb 100644 --- a/docs/advanced/clients.md +++ b/docs/advanced/clients.md @@ -8,7 +8,7 @@ **More efficient usage of network resources** -When you make requests using the top-level API as documented in the [Quickstart](quickstart.md) guide, HTTPX has to establish a new connection _for every single request_ (connections are not reused). As the number of requests to a host increases, this quickly becomes inefficient. +When you make requests using the top-level API as documented in the [Quickstart](../quickstart.md) guide, HTTPX has to establish a new connection _for every single request_ (connections are not reused). As the number of requests to a host increases, this quickly becomes inefficient. 
On the other hand, a `Client` instance uses [HTTP connection pooling](https://en.wikipedia.org/wiki/HTTP_persistent_connection). This means that when you make several requests to the same host, the `Client` will reuse the underlying TCP connection, instead of recreating one for every single request. @@ -25,7 +25,7 @@ This can bring **significant performance improvements** compared to using the to - Cookie persistence across requests. - Applying configuration across all outgoing requests. - Sending requests through HTTP proxies. -- Using [HTTP/2](http2.md). +- Using [HTTP/2](../http2.md). The other sections on this page go into further detail about what you can do with a `Client` instance. @@ -60,7 +60,7 @@ Once you have a `Client`, you can send requests using `.get()`, `.post()`, etc. ``` -These methods accept the same arguments as `httpx.get()`, `httpx.post()`, etc. This means that all features documented in the [Quickstart](quickstart.md) guide are also available at the client level. +These methods accept the same arguments as `httpx.get()`, `httpx.post()`, etc. This means that all features documented in the [Quickstart](../quickstart.md) guide are also available at the client level. For example, to send a request with custom headers: @@ -139,13 +139,13 @@ For example, `base_url` allows you to prepend an URL to all outgoing requests: URL('http://httpbin.org/headers') ``` -For a list of all available client parameters, see the [`Client`](api.md#client) API reference. +For a list of all available client parameters, see the [`Client`](../api.md#client) API reference. 
--- ## Request instances -For maximum control on what gets sent over the wire, HTTPX supports building explicit [`Request`](api.md#request) instances: +For maximum control on what gets sent over the wire, HTTPX supports building explicit [`Request`](../api.md#request) instances: ```python request = httpx.Request("GET", "https://example.com") @@ -203,7 +203,7 @@ with tempfile.NamedTemporaryFile() as download_file: num_bytes_downloaded = response.num_bytes_downloaded ``` -![tqdm progress bar](img/tqdm-progress.gif) +![tqdm progress bar](../img/tqdm-progress.gif) Or an alternate example, this time using the [`rich`](https://github.com/willmcgugan/rich) library… @@ -265,7 +265,7 @@ httpx.post("https://httpbin.org/post", content=gen()) ## Multipart file encoding -As mentioned in the [quickstart](quickstart.md#sending-multipart-file-uploads) +As mentioned in the [quickstart](../quickstart.md#sending-multipart-file-uploads) multipart file encoding is available by passing a dictionary with the name of the payloads as keys and either tuple of elements or a file-like object or a string as values. diff --git a/docs/advanced/proxies.md b/docs/advanced/proxies.md index d51b241a2c..2a6b7d5f36 100644 --- a/docs/advanced/proxies.md +++ b/docs/advanced/proxies.md @@ -63,7 +63,7 @@ How exactly step 2/ is performed depends on which of two proxying mechanisms is ### Troubleshooting proxies -If you encounter issues when setting up proxies, please refer to our [Troubleshooting guide](troubleshooting.md#proxies). +If you encounter issues when setting up proxies, please refer to our [Troubleshooting guide](../troubleshooting.md#proxies). ## SOCKS diff --git a/docs/advanced/transports.md b/docs/advanced/transports.md index 100b562469..135797d5cd 100644 --- a/docs/advanced/transports.md +++ b/docs/advanced/transports.md @@ -341,4 +341,4 @@ mounts = { There are also environment variables that can be used to control the dictionary of the client mounts. 
They can be used to configure HTTP proxying for clients. -See documentation on [`HTTP_PROXY`, `HTTPS_PROXY`, `ALL_PROXY`](environment_variables.md#http_proxy-https_proxy-all_proxy) for more information. +See documentation on [`HTTP_PROXY`, `HTTPS_PROXY`, `ALL_PROXY`](../environment_variables.md#http_proxy-https_proxy-all_proxy) for more information. diff --git a/docs/async.md b/docs/async.md index 1138c30c56..9b679006a6 100644 --- a/docs/async.md +++ b/docs/async.md @@ -84,7 +84,7 @@ The async response streaming methods are: * `Response.aiter_raw()` - For streaming the raw response bytes, without applying content decoding. * `Response.aclose()` - For closing the response. You don't usually need this, since `.stream` block closes the response automatically on exit. -For situations when context block usage is not practical, it is possible to enter "manual mode" by sending a [`Request` instance](./advanced.md#request-instances) using `client.send(..., stream=True)`. +For situations when context block usage is not practical, it is possible to enter "manual mode" by sending a [`Request` instance](advanced/clients.md#request-instances) using `client.send(..., stream=True)`. Example in the context of forwarding the response to a streaming web endpoint with [Starlette](https://www.starlette.io): diff --git a/docs/compatibility.md b/docs/compatibility.md index 7190b65898..e820a67b07 100644 --- a/docs/compatibility.md +++ b/docs/compatibility.md @@ -159,7 +159,7 @@ httpx.get('https://www.example.com', timeout=None) HTTPX uses the mounts argument for HTTP proxying and transport routing. It can do much more than proxies and allows you to configure more than just the proxy route. -For more detailed documentation, see [Mounting Transports](advanced.md#mounting-transports). +For more detailed documentation, see [Mounting Transports](advanced/transports.md#mounting-transports). 
When using `httpx.Client(mounts={...})` to map to a selection of different transports, we use full URL schemes, such as `mounts={"http://": ..., "https://": ...}`. @@ -197,9 +197,9 @@ We don't support `response.is_ok` since the naming is ambiguous there, and might ## Request instantiation -There is no notion of [prepared requests](https://requests.readthedocs.io/en/stable/user/advanced/#prepared-requests) in HTTPX. If you need to customize request instantiation, see [Request instances](advanced.md#request-instances). +There is no notion of [prepared requests](https://requests.readthedocs.io/en/stable/user/advanced/#prepared-requests) in HTTPX. If you need to customize request instantiation, see [Request instances](advanced/clients.md#request-instances). -Besides, `httpx.Request()` does not support the `auth`, `timeout`, `follow_redirects`, `mounts`, `verify` and `cert` parameters. However these are available in `httpx.request`, `httpx.get`, `httpx.post` etc., as well as on [`Client` instances](advanced.md#client-instances). +Besides, `httpx.Request()` does not support the `auth`, `timeout`, `follow_redirects`, `mounts`, `verify` and `cert` parameters. However these are available in `httpx.request`, `httpx.get`, `httpx.post` etc., as well as on [`Client` instances](advanced/clients.md#client-instances). ## Mocking @@ -227,4 +227,4 @@ For both query params (`params=`) and form data (`data=`), `requests` supports s In HTTPX, event hooks may access properties of requests and responses, but event hook callbacks cannot mutate the original request/response. -If you are looking for more control, consider checking out [Custom Transports](advanced.md#custom-transports). +If you are looking for more control, consider checking out [Custom Transports](advanced/transports.md#custom-transports). 
diff --git a/docs/environment_variables.md b/docs/environment_variables.md index 71329fc16c..28fdc5e8af 100644 --- a/docs/environment_variables.md +++ b/docs/environment_variables.md @@ -75,7 +75,7 @@ The environment variables documented below are used as a convention by various H * [cURL](https://github.com/curl/curl/blob/master/docs/MANUAL.md#environment-variables) * [requests](https://github.com/psf/requests/blob/master/docs/user/advanced.rst#proxies) -For more information on using proxies in HTTPX, see [HTTP Proxying](advanced.md#http-proxying). +For more information on using proxies in HTTPX, see [HTTP Proxying](advanced/proxies.md#http-proxying). ### `HTTP_PROXY`, `HTTPS_PROXY`, `ALL_PROXY` diff --git a/docs/index.md b/docs/index.md index ec9746697d..86b6d1cbaa 100644 --- a/docs/index.md +++ b/docs/index.md @@ -68,7 +68,7 @@ HTTPX builds on the well-established usability of `requests`, and gives you: * A broadly [requests-compatible API](compatibility.md). * Standard synchronous interface, but with [async support if you need it](async.md). * HTTP/1.1 [and HTTP/2 support](http2.md). -* Ability to make requests directly to [WSGI applications](advanced.md#calling-into-python-web-apps) or [ASGI applications](async.md#calling-into-python-web-apps). +* Ability to make requests directly to [WSGI applications](async.md#calling-into-python-web-apps) or [ASGI applications](async.md#calling-into-python-web-apps). * Strict timeouts everywhere. * Fully type annotated. * 100% test coverage. @@ -95,7 +95,7 @@ Plus all the standard features of `requests`... For a run-through of all the basics, head over to the [QuickStart](quickstart.md). -For more advanced topics, see the [Advanced Usage](advanced.md) section, +For more advanced topics, see the **Advanced** section, the [async support](async.md) section, or the [HTTP/2](http2.md) section. The [Developer Interface](api.md) provides a comprehensive API reference. 
diff --git a/docs/quickstart.md b/docs/quickstart.md index 068547ffc9..974119f72c 100644 --- a/docs/quickstart.md +++ b/docs/quickstart.md @@ -462,7 +462,7 @@ You can also disable the timeout behavior completely... >>> httpx.get('https://github.com/', timeout=None) ``` -For advanced timeout management, see [Timeout fine-tuning](advanced.md#fine-tuning-the-configuration). +For advanced timeout management, see [Timeout fine-tuning](advanced/timeouts.md#fine-tuning-the-configuration). ## Authentication diff --git a/docs/third_party_packages.md b/docs/third_party_packages.md index 3d5f4778ec..f6ce96d702 100644 --- a/docs/third_party_packages.md +++ b/docs/third_party_packages.md @@ -28,7 +28,7 @@ An asynchronous GitHub API library. Includes [HTTPX support](https://gidgethub.r [GitHub](https://github.com/Colin-b/httpx_auth) - [Documentation](https://colin-b.github.io/httpx_auth/) -Provides authentication classes to be used with HTTPX [authentication parameter](advanced.md#customizing-authentication). +Provides authentication classes to be used with HTTPX [authentication parameter](advanced/authentication.md#customizing-authentication). ### pytest-HTTPX @@ -80,4 +80,4 @@ A library for scraping the web built on top of HTTPX. [GitHub](https://gist.github.com/florimondmanca/d56764d78d748eb9f73165da388e546e) -This public gist provides an example implementation for a [custom transport](advanced.md#custom-transports) implementation on top of the battle-tested [`urllib3`](https://urllib3.readthedocs.io) library. +This public gist provides an example implementation for a [custom transport](advanced/transports.md#custom-transports) implementation on top of the battle-tested [`urllib3`](https://urllib3.readthedocs.io) library. 
diff --git a/docs/troubleshooting.md b/docs/troubleshooting.md index a0cb210ccf..a2ca15f564 100644 --- a/docs/troubleshooting.md +++ b/docs/troubleshooting.md @@ -27,7 +27,7 @@ mounts = { Using this setup, you're telling HTTPX to connect to the proxy using HTTP for HTTP requests, and using HTTPS for HTTPS requests. -But if you get the error above, it is likely that your proxy doesn't support connecting via HTTPS. Don't worry: that's a [common gotcha](advanced.md#example). +But if you get the error above, it is likely that your proxy doesn't support connecting via HTTPS. Don't worry: that's a [common gotcha](advanced/proxies.md#http-proxies). Change the scheme of your HTTPS proxy to `http://...` instead of `https://...`: @@ -46,7 +46,7 @@ with httpx.Client(proxy=proxy) as client: ... ``` -For more information, see [Proxies: FORWARD vs TUNNEL](advanced.md#forward-vs-tunnel). +For more information, see [Proxies: FORWARD vs TUNNEL](advanced/proxies.md#forward-vs-tunnel). --- From d76607b112bf0bfbc79dc4dd8566101c69dbfa6e Mon Sep 17 00:00:00 2001 From: Nyakku Shigure Date: Mon, 15 Jan 2024 20:30:09 +0800 Subject: [PATCH 16/26] Adding an indent to fix wrong rendering in warning block (#3056) --- docs/async.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/async.md b/docs/async.md index 9b679006a6..d54a353d62 100644 --- a/docs/async.md +++ b/docs/async.md @@ -54,7 +54,7 @@ async with httpx.AsyncClient() as client: ``` !!! warning -In order to get the most benefit from connection pooling, make sure you're not instantiating multiple client instances - for example by using `async with` inside a "hot loop". This can be achieved either by having a single scoped client that's passed throughout wherever it's needed, or by having a single global client instance. + In order to get the most benefit from connection pooling, make sure you're not instantiating multiple client instances - for example by using `async with` inside a "hot loop". 
This can be achieved either by having a single scoped client that's passed throughout wherever it's needed, or by having a single global client instance. Alternatively, use `await client.aclose()` if you want to close a client explicitly: From 15f925336c6bad2f5b2711553a6b6763923175de Mon Sep 17 00:00:00 2001 From: Tom Christie Date: Mon, 15 Jan 2024 13:01:04 +0000 Subject: [PATCH 17/26] Drop outdated section (#3057) --- docs/advanced/transports.md | 12 ------------ 1 file changed, 12 deletions(-) diff --git a/docs/advanced/transports.md b/docs/advanced/transports.md index 135797d5cd..fdc58b0841 100644 --- a/docs/advanced/transports.md +++ b/docs/advanced/transports.md @@ -78,18 +78,6 @@ with httpx.Client(transport=transport, base_url="http://testserver") as client: ... ``` -## urllib3 transport - -This [public gist](https://gist.github.com/florimondmanca/d56764d78d748eb9f73165da388e546e) provides a transport that uses the excellent [`urllib3` library](https://urllib3.readthedocs.io/en/latest/), and can be used with the sync `Client`... 
- -```pycon ->>> import httpx ->>> from urllib3_transport import URLLib3Transport ->>> client = httpx.Client(transport=URLLib3Transport()) ->>> client.get("https://example.org") - -``` - ## Custom transports A transport instance must implement the low-level Transport API, which deals From c7cd6aa5bdcf9f9d63e7dcea33cb78d6920aeed8 Mon Sep 17 00:00:00 2001 From: T-256 <132141463+T-256@users.noreply.github.com> Date: Tue, 16 Jan 2024 13:23:23 +0330 Subject: [PATCH 18/26] test `obfuscate_sensitive_headers` via public api (#3063) --- tests/test_utils.py | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/tests/test_utils.py b/tests/test_utils.py index 5391f9c22d..2f5b2e13cf 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -12,7 +12,6 @@ get_ca_bundle_from_env, get_environment_proxies, is_https_redirect, - obfuscate_sensitive_headers, parse_header_links, same_origin, ) @@ -215,10 +214,9 @@ def test_get_environment_proxies(environment, proxies): ], ) def test_obfuscate_sensitive_headers(headers, output): - bytes_headers = [(k.encode(), v.encode()) for k, v in headers] - bytes_output = [(k.encode(), v.encode()) for k, v in output] - assert list(obfuscate_sensitive_headers(headers)) == output - assert list(obfuscate_sensitive_headers(bytes_headers)) == bytes_output + as_dict = {k: v for k, v in output} + headers_class = httpx.Headers({k: v for k, v in headers}) + assert repr(headers_class) == f"Headers({as_dict!r})" def test_same_origin(): From 4f6edf36e93fd9f83ff95b065718fd6bd0c4d3c5 Mon Sep 17 00:00:00 2001 From: T-256 <132141463+T-256@users.noreply.github.com> Date: Tue, 16 Jan 2024 13:55:02 +0330 Subject: [PATCH 19/26] test `parse_header_links` via public api (#3061) * test `parse_header_links` via public api * add no-link test * Update tests/test_utils.py --------- Co-authored-by: Tom Christie --- httpx/_models.py | 14 +++++++------- tests/test_utils.py | 9 +++++++-- 2 files changed, 14 insertions(+), 9 deletions(-) diff --git 
a/httpx/_models.py b/httpx/_models.py index b8617cdab5..e08248a8d4 100644 --- a/httpx/_models.py +++ b/httpx/_models.py @@ -774,13 +774,13 @@ def links(self) -> typing.Dict[typing.Optional[str], typing.Dict[str, str]]: Returns the parsed header links of the response, if any """ header = self.headers.get("link") - ldict = {} - if header: - links = parse_header_links(header) - for link in links: - key = link.get("rel") or link.get("url") - ldict[key] = link - return ldict + if header is None: + return {} + + return { + (link.get("rel") or link.get("url")): link + for link in parse_header_links(header) + } @property def num_bytes_downloaded(self) -> int: diff --git a/tests/test_utils.py b/tests/test_utils.py index 2f5b2e13cf..0ef87d18d8 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -12,7 +12,6 @@ get_ca_bundle_from_env, get_environment_proxies, is_https_redirect, - parse_header_links, same_origin, ) @@ -80,7 +79,13 @@ def test_guess_by_bom(encoding, expected): ), ) def test_parse_header_links(value, expected): - assert parse_header_links(value) == expected + all_links = httpx.Response(200, headers={"link": value}).links.values() + assert all(link in all_links for link in expected) + + +def test_parse_header_links_no_link(): + all_links = httpx.Response(200).links + assert all_links == {} def test_logging_request(server, caplog): From 371b6e946c9e70df3dce79590c195448eac3f635 Mon Sep 17 00:00:00 2001 From: Kar Petrosyan <92274156+karpetrosyan@users.noreply.github.com> Date: Wed, 24 Jan 2024 06:30:22 -0800 Subject: [PATCH 20/26] Use `__future__.annotations` (#3068) * Switch to new typing style * lint --- httpx/_api.py | 168 +++++----- httpx/_auth.py | 40 +-- httpx/_client.py | 501 +++++++++++++++--------------- httpx/_config.py | 32 +- httpx/_content.py | 44 ++- httpx/_decoders.py | 20 +- httpx/_exceptions.py | 18 +- httpx/_main.py | 30 +- httpx/_models.py | 130 ++++---- httpx/_multipart.py | 26 +- httpx/_status_codes.py | 4 +- httpx/_transports/asgi.py | 
12 +- httpx/_transports/base.py | 14 +- httpx/_transports/default.py | 34 +- httpx/_transports/mock.py | 4 +- httpx/_transports/wsgi.py | 10 +- httpx/_urlparse.py | 26 +- httpx/_urls.py | 44 ++- httpx/_utils.py | 50 +-- tests/client/test_async_client.py | 6 +- tests/client/test_client.py | 6 +- tests/test_decoders.py | 4 +- tests/test_exceptions.py | 4 +- tests/test_multipart.py | 4 +- tests/test_wsgi.py | 16 +- 25 files changed, 623 insertions(+), 624 deletions(-) diff --git a/httpx/_api.py b/httpx/_api.py index c7af947218..b5821cc49e 100644 --- a/httpx/_api.py +++ b/httpx/_api.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import typing from contextlib import contextmanager @@ -25,20 +27,20 @@ def request( method: str, url: URLTypes, *, - params: typing.Optional[QueryParamTypes] = None, - content: typing.Optional[RequestContent] = None, - data: typing.Optional[RequestData] = None, - files: typing.Optional[RequestFiles] = None, - json: typing.Optional[typing.Any] = None, - headers: typing.Optional[HeaderTypes] = None, - cookies: typing.Optional[CookieTypes] = None, - auth: typing.Optional[AuthTypes] = None, - proxy: typing.Optional[ProxyTypes] = None, - proxies: typing.Optional[ProxiesTypes] = None, + params: QueryParamTypes | None = None, + content: RequestContent | None = None, + data: RequestData | None = None, + files: RequestFiles | None = None, + json: typing.Any | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | None = None, + proxy: ProxyTypes | None = None, + proxies: ProxiesTypes | None = None, timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, follow_redirects: bool = False, verify: VerifyTypes = True, - cert: typing.Optional[CertTypes] = None, + cert: CertTypes | None = None, trust_env: bool = True, ) -> Response: """ @@ -120,20 +122,20 @@ def stream( method: str, url: URLTypes, *, - params: typing.Optional[QueryParamTypes] = None, - content: typing.Optional[RequestContent] = None, - 
data: typing.Optional[RequestData] = None, - files: typing.Optional[RequestFiles] = None, - json: typing.Optional[typing.Any] = None, - headers: typing.Optional[HeaderTypes] = None, - cookies: typing.Optional[CookieTypes] = None, - auth: typing.Optional[AuthTypes] = None, - proxy: typing.Optional[ProxyTypes] = None, - proxies: typing.Optional[ProxiesTypes] = None, + params: QueryParamTypes | None = None, + content: RequestContent | None = None, + data: RequestData | None = None, + files: RequestFiles | None = None, + json: typing.Any | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | None = None, + proxy: ProxyTypes | None = None, + proxies: ProxiesTypes | None = None, timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, follow_redirects: bool = False, verify: VerifyTypes = True, - cert: typing.Optional[CertTypes] = None, + cert: CertTypes | None = None, trust_env: bool = True, ) -> typing.Iterator[Response]: """ @@ -173,14 +175,14 @@ def stream( def get( url: URLTypes, *, - params: typing.Optional[QueryParamTypes] = None, - headers: typing.Optional[HeaderTypes] = None, - cookies: typing.Optional[CookieTypes] = None, - auth: typing.Optional[AuthTypes] = None, - proxy: typing.Optional[ProxyTypes] = None, - proxies: typing.Optional[ProxiesTypes] = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | None = None, + proxy: ProxyTypes | None = None, + proxies: ProxiesTypes | None = None, follow_redirects: bool = False, - cert: typing.Optional[CertTypes] = None, + cert: CertTypes | None = None, verify: VerifyTypes = True, timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, trust_env: bool = True, @@ -213,14 +215,14 @@ def get( def options( url: URLTypes, *, - params: typing.Optional[QueryParamTypes] = None, - headers: typing.Optional[HeaderTypes] = None, - cookies: typing.Optional[CookieTypes] = None, - auth: 
typing.Optional[AuthTypes] = None, - proxy: typing.Optional[ProxyTypes] = None, - proxies: typing.Optional[ProxiesTypes] = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | None = None, + proxy: ProxyTypes | None = None, + proxies: ProxiesTypes | None = None, follow_redirects: bool = False, - cert: typing.Optional[CertTypes] = None, + cert: CertTypes | None = None, verify: VerifyTypes = True, timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, trust_env: bool = True, @@ -253,14 +255,14 @@ def options( def head( url: URLTypes, *, - params: typing.Optional[QueryParamTypes] = None, - headers: typing.Optional[HeaderTypes] = None, - cookies: typing.Optional[CookieTypes] = None, - auth: typing.Optional[AuthTypes] = None, - proxy: typing.Optional[ProxyTypes] = None, - proxies: typing.Optional[ProxiesTypes] = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | None = None, + proxy: ProxyTypes | None = None, + proxies: ProxiesTypes | None = None, follow_redirects: bool = False, - cert: typing.Optional[CertTypes] = None, + cert: CertTypes | None = None, verify: VerifyTypes = True, timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, trust_env: bool = True, @@ -293,18 +295,18 @@ def head( def post( url: URLTypes, *, - content: typing.Optional[RequestContent] = None, - data: typing.Optional[RequestData] = None, - files: typing.Optional[RequestFiles] = None, - json: typing.Optional[typing.Any] = None, - params: typing.Optional[QueryParamTypes] = None, - headers: typing.Optional[HeaderTypes] = None, - cookies: typing.Optional[CookieTypes] = None, - auth: typing.Optional[AuthTypes] = None, - proxy: typing.Optional[ProxyTypes] = None, - proxies: typing.Optional[ProxiesTypes] = None, + content: RequestContent | None = None, + data: RequestData | None = None, + files: RequestFiles | None = None, + json: 
typing.Any | None = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | None = None, + proxy: ProxyTypes | None = None, + proxies: ProxiesTypes | None = None, follow_redirects: bool = False, - cert: typing.Optional[CertTypes] = None, + cert: CertTypes | None = None, verify: VerifyTypes = True, timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, trust_env: bool = True, @@ -338,18 +340,18 @@ def post( def put( url: URLTypes, *, - content: typing.Optional[RequestContent] = None, - data: typing.Optional[RequestData] = None, - files: typing.Optional[RequestFiles] = None, - json: typing.Optional[typing.Any] = None, - params: typing.Optional[QueryParamTypes] = None, - headers: typing.Optional[HeaderTypes] = None, - cookies: typing.Optional[CookieTypes] = None, - auth: typing.Optional[AuthTypes] = None, - proxy: typing.Optional[ProxyTypes] = None, - proxies: typing.Optional[ProxiesTypes] = None, + content: RequestContent | None = None, + data: RequestData | None = None, + files: RequestFiles | None = None, + json: typing.Any | None = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | None = None, + proxy: ProxyTypes | None = None, + proxies: ProxiesTypes | None = None, follow_redirects: bool = False, - cert: typing.Optional[CertTypes] = None, + cert: CertTypes | None = None, verify: VerifyTypes = True, timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, trust_env: bool = True, @@ -383,18 +385,18 @@ def put( def patch( url: URLTypes, *, - content: typing.Optional[RequestContent] = None, - data: typing.Optional[RequestData] = None, - files: typing.Optional[RequestFiles] = None, - json: typing.Optional[typing.Any] = None, - params: typing.Optional[QueryParamTypes] = None, - headers: typing.Optional[HeaderTypes] = None, - cookies: typing.Optional[CookieTypes] = None, - auth: typing.Optional[AuthTypes] = 
None, - proxy: typing.Optional[ProxyTypes] = None, - proxies: typing.Optional[ProxiesTypes] = None, + content: RequestContent | None = None, + data: RequestData | None = None, + files: RequestFiles | None = None, + json: typing.Any | None = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | None = None, + proxy: ProxyTypes | None = None, + proxies: ProxiesTypes | None = None, follow_redirects: bool = False, - cert: typing.Optional[CertTypes] = None, + cert: CertTypes | None = None, verify: VerifyTypes = True, timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, trust_env: bool = True, @@ -428,14 +430,14 @@ def patch( def delete( url: URLTypes, *, - params: typing.Optional[QueryParamTypes] = None, - headers: typing.Optional[HeaderTypes] = None, - cookies: typing.Optional[CookieTypes] = None, - auth: typing.Optional[AuthTypes] = None, - proxy: typing.Optional[ProxyTypes] = None, - proxies: typing.Optional[ProxiesTypes] = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | None = None, + proxy: ProxyTypes | None = None, + proxies: ProxiesTypes | None = None, follow_redirects: bool = False, - cert: typing.Optional[CertTypes] = None, + cert: CertTypes | None = None, verify: VerifyTypes = True, timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, trust_env: bool = True, diff --git a/httpx/_auth.py b/httpx/_auth.py index e8bc0cd961..903e399617 100644 --- a/httpx/_auth.py +++ b/httpx/_auth.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import hashlib import os import re @@ -124,18 +126,14 @@ class BasicAuth(Auth): and uses HTTP Basic authentication. 
""" - def __init__( - self, username: typing.Union[str, bytes], password: typing.Union[str, bytes] - ) -> None: + def __init__(self, username: str | bytes, password: str | bytes) -> None: self._auth_header = self._build_auth_header(username, password) def auth_flow(self, request: Request) -> typing.Generator[Request, Response, None]: request.headers["Authorization"] = self._auth_header yield request - def _build_auth_header( - self, username: typing.Union[str, bytes], password: typing.Union[str, bytes] - ) -> str: + def _build_auth_header(self, username: str | bytes, password: str | bytes) -> str: userpass = b":".join((to_bytes(username), to_bytes(password))) token = b64encode(userpass).decode() return f"Basic {token}" @@ -146,7 +144,7 @@ class NetRCAuth(Auth): Use a 'netrc' file to lookup basic auth credentials based on the url host. """ - def __init__(self, file: typing.Optional[str] = None) -> None: + def __init__(self, file: str | None = None) -> None: # Lazily import 'netrc'. # There's no need for us to load this module unless 'NetRCAuth' is being used. 
import netrc @@ -165,16 +163,14 @@ def auth_flow(self, request: Request) -> typing.Generator[Request, Response, Non ) yield request - def _build_auth_header( - self, username: typing.Union[str, bytes], password: typing.Union[str, bytes] - ) -> str: + def _build_auth_header(self, username: str | bytes, password: str | bytes) -> str: userpass = b":".join((to_bytes(username), to_bytes(password))) token = b64encode(userpass).decode() return f"Basic {token}" class DigestAuth(Auth): - _ALGORITHM_TO_HASH_FUNCTION: typing.Dict[str, typing.Callable[[bytes], "_Hash"]] = { + _ALGORITHM_TO_HASH_FUNCTION: dict[str, typing.Callable[[bytes], _Hash]] = { "MD5": hashlib.md5, "MD5-SESS": hashlib.md5, "SHA": hashlib.sha1, @@ -185,12 +181,10 @@ class DigestAuth(Auth): "SHA-512-SESS": hashlib.sha512, } - def __init__( - self, username: typing.Union[str, bytes], password: typing.Union[str, bytes] - ) -> None: + def __init__(self, username: str | bytes, password: str | bytes) -> None: self._username = to_bytes(username) self._password = to_bytes(password) - self._last_challenge: typing.Optional[_DigestAuthChallenge] = None + self._last_challenge: _DigestAuthChallenge | None = None self._nonce_count = 1 def auth_flow(self, request: Request) -> typing.Generator[Request, Response, None]: @@ -226,7 +220,7 @@ def auth_flow(self, request: Request) -> typing.Generator[Request, Response, Non def _parse_challenge( self, request: Request, response: Response, auth_header: str - ) -> "_DigestAuthChallenge": + ) -> _DigestAuthChallenge: """ Returns a challenge from a Digest WWW-Authenticate header. These take the form of: @@ -237,7 +231,7 @@ def _parse_challenge( # This method should only ever have been called with a Digest auth header. 
assert scheme.lower() == "digest" - header_dict: typing.Dict[str, str] = {} + header_dict: dict[str, str] = {} for field in parse_http_list(fields): key, value = field.strip().split("=", 1) header_dict[key] = unquote(value) @@ -256,7 +250,7 @@ def _parse_challenge( raise ProtocolError(message, request=request) from exc def _build_auth_header( - self, request: Request, challenge: "_DigestAuthChallenge" + self, request: Request, challenge: _DigestAuthChallenge ) -> str: hash_func = self._ALGORITHM_TO_HASH_FUNCTION[challenge.algorithm.upper()] @@ -311,7 +305,7 @@ def _get_client_nonce(self, nonce_count: int, nonce: bytes) -> bytes: return hashlib.sha1(s).hexdigest()[:16].encode() - def _get_header_value(self, header_fields: typing.Dict[str, bytes]) -> str: + def _get_header_value(self, header_fields: dict[str, bytes]) -> str: NON_QUOTED_FIELDS = ("algorithm", "qop", "nc") QUOTED_TEMPLATE = '{}="{}"' NON_QUOTED_TEMPLATE = "{}={}" @@ -329,9 +323,7 @@ def _get_header_value(self, header_fields: typing.Dict[str, bytes]) -> str: return header_value - def _resolve_qop( - self, qop: typing.Optional[bytes], request: Request - ) -> typing.Optional[bytes]: + def _resolve_qop(self, qop: bytes | None, request: Request) -> bytes | None: if qop is None: return None qops = re.split(b", ?", qop) @@ -349,5 +341,5 @@ class _DigestAuthChallenge(typing.NamedTuple): realm: bytes nonce: bytes algorithm: str - opaque: typing.Optional[bytes] - qop: typing.Optional[bytes] + opaque: bytes | None + qop: bytes | None diff --git a/httpx/_client.py b/httpx/_client.py index a0b4209c46..1f2145d12e 100644 --- a/httpx/_client.py +++ b/httpx/_client.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import datetime import enum import logging @@ -160,19 +162,17 @@ class BaseClient: def __init__( self, *, - auth: typing.Optional[AuthTypes] = None, - params: typing.Optional[QueryParamTypes] = None, - headers: typing.Optional[HeaderTypes] = None, - cookies: typing.Optional[CookieTypes] = None, + auth: 
AuthTypes | None = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, follow_redirects: bool = False, max_redirects: int = DEFAULT_MAX_REDIRECTS, - event_hooks: typing.Optional[ - typing.Mapping[str, typing.List[EventHook]] - ] = None, + event_hooks: None | (typing.Mapping[str, list[EventHook]]) = None, base_url: URLTypes = "", trust_env: bool = True, - default_encoding: typing.Union[str, typing.Callable[[bytes], str]] = "utf-8", + default_encoding: str | typing.Callable[[bytes], str] = "utf-8", ) -> None: event_hooks = {} if event_hooks is None else event_hooks @@ -210,8 +210,8 @@ def _enforce_trailing_slash(self, url: URL) -> URL: return url.copy_with(raw_path=url.raw_path + b"/") def _get_proxy_map( - self, proxies: typing.Optional[ProxiesTypes], allow_env_proxies: bool - ) -> typing.Dict[str, typing.Optional[Proxy]]: + self, proxies: ProxiesTypes | None, allow_env_proxies: bool + ) -> dict[str, Proxy | None]: if proxies is None: if allow_env_proxies: return { @@ -238,20 +238,18 @@ def timeout(self, timeout: TimeoutTypes) -> None: self._timeout = Timeout(timeout) @property - def event_hooks(self) -> typing.Dict[str, typing.List[EventHook]]: + def event_hooks(self) -> dict[str, list[EventHook]]: return self._event_hooks @event_hooks.setter - def event_hooks( - self, event_hooks: typing.Dict[str, typing.List[EventHook]] - ) -> None: + def event_hooks(self, event_hooks: dict[str, list[EventHook]]) -> None: self._event_hooks = { "request": list(event_hooks.get("request", [])), "response": list(event_hooks.get("response", [])), } @property - def auth(self) -> typing.Optional[Auth]: + def auth(self) -> Auth | None: """ Authentication class used when none is passed at the request-level. 
@@ -323,15 +321,15 @@ def build_request( method: str, url: URLTypes, *, - content: typing.Optional[RequestContent] = None, - data: typing.Optional[RequestData] = None, - files: typing.Optional[RequestFiles] = None, - json: typing.Optional[typing.Any] = None, - params: typing.Optional[QueryParamTypes] = None, - headers: typing.Optional[HeaderTypes] = None, - cookies: typing.Optional[CookieTypes] = None, - timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, - extensions: typing.Optional[RequestExtensions] = None, + content: RequestContent | None = None, + data: RequestData | None = None, + files: RequestFiles | None = None, + json: typing.Any | None = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, ) -> Request: """ Build and return a request instance. @@ -391,9 +389,7 @@ def _merge_url(self, url: URLTypes) -> URL: return self.base_url.copy_with(raw_path=merge_raw_path) return merge_url - def _merge_cookies( - self, cookies: typing.Optional[CookieTypes] = None - ) -> typing.Optional[CookieTypes]: + def _merge_cookies(self, cookies: CookieTypes | None = None) -> CookieTypes | None: """ Merge a cookies argument together with any cookies on the client, to create the cookies used for the outgoing request. @@ -404,9 +400,7 @@ def _merge_cookies( return merged_cookies return cookies - def _merge_headers( - self, headers: typing.Optional[HeaderTypes] = None - ) -> typing.Optional[HeaderTypes]: + def _merge_headers(self, headers: HeaderTypes | None = None) -> HeaderTypes | None: """ Merge a headers argument together with any headers on the client, to create the headers used for the outgoing request. 
@@ -416,8 +410,8 @@ def _merge_headers( return merged_headers def _merge_queryparams( - self, params: typing.Optional[QueryParamTypes] = None - ) -> typing.Optional[QueryParamTypes]: + self, params: QueryParamTypes | None = None + ) -> QueryParamTypes | None: """ Merge a queryparams argument together with any queryparams on the client, to create the queryparams used for the outgoing request. @@ -427,7 +421,7 @@ def _merge_queryparams( return merged_queryparams.merge(params) return params - def _build_auth(self, auth: typing.Optional[AuthTypes]) -> typing.Optional[Auth]: + def _build_auth(self, auth: AuthTypes | None) -> Auth | None: if auth is None: return None elif isinstance(auth, tuple): @@ -442,7 +436,7 @@ def _build_auth(self, auth: typing.Optional[AuthTypes]) -> typing.Optional[Auth] def _build_request_auth( self, request: Request, - auth: typing.Union[AuthTypes, UseClientDefault, None] = USE_CLIENT_DEFAULT, + auth: AuthTypes | UseClientDefault | None = USE_CLIENT_DEFAULT, ) -> Auth: auth = ( self._auth if isinstance(auth, UseClientDefault) else self._build_auth(auth) @@ -557,7 +551,7 @@ def _redirect_headers(self, request: Request, url: URL, method: str) -> Headers: def _redirect_stream( self, request: Request, method: str - ) -> typing.Optional[typing.Union[SyncByteStream, AsyncByteStream]]: + ) -> SyncByteStream | AsyncByteStream | None: """ Return the body that should be used for the redirect request. 
""" @@ -624,31 +618,27 @@ class Client(BaseClient): def __init__( self, *, - auth: typing.Optional[AuthTypes] = None, - params: typing.Optional[QueryParamTypes] = None, - headers: typing.Optional[HeaderTypes] = None, - cookies: typing.Optional[CookieTypes] = None, + auth: AuthTypes | None = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, verify: VerifyTypes = True, - cert: typing.Optional[CertTypes] = None, + cert: CertTypes | None = None, http1: bool = True, http2: bool = False, - proxy: typing.Optional[ProxyTypes] = None, - proxies: typing.Optional[ProxiesTypes] = None, - mounts: typing.Optional[ - typing.Mapping[str, typing.Optional[BaseTransport]] - ] = None, + proxy: ProxyTypes | None = None, + proxies: ProxiesTypes | None = None, + mounts: None | (typing.Mapping[str, BaseTransport | None]) = None, timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, follow_redirects: bool = False, limits: Limits = DEFAULT_LIMITS, max_redirects: int = DEFAULT_MAX_REDIRECTS, - event_hooks: typing.Optional[ - typing.Mapping[str, typing.List[EventHook]] - ] = None, + event_hooks: None | (typing.Mapping[str, list[EventHook]]) = None, base_url: URLTypes = "", - transport: typing.Optional[BaseTransport] = None, - app: typing.Optional[typing.Callable[..., typing.Any]] = None, + transport: BaseTransport | None = None, + app: typing.Callable[..., typing.Any] | None = None, trust_env: bool = True, - default_encoding: typing.Union[str, typing.Callable[[bytes], str]] = "utf-8", + default_encoding: str | typing.Callable[[bytes], str] = "utf-8", ) -> None: super().__init__( auth=auth, @@ -695,7 +685,7 @@ def __init__( app=app, trust_env=trust_env, ) - self._mounts: typing.Dict[URLPattern, typing.Optional[BaseTransport]] = { + self._mounts: dict[URLPattern, BaseTransport | None] = { URLPattern(key): None if proxy is None else self._init_proxy_transport( @@ -719,12 +709,12 @@ def __init__( def _init_transport( self, verify: 
VerifyTypes = True, - cert: typing.Optional[CertTypes] = None, + cert: CertTypes | None = None, http1: bool = True, http2: bool = False, limits: Limits = DEFAULT_LIMITS, - transport: typing.Optional[BaseTransport] = None, - app: typing.Optional[typing.Callable[..., typing.Any]] = None, + transport: BaseTransport | None = None, + app: typing.Callable[..., typing.Any] | None = None, trust_env: bool = True, ) -> BaseTransport: if transport is not None: @@ -746,7 +736,7 @@ def _init_proxy_transport( self, proxy: Proxy, verify: VerifyTypes = True, - cert: typing.Optional[CertTypes] = None, + cert: CertTypes | None = None, http1: bool = True, http2: bool = False, limits: Limits = DEFAULT_LIMITS, @@ -778,17 +768,17 @@ def request( method: str, url: URLTypes, *, - content: typing.Optional[RequestContent] = None, - data: typing.Optional[RequestData] = None, - files: typing.Optional[RequestFiles] = None, - json: typing.Optional[typing.Any] = None, - params: typing.Optional[QueryParamTypes] = None, - headers: typing.Optional[HeaderTypes] = None, - cookies: typing.Optional[CookieTypes] = None, - auth: typing.Union[AuthTypes, UseClientDefault, None] = USE_CLIENT_DEFAULT, - follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, - timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, - extensions: typing.Optional[RequestExtensions] = None, + content: RequestContent | None = None, + data: RequestData | None = None, + files: RequestFiles | None = None, + json: typing.Any | None = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | UseClientDefault | None = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, ) -> Response: """ Build and send a request. 
@@ -835,17 +825,17 @@ def stream( method: str, url: URLTypes, *, - content: typing.Optional[RequestContent] = None, - data: typing.Optional[RequestData] = None, - files: typing.Optional[RequestFiles] = None, - json: typing.Optional[typing.Any] = None, - params: typing.Optional[QueryParamTypes] = None, - headers: typing.Optional[HeaderTypes] = None, - cookies: typing.Optional[CookieTypes] = None, - auth: typing.Union[AuthTypes, UseClientDefault, None] = USE_CLIENT_DEFAULT, - follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, - timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, - extensions: typing.Optional[RequestExtensions] = None, + content: RequestContent | None = None, + data: RequestData | None = None, + files: RequestFiles | None = None, + json: typing.Any | None = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | UseClientDefault | None = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, ) -> typing.Iterator[Response]: """ Alternative to `httpx.request()` that streams the response body @@ -886,8 +876,8 @@ def send( request: Request, *, stream: bool = False, - auth: typing.Union[AuthTypes, UseClientDefault, None] = USE_CLIENT_DEFAULT, - follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, + auth: AuthTypes | UseClientDefault | None = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, ) -> Response: """ Send a request. 
@@ -935,7 +925,7 @@ def _send_handling_auth( request: Request, auth: Auth, follow_redirects: bool, - history: typing.List[Response], + history: list[Response], ) -> Response: auth_flow = auth.sync_auth_flow(request) try: @@ -968,7 +958,7 @@ def _send_handling_redirects( self, request: Request, follow_redirects: bool, - history: typing.List[Response], + history: list[Response], ) -> Response: while True: if len(history) > self.max_redirects: @@ -1041,13 +1031,13 @@ def get( self, url: URLTypes, *, - params: typing.Optional[QueryParamTypes] = None, - headers: typing.Optional[HeaderTypes] = None, - cookies: typing.Optional[CookieTypes] = None, - auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, - follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, - timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, - extensions: typing.Optional[RequestExtensions] = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, ) -> Response: """ Send a `GET` request. 
@@ -1070,13 +1060,13 @@ def options( self, url: URLTypes, *, - params: typing.Optional[QueryParamTypes] = None, - headers: typing.Optional[HeaderTypes] = None, - cookies: typing.Optional[CookieTypes] = None, - auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, - follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, - timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, - extensions: typing.Optional[RequestExtensions] = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, ) -> Response: """ Send an `OPTIONS` request. @@ -1099,13 +1089,13 @@ def head( self, url: URLTypes, *, - params: typing.Optional[QueryParamTypes] = None, - headers: typing.Optional[HeaderTypes] = None, - cookies: typing.Optional[CookieTypes] = None, - auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, - follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, - timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, - extensions: typing.Optional[RequestExtensions] = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, ) -> Response: """ Send a `HEAD` request. 
@@ -1128,17 +1118,17 @@ def post( self, url: URLTypes, *, - content: typing.Optional[RequestContent] = None, - data: typing.Optional[RequestData] = None, - files: typing.Optional[RequestFiles] = None, - json: typing.Optional[typing.Any] = None, - params: typing.Optional[QueryParamTypes] = None, - headers: typing.Optional[HeaderTypes] = None, - cookies: typing.Optional[CookieTypes] = None, - auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, - follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, - timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, - extensions: typing.Optional[RequestExtensions] = None, + content: RequestContent | None = None, + data: RequestData | None = None, + files: RequestFiles | None = None, + json: typing.Any | None = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, ) -> Response: """ Send a `POST` request. 
@@ -1165,17 +1155,17 @@ def put( self, url: URLTypes, *, - content: typing.Optional[RequestContent] = None, - data: typing.Optional[RequestData] = None, - files: typing.Optional[RequestFiles] = None, - json: typing.Optional[typing.Any] = None, - params: typing.Optional[QueryParamTypes] = None, - headers: typing.Optional[HeaderTypes] = None, - cookies: typing.Optional[CookieTypes] = None, - auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, - follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, - timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, - extensions: typing.Optional[RequestExtensions] = None, + content: RequestContent | None = None, + data: RequestData | None = None, + files: RequestFiles | None = None, + json: typing.Any | None = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, ) -> Response: """ Send a `PUT` request. 
@@ -1202,17 +1192,17 @@ def patch( self, url: URLTypes, *, - content: typing.Optional[RequestContent] = None, - data: typing.Optional[RequestData] = None, - files: typing.Optional[RequestFiles] = None, - json: typing.Optional[typing.Any] = None, - params: typing.Optional[QueryParamTypes] = None, - headers: typing.Optional[HeaderTypes] = None, - cookies: typing.Optional[CookieTypes] = None, - auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, - follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, - timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, - extensions: typing.Optional[RequestExtensions] = None, + content: RequestContent | None = None, + data: RequestData | None = None, + files: RequestFiles | None = None, + json: typing.Any | None = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, ) -> Response: """ Send a `PATCH` request. 
@@ -1239,13 +1229,13 @@ def delete( self, url: URLTypes, *, - params: typing.Optional[QueryParamTypes] = None, - headers: typing.Optional[HeaderTypes] = None, - cookies: typing.Optional[CookieTypes] = None, - auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, - follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, - timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, - extensions: typing.Optional[RequestExtensions] = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, ) -> Response: """ Send a `DELETE` request. @@ -1296,9 +1286,9 @@ def __enter__(self: T) -> T: def __exit__( self, - exc_type: typing.Optional[typing.Type[BaseException]] = None, - exc_value: typing.Optional[BaseException] = None, - traceback: typing.Optional[TracebackType] = None, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: TracebackType | None = None, ) -> None: self._state = ClientState.CLOSED @@ -1366,31 +1356,28 @@ class AsyncClient(BaseClient): def __init__( self, *, - auth: typing.Optional[AuthTypes] = None, - params: typing.Optional[QueryParamTypes] = None, - headers: typing.Optional[HeaderTypes] = None, - cookies: typing.Optional[CookieTypes] = None, + auth: AuthTypes | None = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, verify: VerifyTypes = True, - cert: typing.Optional[CertTypes] = None, + cert: CertTypes | None = None, http1: bool = True, http2: bool = False, - proxy: typing.Optional[ProxyTypes] = None, - proxies: typing.Optional[ProxiesTypes] = None, - mounts: typing.Optional[ - 
typing.Mapping[str, typing.Optional[AsyncBaseTransport]] - ] = None, + proxy: ProxyTypes | None = None, + proxies: ProxiesTypes | None = None, + mounts: None | (typing.Mapping[str, AsyncBaseTransport | None]) = None, timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, follow_redirects: bool = False, limits: Limits = DEFAULT_LIMITS, max_redirects: int = DEFAULT_MAX_REDIRECTS, - event_hooks: typing.Optional[ - typing.Mapping[str, typing.List[typing.Callable[..., typing.Any]]] - ] = None, + event_hooks: None + | (typing.Mapping[str, list[typing.Callable[..., typing.Any]]]) = None, base_url: URLTypes = "", - transport: typing.Optional[AsyncBaseTransport] = None, - app: typing.Optional[typing.Callable[..., typing.Any]] = None, + transport: AsyncBaseTransport | None = None, + app: typing.Callable[..., typing.Any] | None = None, trust_env: bool = True, - default_encoding: typing.Union[str, typing.Callable[[bytes], str]] = "utf-8", + default_encoding: str | typing.Callable[[bytes], str] = "utf-8", ) -> None: super().__init__( auth=auth, @@ -1438,7 +1425,7 @@ def __init__( trust_env=trust_env, ) - self._mounts: typing.Dict[URLPattern, typing.Optional[AsyncBaseTransport]] = { + self._mounts: dict[URLPattern, AsyncBaseTransport | None] = { URLPattern(key): None if proxy is None else self._init_proxy_transport( @@ -1461,12 +1448,12 @@ def __init__( def _init_transport( self, verify: VerifyTypes = True, - cert: typing.Optional[CertTypes] = None, + cert: CertTypes | None = None, http1: bool = True, http2: bool = False, limits: Limits = DEFAULT_LIMITS, - transport: typing.Optional[AsyncBaseTransport] = None, - app: typing.Optional[typing.Callable[..., typing.Any]] = None, + transport: AsyncBaseTransport | None = None, + app: typing.Callable[..., typing.Any] | None = None, trust_env: bool = True, ) -> AsyncBaseTransport: if transport is not None: @@ -1488,7 +1475,7 @@ def _init_proxy_transport( self, proxy: Proxy, verify: VerifyTypes = True, - cert: typing.Optional[CertTypes] = None, 
+ cert: CertTypes | None = None, http1: bool = True, http2: bool = False, limits: Limits = DEFAULT_LIMITS, @@ -1520,17 +1507,17 @@ async def request( method: str, url: URLTypes, *, - content: typing.Optional[RequestContent] = None, - data: typing.Optional[RequestData] = None, - files: typing.Optional[RequestFiles] = None, - json: typing.Optional[typing.Any] = None, - params: typing.Optional[QueryParamTypes] = None, - headers: typing.Optional[HeaderTypes] = None, - cookies: typing.Optional[CookieTypes] = None, - auth: typing.Union[AuthTypes, UseClientDefault, None] = USE_CLIENT_DEFAULT, - follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, - timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, - extensions: typing.Optional[RequestExtensions] = None, + content: RequestContent | None = None, + data: RequestData | None = None, + files: RequestFiles | None = None, + json: typing.Any | None = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | UseClientDefault | None = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, ) -> Response: """ Build and send a request. 
@@ -1578,17 +1565,17 @@ async def stream( method: str, url: URLTypes, *, - content: typing.Optional[RequestContent] = None, - data: typing.Optional[RequestData] = None, - files: typing.Optional[RequestFiles] = None, - json: typing.Optional[typing.Any] = None, - params: typing.Optional[QueryParamTypes] = None, - headers: typing.Optional[HeaderTypes] = None, - cookies: typing.Optional[CookieTypes] = None, - auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, - follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, - timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, - extensions: typing.Optional[RequestExtensions] = None, + content: RequestContent | None = None, + data: RequestData | None = None, + files: RequestFiles | None = None, + json: typing.Any | None = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, ) -> typing.AsyncIterator[Response]: """ Alternative to `httpx.request()` that streams the response body @@ -1629,8 +1616,8 @@ async def send( request: Request, *, stream: bool = False, - auth: typing.Union[AuthTypes, UseClientDefault, None] = USE_CLIENT_DEFAULT, - follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, + auth: AuthTypes | UseClientDefault | None = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, ) -> Response: """ Send a request. 
@@ -1678,7 +1665,7 @@ async def _send_handling_auth( request: Request, auth: Auth, follow_redirects: bool, - history: typing.List[Response], + history: list[Response], ) -> Response: auth_flow = auth.async_auth_flow(request) try: @@ -1711,7 +1698,7 @@ async def _send_handling_redirects( self, request: Request, follow_redirects: bool, - history: typing.List[Response], + history: list[Response], ) -> Response: while True: if len(history) > self.max_redirects: @@ -1784,13 +1771,13 @@ async def get( self, url: URLTypes, *, - params: typing.Optional[QueryParamTypes] = None, - headers: typing.Optional[HeaderTypes] = None, - cookies: typing.Optional[CookieTypes] = None, - auth: typing.Union[AuthTypes, UseClientDefault, None] = USE_CLIENT_DEFAULT, - follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, - timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, - extensions: typing.Optional[RequestExtensions] = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | UseClientDefault | None = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, ) -> Response: """ Send a `GET` request. 
@@ -1813,13 +1800,13 @@ async def options( self, url: URLTypes, *, - params: typing.Optional[QueryParamTypes] = None, - headers: typing.Optional[HeaderTypes] = None, - cookies: typing.Optional[CookieTypes] = None, - auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, - follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, - timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, - extensions: typing.Optional[RequestExtensions] = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, ) -> Response: """ Send an `OPTIONS` request. @@ -1842,13 +1829,13 @@ async def head( self, url: URLTypes, *, - params: typing.Optional[QueryParamTypes] = None, - headers: typing.Optional[HeaderTypes] = None, - cookies: typing.Optional[CookieTypes] = None, - auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, - follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, - timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, - extensions: typing.Optional[RequestExtensions] = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, ) -> Response: """ Send a `HEAD` request. 
@@ -1871,17 +1858,17 @@ async def post( self, url: URLTypes, *, - content: typing.Optional[RequestContent] = None, - data: typing.Optional[RequestData] = None, - files: typing.Optional[RequestFiles] = None, - json: typing.Optional[typing.Any] = None, - params: typing.Optional[QueryParamTypes] = None, - headers: typing.Optional[HeaderTypes] = None, - cookies: typing.Optional[CookieTypes] = None, - auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, - follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, - timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, - extensions: typing.Optional[RequestExtensions] = None, + content: RequestContent | None = None, + data: RequestData | None = None, + files: RequestFiles | None = None, + json: typing.Any | None = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, ) -> Response: """ Send a `POST` request. 
@@ -1908,17 +1895,17 @@ async def put( self, url: URLTypes, *, - content: typing.Optional[RequestContent] = None, - data: typing.Optional[RequestData] = None, - files: typing.Optional[RequestFiles] = None, - json: typing.Optional[typing.Any] = None, - params: typing.Optional[QueryParamTypes] = None, - headers: typing.Optional[HeaderTypes] = None, - cookies: typing.Optional[CookieTypes] = None, - auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, - follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, - timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, - extensions: typing.Optional[RequestExtensions] = None, + content: RequestContent | None = None, + data: RequestData | None = None, + files: RequestFiles | None = None, + json: typing.Any | None = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, ) -> Response: """ Send a `PUT` request. 
@@ -1945,17 +1932,17 @@ async def patch( self, url: URLTypes, *, - content: typing.Optional[RequestContent] = None, - data: typing.Optional[RequestData] = None, - files: typing.Optional[RequestFiles] = None, - json: typing.Optional[typing.Any] = None, - params: typing.Optional[QueryParamTypes] = None, - headers: typing.Optional[HeaderTypes] = None, - cookies: typing.Optional[CookieTypes] = None, - auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, - follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, - timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, - extensions: typing.Optional[RequestExtensions] = None, + content: RequestContent | None = None, + data: RequestData | None = None, + files: RequestFiles | None = None, + json: typing.Any | None = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, ) -> Response: """ Send a `PATCH` request. 
@@ -1982,13 +1969,13 @@ async def delete( self, url: URLTypes, *, - params: typing.Optional[QueryParamTypes] = None, - headers: typing.Optional[HeaderTypes] = None, - cookies: typing.Optional[CookieTypes] = None, - auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, - follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, - timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, - extensions: typing.Optional[RequestExtensions] = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, ) -> Response: """ Send a `DELETE` request. @@ -2039,9 +2026,9 @@ async def __aenter__(self: U) -> U: async def __aexit__( self, - exc_type: typing.Optional[typing.Type[BaseException]] = None, - exc_value: typing.Optional[BaseException] = None, - traceback: typing.Optional[TracebackType] = None, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: TracebackType | None = None, ) -> None: self._state = ClientState.CLOSED diff --git a/httpx/_config.py b/httpx/_config.py index 0cfd552e49..7636a5dcc5 100644 --- a/httpx/_config.py +++ b/httpx/_config.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import logging import os import ssl @@ -43,7 +45,7 @@ class UnsetType: def create_ssl_context( - cert: typing.Optional[CertTypes] = None, + cert: CertTypes | None = None, verify: VerifyTypes = True, trust_env: bool = True, http2: bool = False, @@ -63,7 +65,7 @@ class SSLConfig: def __init__( self, *, - cert: typing.Optional[CertTypes] = None, + cert: CertTypes | None = None, verify: VerifyTypes = True, trust_env: bool = True, http2: bool = False, @@ -205,12 +207,12 @@ class Timeout: 
def __init__( self, - timeout: typing.Union[TimeoutTypes, UnsetType] = UNSET, + timeout: TimeoutTypes | UnsetType = UNSET, *, - connect: typing.Union[None, float, UnsetType] = UNSET, - read: typing.Union[None, float, UnsetType] = UNSET, - write: typing.Union[None, float, UnsetType] = UNSET, - pool: typing.Union[None, float, UnsetType] = UNSET, + connect: None | float | UnsetType = UNSET, + read: None | float | UnsetType = UNSET, + write: None | float | UnsetType = UNSET, + pool: None | float | UnsetType = UNSET, ) -> None: if isinstance(timeout, Timeout): # Passed as a single explicit Timeout. @@ -249,7 +251,7 @@ def __init__( self.write = timeout if isinstance(write, UnsetType) else write self.pool = timeout if isinstance(pool, UnsetType) else pool - def as_dict(self) -> typing.Dict[str, typing.Optional[float]]: + def as_dict(self) -> dict[str, float | None]: return { "connect": self.connect, "read": self.read, @@ -293,9 +295,9 @@ class Limits: def __init__( self, *, - max_connections: typing.Optional[int] = None, - max_keepalive_connections: typing.Optional[int] = None, - keepalive_expiry: typing.Optional[float] = 5.0, + max_connections: int | None = None, + max_keepalive_connections: int | None = None, + keepalive_expiry: float | None = 5.0, ) -> None: self.max_connections = max_connections self.max_keepalive_connections = max_keepalive_connections @@ -323,9 +325,9 @@ def __init__( self, url: URLTypes, *, - ssl_context: typing.Optional[ssl.SSLContext] = None, - auth: typing.Optional[typing.Tuple[str, str]] = None, - headers: typing.Optional[HeaderTypes] = None, + ssl_context: ssl.SSLContext | None = None, + auth: tuple[str, str] | None = None, + headers: HeaderTypes | None = None, ) -> None: url = URL(url) headers = Headers(headers) @@ -344,7 +346,7 @@ def __init__( self.ssl_context = ssl_context @property - def raw_auth(self) -> typing.Optional[typing.Tuple[bytes, bytes]]: + def raw_auth(self) -> tuple[bytes, bytes] | None: # The proxy authentication as raw 
bytes. return ( None diff --git a/httpx/_content.py b/httpx/_content.py index cd0d17f171..10b574bb3d 100644 --- a/httpx/_content.py +++ b/httpx/_content.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import inspect import warnings from json import dumps as json_dumps @@ -5,13 +7,9 @@ Any, AsyncIterable, AsyncIterator, - Dict, Iterable, Iterator, Mapping, - Optional, - Tuple, - Union, ) from urllib.parse import urlencode @@ -105,8 +103,8 @@ async def __aiter__(self) -> AsyncIterator[bytes]: def encode_content( - content: Union[str, bytes, Iterable[bytes], AsyncIterable[bytes]], -) -> Tuple[Dict[str, str], Union[SyncByteStream, AsyncByteStream]]: + content: str | bytes | Iterable[bytes] | AsyncIterable[bytes], +) -> tuple[dict[str, str], SyncByteStream | AsyncByteStream]: if isinstance(content, (bytes, str)): body = content.encode("utf-8") if isinstance(content, str) else content content_length = len(body) @@ -135,7 +133,7 @@ def encode_content( def encode_urlencoded_data( data: RequestData, -) -> Tuple[Dict[str, str], ByteStream]: +) -> tuple[dict[str, str], ByteStream]: plain_data = [] for key, value in data.items(): if isinstance(value, (list, tuple)): @@ -150,14 +148,14 @@ def encode_urlencoded_data( def encode_multipart_data( - data: RequestData, files: RequestFiles, boundary: Optional[bytes] -) -> Tuple[Dict[str, str], MultipartStream]: + data: RequestData, files: RequestFiles, boundary: bytes | None +) -> tuple[dict[str, str], MultipartStream]: multipart = MultipartStream(data=data, files=files, boundary=boundary) headers = multipart.get_headers() return headers, multipart -def encode_text(text: str) -> Tuple[Dict[str, str], ByteStream]: +def encode_text(text: str) -> tuple[dict[str, str], ByteStream]: body = text.encode("utf-8") content_length = str(len(body)) content_type = "text/plain; charset=utf-8" @@ -165,7 +163,7 @@ def encode_text(text: str) -> Tuple[Dict[str, str], ByteStream]: return headers, ByteStream(body) -def encode_html(html: str) -> 
Tuple[Dict[str, str], ByteStream]: +def encode_html(html: str) -> tuple[dict[str, str], ByteStream]: body = html.encode("utf-8") content_length = str(len(body)) content_type = "text/html; charset=utf-8" @@ -173,7 +171,7 @@ def encode_html(html: str) -> Tuple[Dict[str, str], ByteStream]: return headers, ByteStream(body) -def encode_json(json: Any) -> Tuple[Dict[str, str], ByteStream]: +def encode_json(json: Any) -> tuple[dict[str, str], ByteStream]: body = json_dumps(json).encode("utf-8") content_length = str(len(body)) content_type = "application/json" @@ -182,12 +180,12 @@ def encode_json(json: Any) -> Tuple[Dict[str, str], ByteStream]: def encode_request( - content: Optional[RequestContent] = None, - data: Optional[RequestData] = None, - files: Optional[RequestFiles] = None, - json: Optional[Any] = None, - boundary: Optional[bytes] = None, -) -> Tuple[Dict[str, str], Union[SyncByteStream, AsyncByteStream]]: + content: RequestContent | None = None, + data: RequestData | None = None, + files: RequestFiles | None = None, + json: Any | None = None, + boundary: bytes | None = None, +) -> tuple[dict[str, str], SyncByteStream | AsyncByteStream]: """ Handles encoding the given `content`, `data`, `files`, and `json`, returning a two-tuple of (, ). @@ -217,11 +215,11 @@ def encode_request( def encode_response( - content: Optional[ResponseContent] = None, - text: Optional[str] = None, - html: Optional[str] = None, - json: Optional[Any] = None, -) -> Tuple[Dict[str, str], Union[SyncByteStream, AsyncByteStream]]: + content: ResponseContent | None = None, + text: str | None = None, + html: str | None = None, + json: Any | None = None, +) -> tuple[dict[str, str], SyncByteStream | AsyncByteStream]: """ Handles encoding the given `content`, returning a two-tuple of (, ). 
diff --git a/httpx/_decoders.py b/httpx/_decoders.py index 3f507c8e04..31c72c7f7a 100644 --- a/httpx/_decoders.py +++ b/httpx/_decoders.py @@ -3,6 +3,8 @@ See: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Encoding """ +from __future__ import annotations + import codecs import io import typing @@ -167,11 +169,11 @@ class ByteChunker: Handles returning byte content in fixed-size chunks. """ - def __init__(self, chunk_size: typing.Optional[int] = None) -> None: + def __init__(self, chunk_size: int | None = None) -> None: self._buffer = io.BytesIO() self._chunk_size = chunk_size - def decode(self, content: bytes) -> typing.List[bytes]: + def decode(self, content: bytes) -> list[bytes]: if self._chunk_size is None: return [content] if content else [] @@ -194,7 +196,7 @@ def decode(self, content: bytes) -> typing.List[bytes]: else: return [] - def flush(self) -> typing.List[bytes]: + def flush(self) -> list[bytes]: value = self._buffer.getvalue() self._buffer.seek(0) self._buffer.truncate() @@ -206,11 +208,11 @@ class TextChunker: Handles returning text content in fixed-size chunks. 
""" - def __init__(self, chunk_size: typing.Optional[int] = None) -> None: + def __init__(self, chunk_size: int | None = None) -> None: self._buffer = io.StringIO() self._chunk_size = chunk_size - def decode(self, content: str) -> typing.List[str]: + def decode(self, content: str) -> list[str]: if self._chunk_size is None: return [content] if content else [] @@ -233,7 +235,7 @@ def decode(self, content: str) -> typing.List[str]: else: return [] - def flush(self) -> typing.List[str]: + def flush(self) -> list[str]: value = self._buffer.getvalue() self._buffer.seek(0) self._buffer.truncate() @@ -264,10 +266,10 @@ class LineDecoder: """ def __init__(self) -> None: - self.buffer: typing.List[str] = [] + self.buffer: list[str] = [] self.trailing_cr: bool = False - def decode(self, text: str) -> typing.List[str]: + def decode(self, text: str) -> list[str]: # See https://docs.python.org/3/library/stdtypes.html#str.splitlines NEWLINE_CHARS = "\n\r\x0b\x0c\x1c\x1d\x1e\x85\u2028\u2029" @@ -305,7 +307,7 @@ def decode(self, text: str) -> typing.List[str]: return lines - def flush(self) -> typing.List[str]: + def flush(self) -> list[str]: if not self.buffer and not self.trailing_cr: return [] diff --git a/httpx/_exceptions.py b/httpx/_exceptions.py index 123692955b..11424621c0 100644 --- a/httpx/_exceptions.py +++ b/httpx/_exceptions.py @@ -30,6 +30,8 @@ x ResponseNotRead x RequestNotRead """ +from __future__ import annotations + import contextlib import typing @@ -57,16 +59,16 @@ class HTTPError(Exception): def __init__(self, message: str) -> None: super().__init__(message) - self._request: typing.Optional["Request"] = None + self._request: Request | None = None @property - def request(self) -> "Request": + def request(self) -> Request: if self._request is None: raise RuntimeError("The .request property has not been set.") return self._request @request.setter - def request(self, request: "Request") -> None: + def request(self, request: Request) -> None: self._request = request 
@@ -75,9 +77,7 @@ class RequestError(HTTPError): Base class for all exceptions that may occur when issuing a `.request()`. """ - def __init__( - self, message: str, *, request: typing.Optional["Request"] = None - ) -> None: + def __init__(self, message: str, *, request: Request | None = None) -> None: super().__init__(message) # At the point an exception is raised we won't typically have a request # instance to associate it with. @@ -230,9 +230,7 @@ class HTTPStatusError(HTTPError): May be raised when calling `response.raise_for_status()` """ - def __init__( - self, message: str, *, request: "Request", response: "Response" - ) -> None: + def __init__(self, message: str, *, request: Request, response: Response) -> None: super().__init__(message) self.request = request self.response = response @@ -335,7 +333,7 @@ def __init__(self) -> None: @contextlib.contextmanager def request_context( - request: typing.Optional["Request"] = None, + request: Request | None = None, ) -> typing.Iterator[None]: """ A context manager that can be used to attach the given request context diff --git a/httpx/_main.py b/httpx/_main.py index adb57d5fc0..72657f8ca3 100644 --- a/httpx/_main.py +++ b/httpx/_main.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import functools import json import sys @@ -125,8 +127,8 @@ def format_request_headers(request: httpcore.Request, http2: bool = False) -> st def format_response_headers( http_version: bytes, status: int, - reason_phrase: typing.Optional[bytes], - headers: typing.List[typing.Tuple[bytes, bytes]], + reason_phrase: bytes | None, + headers: list[tuple[bytes, bytes]], ) -> str: version = http_version.decode("ascii") reason = ( @@ -152,8 +154,8 @@ def print_request_headers(request: httpcore.Request, http2: bool = False) -> Non def print_response_headers( http_version: bytes, status: int, - reason_phrase: typing.Optional[bytes], - headers: typing.List[typing.Tuple[bytes, bytes]], + reason_phrase: bytes | None, + headers: 
list[tuple[bytes, bytes]], ) -> None: console = rich.console.Console() http_text = format_response_headers(http_version, status, reason_phrase, headers) @@ -268,7 +270,7 @@ def download_response(response: Response, download: typing.BinaryIO) -> None: def validate_json( ctx: click.Context, - param: typing.Union[click.Option, click.Parameter], + param: click.Option | click.Parameter, value: typing.Any, ) -> typing.Any: if value is None: @@ -282,7 +284,7 @@ def validate_json( def validate_auth( ctx: click.Context, - param: typing.Union[click.Option, click.Parameter], + param: click.Option | click.Parameter, value: typing.Any, ) -> typing.Any: if value == (None, None): @@ -296,7 +298,7 @@ def validate_auth( def handle_help( ctx: click.Context, - param: typing.Union[click.Option, click.Parameter], + param: click.Option | click.Parameter, value: typing.Any, ) -> None: if not value or ctx.resilient_parsing: @@ -448,20 +450,20 @@ def handle_help( def main( url: str, method: str, - params: typing.List[typing.Tuple[str, str]], + params: list[tuple[str, str]], content: str, - data: typing.List[typing.Tuple[str, str]], - files: typing.List[typing.Tuple[str, click.File]], + data: list[tuple[str, str]], + files: list[tuple[str, click.File]], json: str, - headers: typing.List[typing.Tuple[str, str]], - cookies: typing.List[typing.Tuple[str, str]], - auth: typing.Optional[typing.Tuple[str, str]], + headers: list[tuple[str, str]], + cookies: list[tuple[str, str]], + auth: tuple[str, str] | None, proxy: str, timeout: float, follow_redirects: bool, verify: bool, http2: bool, - download: typing.Optional[typing.BinaryIO], + download: typing.BinaryIO | None, verbose: bool, ) -> None: """ diff --git a/httpx/_models.py b/httpx/_models.py index e08248a8d4..cd76705f1a 100644 --- a/httpx/_models.py +++ b/httpx/_models.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import datetime import email.message import json as jsonlib @@ -59,8 +61,8 @@ class Headers(typing.MutableMapping[str, 
str]): def __init__( self, - headers: typing.Optional[HeaderTypes] = None, - encoding: typing.Optional[str] = None, + headers: HeaderTypes | None = None, + encoding: str | None = None, ) -> None: if headers is None: self._list = [] # type: typing.List[typing.Tuple[bytes, bytes, bytes]] @@ -117,7 +119,7 @@ def encoding(self, value: str) -> None: self._encoding = value @property - def raw(self) -> typing.List[typing.Tuple[bytes, bytes]]: + def raw(self) -> list[tuple[bytes, bytes]]: """ Returns a list of the raw header items, as byte pairs. """ @@ -127,7 +129,7 @@ def keys(self) -> typing.KeysView[str]: return {key.decode(self.encoding): None for _, key, value in self._list}.keys() def values(self) -> typing.ValuesView[str]: - values_dict: typing.Dict[str, str] = {} + values_dict: dict[str, str] = {} for _, key, value in self._list: str_key = key.decode(self.encoding) str_value = value.decode(self.encoding) @@ -142,7 +144,7 @@ def items(self) -> typing.ItemsView[str, str]: Return `(key, value)` items of headers. Concatenate headers into a single comma separated value when a key occurs multiple times. """ - values_dict: typing.Dict[str, str] = {} + values_dict: dict[str, str] = {} for _, key, value in self._list: str_key = key.decode(self.encoding) str_value = value.decode(self.encoding) @@ -152,7 +154,7 @@ def items(self) -> typing.ItemsView[str, str]: values_dict[str_key] = str_value return values_dict.items() - def multi_items(self) -> typing.List[typing.Tuple[str, str]]: + def multi_items(self) -> list[tuple[str, str]]: """ Return a list of `(key, value)` pairs of headers. 
Allow multiple occurrences of the same key without concatenating into a single @@ -173,7 +175,7 @@ def get(self, key: str, default: typing.Any = None) -> typing.Any: except KeyError: return default - def get_list(self, key: str, split_commas: bool = False) -> typing.List[str]: + def get_list(self, key: str, split_commas: bool = False) -> list[str]: """ Return a list of all header values for a given key. If `split_commas=True` is passed, then any comma separated header @@ -195,14 +197,14 @@ def get_list(self, key: str, split_commas: bool = False) -> typing.List[str]: split_values.extend([item.strip() for item in value.split(",")]) return split_values - def update(self, headers: typing.Optional[HeaderTypes] = None) -> None: # type: ignore + def update(self, headers: HeaderTypes | None = None) -> None: # type: ignore headers = Headers(headers) for key in headers.keys(): if key in self: self.pop(key) self._list.extend(headers._list) - def copy(self) -> "Headers": + def copy(self) -> Headers: return Headers(self, encoding=self.encoding) def __getitem__(self, key: str) -> str: @@ -306,18 +308,18 @@ def __repr__(self) -> str: class Request: def __init__( self, - method: typing.Union[str, bytes], - url: typing.Union["URL", str], + method: str | bytes, + url: URL | str, *, - params: typing.Optional[QueryParamTypes] = None, - headers: typing.Optional[HeaderTypes] = None, - cookies: typing.Optional[CookieTypes] = None, - content: typing.Optional[RequestContent] = None, - data: typing.Optional[RequestData] = None, - files: typing.Optional[RequestFiles] = None, - json: typing.Optional[typing.Any] = None, - stream: typing.Union[SyncByteStream, AsyncByteStream, None] = None, - extensions: typing.Optional[RequestExtensions] = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + content: RequestContent | None = None, + data: RequestData | None = None, + files: RequestFiles | None = None, + json: typing.Any | 
None = None, + stream: SyncByteStream | AsyncByteStream | None = None, + extensions: RequestExtensions | None = None, ) -> None: self.method = ( method.decode("ascii").upper() @@ -334,7 +336,7 @@ def __init__( Cookies(cookies).set_cookie_header(self) if stream is None: - content_type: typing.Optional[str] = self.headers.get("content-type") + content_type: str | None = self.headers.get("content-type") headers, stream = encode_request( content=content, data=data, @@ -368,14 +370,14 @@ def __init__( # * Creating request instances on the *server-side* of the transport API. self.stream = stream - def _prepare(self, default_headers: typing.Dict[str, str]) -> None: + def _prepare(self, default_headers: dict[str, str]) -> None: for key, value in default_headers.items(): # Ignore Transfer-Encoding if the Content-Length has been set explicitly. if key.lower() == "transfer-encoding" and "Content-Length" in self.headers: continue self.headers.setdefault(key, value) - auto_headers: typing.List[typing.Tuple[bytes, bytes]] = [] + auto_headers: list[tuple[bytes, bytes]] = [] has_host = "Host" in self.headers has_content_length = ( @@ -428,14 +430,14 @@ def __repr__(self) -> str: url = str(self.url) return f"<{class_name}({self.method!r}, {url!r})>" - def __getstate__(self) -> typing.Dict[str, typing.Any]: + def __getstate__(self) -> dict[str, typing.Any]: return { name: value for name, value in self.__dict__.items() if name not in ["extensions", "stream"] } - def __setstate__(self, state: typing.Dict[str, typing.Any]) -> None: + def __setstate__(self, state: dict[str, typing.Any]) -> None: for name, value in state.items(): setattr(self, name, value) self.extensions = {} @@ -447,25 +449,25 @@ def __init__( self, status_code: int, *, - headers: typing.Optional[HeaderTypes] = None, - content: typing.Optional[ResponseContent] = None, - text: typing.Optional[str] = None, - html: typing.Optional[str] = None, + headers: HeaderTypes | None = None, + content: ResponseContent | None = None, 
+ text: str | None = None, + html: str | None = None, json: typing.Any = None, - stream: typing.Union[SyncByteStream, AsyncByteStream, None] = None, - request: typing.Optional[Request] = None, - extensions: typing.Optional[ResponseExtensions] = None, - history: typing.Optional[typing.List["Response"]] = None, - default_encoding: typing.Union[str, typing.Callable[[bytes], str]] = "utf-8", + stream: SyncByteStream | AsyncByteStream | None = None, + request: Request | None = None, + extensions: ResponseExtensions | None = None, + history: list[Response] | None = None, + default_encoding: str | typing.Callable[[bytes], str] = "utf-8", ) -> None: self.status_code = status_code self.headers = Headers(headers) - self._request: typing.Optional[Request] = request + self._request: Request | None = request # When follow_redirects=False and a redirect is received, # the client will set `response.next_request`. - self.next_request: typing.Optional[Request] = None + self.next_request: Request | None = None self.extensions: ResponseExtensions = {} if extensions is None else extensions self.history = [] if history is None else list(history) @@ -498,7 +500,7 @@ def __init__( self._num_bytes_downloaded = 0 - def _prepare(self, default_headers: typing.Dict[str, str]) -> None: + def _prepare(self, default_headers: dict[str, str]) -> None: for key, value in default_headers.items(): # Ignore Transfer-Encoding if the Content-Length has been set explicitly. if key.lower() == "transfer-encoding" and "content-length" in self.headers: @@ -580,7 +582,7 @@ def text(self) -> str: return self._text @property - def encoding(self) -> typing.Optional[str]: + def encoding(self) -> str | None: """ Return an encoding to use for decoding the byte content into text. The priority for determining this is given by... 
@@ -616,7 +618,7 @@ def encoding(self, value: str) -> None: self._encoding = value @property - def charset_encoding(self) -> typing.Optional[str]: + def charset_encoding(self) -> str | None: """ Return the encoding, as specified by the Content-Type header. """ @@ -632,7 +634,7 @@ def _get_content_decoder(self) -> ContentDecoder: content, depending on the Content-Encoding used in the response. """ if not hasattr(self, "_decoder"): - decoders: typing.List[ContentDecoder] = [] + decoders: list[ContentDecoder] = [] values = self.headers.get_list("content-encoding", split_commas=True) for value in values: value = value.strip().lower() @@ -721,7 +723,7 @@ def has_redirect_location(self) -> bool: and "Location" in self.headers ) - def raise_for_status(self) -> "Response": + def raise_for_status(self) -> Response: """ Raise the `HTTPStatusError` if one occurred. """ @@ -762,14 +764,14 @@ def json(self, **kwargs: typing.Any) -> typing.Any: return jsonlib.loads(self.content, **kwargs) @property - def cookies(self) -> "Cookies": + def cookies(self) -> Cookies: if not hasattr(self, "_cookies"): self._cookies = Cookies() self._cookies.extract_cookies(self) return self._cookies @property - def links(self) -> typing.Dict[typing.Optional[str], typing.Dict[str, str]]: + def links(self) -> dict[str | None, dict[str, str]]: """ Returns the parsed header links of the response, if any """ @@ -789,14 +791,14 @@ def num_bytes_downloaded(self) -> int: def __repr__(self) -> str: return f"" - def __getstate__(self) -> typing.Dict[str, typing.Any]: + def __getstate__(self) -> dict[str, typing.Any]: return { name: value for name, value in self.__dict__.items() if name not in ["extensions", "stream", "is_closed", "_decoder"] } - def __setstate__(self, state: typing.Dict[str, typing.Any]) -> None: + def __setstate__(self, state: dict[str, typing.Any]) -> None: for name, value in state.items(): setattr(self, name, value) self.is_closed = True @@ -811,9 +813,7 @@ def read(self) -> bytes: 
self._content = b"".join(self.iter_bytes()) return self._content - def iter_bytes( - self, chunk_size: typing.Optional[int] = None - ) -> typing.Iterator[bytes]: + def iter_bytes(self, chunk_size: int | None = None) -> typing.Iterator[bytes]: """ A byte-iterator over the decoded response content. This allows us to handle gzip, deflate, and brotli encoded responses. @@ -836,9 +836,7 @@ def iter_bytes( for chunk in chunker.flush(): yield chunk - def iter_text( - self, chunk_size: typing.Optional[int] = None - ) -> typing.Iterator[str]: + def iter_text(self, chunk_size: int | None = None) -> typing.Iterator[str]: """ A str-iterator over the decoded response content that handles both gzip, deflate, etc but also detects the content's @@ -866,9 +864,7 @@ def iter_lines(self) -> typing.Iterator[str]: for line in decoder.flush(): yield line - def iter_raw( - self, chunk_size: typing.Optional[int] = None - ) -> typing.Iterator[bytes]: + def iter_raw(self, chunk_size: int | None = None) -> typing.Iterator[bytes]: """ A byte-iterator over the raw response content. """ @@ -916,7 +912,7 @@ async def aread(self) -> bytes: return self._content async def aiter_bytes( - self, chunk_size: typing.Optional[int] = None + self, chunk_size: int | None = None ) -> typing.AsyncIterator[bytes]: """ A byte-iterator over the decoded response content. @@ -941,7 +937,7 @@ async def aiter_bytes( yield chunk async def aiter_text( - self, chunk_size: typing.Optional[int] = None + self, chunk_size: int | None = None ) -> typing.AsyncIterator[str]: """ A str-iterator over the decoded response content @@ -971,7 +967,7 @@ async def aiter_lines(self) -> typing.AsyncIterator[str]: yield line async def aiter_raw( - self, chunk_size: typing.Optional[int] = None + self, chunk_size: int | None = None ) -> typing.AsyncIterator[bytes]: """ A byte-iterator over the raw response content. @@ -1017,7 +1013,7 @@ class Cookies(typing.MutableMapping[str, str]): HTTP Cookies, as a mutable mapping. 
""" - def __init__(self, cookies: typing.Optional[CookieTypes] = None) -> None: + def __init__(self, cookies: CookieTypes | None = None) -> None: if cookies is None or isinstance(cookies, dict): self.jar = CookieJar() if isinstance(cookies, dict): @@ -1079,10 +1075,10 @@ def set(self, name: str, value: str, domain: str = "", path: str = "/") -> None: def get( # type: ignore self, name: str, - default: typing.Optional[str] = None, - domain: typing.Optional[str] = None, - path: typing.Optional[str] = None, - ) -> typing.Optional[str]: + default: str | None = None, + domain: str | None = None, + path: str | None = None, + ) -> str | None: """ Get a cookie by name. May optionally include domain and path in order to specify exactly which cookie to retrieve. @@ -1104,8 +1100,8 @@ def get( # type: ignore def delete( self, name: str, - domain: typing.Optional[str] = None, - path: typing.Optional[str] = None, + domain: str | None = None, + path: str | None = None, ) -> None: """ Delete a cookie by name. May optionally include domain and path @@ -1125,9 +1121,7 @@ def delete( for cookie in remove: self.jar.clear(cookie.domain, cookie.path, cookie.name) - def clear( - self, domain: typing.Optional[str] = None, path: typing.Optional[str] = None - ) -> None: + def clear(self, domain: str | None = None, path: str | None = None) -> None: """ Delete all cookies. Optionally include a domain and path in order to only delete a subset of all the cookies. 
@@ -1140,7 +1134,7 @@ def clear( args.append(path) self.jar.clear(*args) - def update(self, cookies: typing.Optional[CookieTypes] = None) -> None: # type: ignore + def update(self, cookies: CookieTypes | None = None) -> None: # type: ignore cookies = Cookies(cookies) for cookie in cookies.jar: self.jar.set_cookie(cookie) diff --git a/httpx/_multipart.py b/httpx/_multipart.py index 1d451c382b..8edb622778 100644 --- a/httpx/_multipart.py +++ b/httpx/_multipart.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import io import os import typing @@ -21,8 +23,8 @@ def get_multipart_boundary_from_content_type( - content_type: typing.Optional[bytes], -) -> typing.Optional[bytes]: + content_type: bytes | None, +) -> bytes | None: if not content_type or not content_type.startswith(b"multipart/form-data"): return None # parse boundary according to @@ -39,9 +41,7 @@ class DataField: A single form field item, within a multipart form field. """ - def __init__( - self, name: str, value: typing.Union[str, bytes, int, float, None] - ) -> None: + def __init__(self, name: str, value: str | bytes | int | float | None) -> None: if not isinstance(name, str): raise TypeError( f"Invalid type for name. 
Expected str, got {type(name)}: {name!r}" @@ -52,7 +52,7 @@ def __init__( f" got {type(value)}: {value!r}" ) self.name = name - self.value: typing.Union[str, bytes] = ( + self.value: str | bytes = ( value if isinstance(value, bytes) else primitive_value_to_str(value) ) @@ -93,8 +93,8 @@ def __init__(self, name: str, value: FileTypes) -> None: fileobj: FileContent - headers: typing.Dict[str, str] = {} - content_type: typing.Optional[str] = None + headers: dict[str, str] = {} + content_type: str | None = None # This large tuple based API largely mirror's requests' API # It would be good to think of better APIs for this that we could @@ -137,7 +137,7 @@ def __init__(self, name: str, value: FileTypes) -> None: self.file = fileobj self.headers = headers - def get_length(self) -> typing.Optional[int]: + def get_length(self) -> int | None: headers = self.render_headers() if isinstance(self.file, (str, bytes)): @@ -199,7 +199,7 @@ def __init__( self, data: RequestData, files: RequestFiles, - boundary: typing.Optional[bytes] = None, + boundary: bytes | None = None, ) -> None: if boundary is None: boundary = os.urandom(16).hex().encode("ascii") @@ -212,7 +212,7 @@ def __init__( def _iter_fields( self, data: RequestData, files: RequestFiles - ) -> typing.Iterator[typing.Union[FileField, DataField]]: + ) -> typing.Iterator[FileField | DataField]: for name, value in data.items(): if isinstance(value, (tuple, list)): for item in value: @@ -231,7 +231,7 @@ def iter_chunks(self) -> typing.Iterator[bytes]: yield b"\r\n" yield b"--%s--\r\n" % self.boundary - def get_content_length(self) -> typing.Optional[int]: + def get_content_length(self) -> int | None: """ Return the length of the multipart encoded content, or `None` if any of the files have a length that cannot be determined upfront. @@ -253,7 +253,7 @@ def get_content_length(self) -> typing.Optional[int]: # Content stream interface. 
- def get_headers(self) -> typing.Dict[str, str]: + def get_headers(self) -> dict[str, str]: content_length = self.get_content_length() content_type = self.content_type if content_length is None: diff --git a/httpx/_status_codes.py b/httpx/_status_codes.py index 671c30e1b8..4cde4e6845 100644 --- a/httpx/_status_codes.py +++ b/httpx/_status_codes.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from enum import IntEnum @@ -21,7 +23,7 @@ class codes(IntEnum): * RFC 8470: Using Early Data in HTTP """ - def __new__(cls, value: int, phrase: str = "") -> "codes": + def __new__(cls, value: int, phrase: str = "") -> codes: obj = int.__new__(cls, value) obj._value_ = value diff --git a/httpx/_transports/asgi.py b/httpx/_transports/asgi.py index 08cd392f75..9543a12861 100644 --- a/httpx/_transports/asgi.py +++ b/httpx/_transports/asgi.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import typing import sniffio @@ -24,7 +26,7 @@ ] -def create_event() -> "Event": +def create_event() -> Event: if sniffio.current_async_library() == "trio": import trio @@ -36,7 +38,7 @@ def create_event() -> "Event": class ASGIResponseStream(AsyncByteStream): - def __init__(self, body: typing.List[bytes]) -> None: + def __init__(self, body: list[bytes]) -> None: self._body = body async def __aiter__(self) -> typing.AsyncIterator[bytes]: @@ -81,7 +83,7 @@ def __init__( app: _ASGIApp, raise_app_exceptions: bool = True, root_path: str = "", - client: typing.Tuple[str, int] = ("127.0.0.1", 123), + client: tuple[str, int] = ("127.0.0.1", 123), ) -> None: self.app = app self.raise_app_exceptions = raise_app_exceptions @@ -123,7 +125,7 @@ async def handle_async_request( # ASGI callables. 
- async def receive() -> typing.Dict[str, typing.Any]: + async def receive() -> dict[str, typing.Any]: nonlocal request_complete if request_complete: @@ -137,7 +139,7 @@ async def receive() -> typing.Dict[str, typing.Any]: return {"type": "http.request", "body": b"", "more_body": False} return {"type": "http.request", "body": body, "more_body": True} - async def send(message: typing.Dict[str, typing.Any]) -> None: + async def send(message: dict[str, typing.Any]) -> None: nonlocal status_code, response_headers, response_started if message["type"] == "http.response.start": diff --git a/httpx/_transports/base.py b/httpx/_transports/base.py index f6fdfe6943..8b6dc3c239 100644 --- a/httpx/_transports/base.py +++ b/httpx/_transports/base.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import typing from types import TracebackType @@ -13,9 +15,9 @@ def __enter__(self: T) -> T: def __exit__( self, - exc_type: typing.Optional[typing.Type[BaseException]] = None, - exc_value: typing.Optional[BaseException] = None, - traceback: typing.Optional[TracebackType] = None, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: TracebackType | None = None, ) -> None: self.close() @@ -64,9 +66,9 @@ async def __aenter__(self: A) -> A: async def __aexit__( self, - exc_type: typing.Optional[typing.Type[BaseException]] = None, - exc_value: typing.Optional[BaseException] = None, - traceback: typing.Optional[TracebackType] = None, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: TracebackType | None = None, ) -> None: await self.aclose() diff --git a/httpx/_transports/default.py b/httpx/_transports/default.py index 14a087389a..14476a3ce3 100644 --- a/httpx/_transports/default.py +++ b/httpx/_transports/default.py @@ -23,6 +23,8 @@ transport = httpx.HTTPTransport(uds="socket.uds") client = httpx.Client(transport=transport) """ +from __future__ import annotations + import contextlib 
import typing from types import TracebackType @@ -120,16 +122,16 @@ class HTTPTransport(BaseTransport): def __init__( self, verify: VerifyTypes = True, - cert: typing.Optional[CertTypes] = None, + cert: CertTypes | None = None, http1: bool = True, http2: bool = False, limits: Limits = DEFAULT_LIMITS, trust_env: bool = True, - proxy: typing.Optional[ProxyTypes] = None, - uds: typing.Optional[str] = None, - local_address: typing.Optional[str] = None, + proxy: ProxyTypes | None = None, + uds: str | None = None, + local_address: str | None = None, retries: int = 0, - socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, ) -> None: ssl_context = create_ssl_context(verify=verify, cert=cert, trust_env=trust_env) proxy = Proxy(url=proxy) if isinstance(proxy, (str, URL)) else proxy @@ -202,9 +204,9 @@ def __enter__(self: T) -> T: # Use generics for subclass support. def __exit__( self, - exc_type: typing.Optional[typing.Type[BaseException]] = None, - exc_value: typing.Optional[BaseException] = None, - traceback: typing.Optional[TracebackType] = None, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: TracebackType | None = None, ) -> None: with map_httpcore_exceptions(): self._pool.__exit__(exc_type, exc_value, traceback) @@ -261,16 +263,16 @@ class AsyncHTTPTransport(AsyncBaseTransport): def __init__( self, verify: VerifyTypes = True, - cert: typing.Optional[CertTypes] = None, + cert: CertTypes | None = None, http1: bool = True, http2: bool = False, limits: Limits = DEFAULT_LIMITS, trust_env: bool = True, - proxy: typing.Optional[ProxyTypes] = None, - uds: typing.Optional[str] = None, - local_address: typing.Optional[str] = None, + proxy: ProxyTypes | None = None, + uds: str | None = None, + local_address: str | None = None, retries: int = 0, - socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, + socket_options: 
typing.Iterable[SOCKET_OPTION] | None = None, ) -> None: ssl_context = create_ssl_context(verify=verify, cert=cert, trust_env=trust_env) proxy = Proxy(url=proxy) if isinstance(proxy, (str, URL)) else proxy @@ -342,9 +344,9 @@ async def __aenter__(self: A) -> A: # Use generics for subclass support. async def __aexit__( self, - exc_type: typing.Optional[typing.Type[BaseException]] = None, - exc_value: typing.Optional[BaseException] = None, - traceback: typing.Optional[TracebackType] = None, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: TracebackType | None = None, ) -> None: with map_httpcore_exceptions(): await self._pool.__aexit__(exc_type, exc_value, traceback) diff --git a/httpx/_transports/mock.py b/httpx/_transports/mock.py index 82043da2d9..5abea83731 100644 --- a/httpx/_transports/mock.py +++ b/httpx/_transports/mock.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import typing from .._models import Request, Response @@ -8,7 +10,7 @@ class MockTransport(AsyncBaseTransport, BaseTransport): - def __init__(self, handler: typing.Union[SyncHandler, AsyncHandler]) -> None: + def __init__(self, handler: SyncHandler | AsyncHandler) -> None: self.handler = handler def handle_request( diff --git a/httpx/_transports/wsgi.py b/httpx/_transports/wsgi.py index a23d42c414..cd03a9417b 100644 --- a/httpx/_transports/wsgi.py +++ b/httpx/_transports/wsgi.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import io import itertools import sys @@ -71,11 +73,11 @@ class WSGITransport(BaseTransport): def __init__( self, - app: "WSGIApplication", + app: WSGIApplication, raise_app_exceptions: bool = True, script_name: str = "", remote_addr: str = "127.0.0.1", - wsgi_errors: typing.Optional[typing.TextIO] = None, + wsgi_errors: typing.TextIO | None = None, ) -> None: self.app = app self.raise_app_exceptions = raise_app_exceptions @@ -117,8 +119,8 @@ def handle_request(self, request: Request) -> Response: def 
start_response( status: str, - response_headers: typing.List[typing.Tuple[str, str]], - exc_info: typing.Optional["OptExcInfo"] = None, + response_headers: list[tuple[str, str]], + exc_info: OptExcInfo | None = None, ) -> typing.Callable[[bytes], typing.Any]: nonlocal seen_status, seen_response_headers, seen_exc_info seen_status = status diff --git a/httpx/_urlparse.py b/httpx/_urlparse.py index 07bbea9070..6a4b55b38c 100644 --- a/httpx/_urlparse.py +++ b/httpx/_urlparse.py @@ -15,6 +15,8 @@ validation, but this module provides a simpler alternative, with less indirection required. """ +from __future__ import annotations + import ipaddress import re import typing @@ -95,10 +97,10 @@ class ParseResult(typing.NamedTuple): scheme: str userinfo: str host: str - port: typing.Optional[int] + port: int | None path: str - query: typing.Optional[str] - fragment: typing.Optional[str] + query: str | None + fragment: str | None @property def authority(self) -> str: @@ -119,7 +121,7 @@ def netloc(self) -> str: ] ) - def copy_with(self, **kwargs: typing.Optional[str]) -> "ParseResult": + def copy_with(self, **kwargs: str | None) -> ParseResult: if not kwargs: return self @@ -146,7 +148,7 @@ def __str__(self) -> str: ) -def urlparse(url: str = "", **kwargs: typing.Optional[str]) -> ParseResult: +def urlparse(url: str = "", **kwargs: str | None) -> ParseResult: # Initial basic checks on allowable URLs. # --------------------------------------- @@ -243,7 +245,7 @@ def urlparse(url: str = "", **kwargs: typing.Optional[str]) -> ParseResult: parsed_scheme: str = scheme.lower() parsed_userinfo: str = quote(userinfo, safe=SUB_DELIMS + ":") parsed_host: str = encode_host(host) - parsed_port: typing.Optional[int] = normalize_port(port, scheme) + parsed_port: int | None = normalize_port(port, scheme) has_scheme = parsed_scheme != "" has_authority = ( @@ -260,11 +262,11 @@ def urlparse(url: str = "", **kwargs: typing.Optional[str]) -> ParseResult: # For 'path' we need to drop ? 
and # from the GEN_DELIMS set. parsed_path: str = quote(path, safe=SUB_DELIMS + ":/[]@") # For 'query' we need to drop '#' from the GEN_DELIMS set. - parsed_query: typing.Optional[str] = ( + parsed_query: str | None = ( None if query is None else quote(query, safe=SUB_DELIMS + ":/?[]@") ) # For 'fragment' we can include all of the GEN_DELIMS set. - parsed_fragment: typing.Optional[str] = ( + parsed_fragment: str | None = ( None if fragment is None else quote(fragment, safe=SUB_DELIMS + ":/?#[]@") ) @@ -327,9 +329,7 @@ def encode_host(host: str) -> str: raise InvalidURL(f"Invalid IDNA hostname: {host!r}") -def normalize_port( - port: typing.Optional[typing.Union[str, int]], scheme: str -) -> typing.Optional[int]: +def normalize_port(port: str | int | None, scheme: str) -> int | None: # From https://tools.ietf.org/html/rfc3986#section-3.2.3 # # "A scheme may define a default port. For example, the "http" scheme @@ -393,7 +393,7 @@ def normalize_path(path: str) -> str: """ # https://datatracker.ietf.org/doc/html/rfc3986#section-5.2.4 components = path.split("/") - output: typing.List[str] = [] + output: list[str] = [] for component in components: if component == ".": pass @@ -479,7 +479,7 @@ def quote(string: str, safe: str = "/") -> str: return "".join(parts) -def urlencode(items: typing.List[typing.Tuple[str, str]]) -> str: +def urlencode(items: list[tuple[str, str]]) -> str: """ We can use a much simpler version of the stdlib urlencode here because we don't need to handle a bunch of different typing cases, such as bytes vs str. diff --git a/httpx/_urls.py b/httpx/_urls.py index 26202e95db..43dedd5644 100644 --- a/httpx/_urls.py +++ b/httpx/_urls.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import typing from urllib.parse import parse_qs, unquote @@ -70,9 +72,7 @@ class URL: themselves. 
""" - def __init__( - self, url: typing.Union["URL", str] = "", **kwargs: typing.Any - ) -> None: + def __init__(self, url: URL | str = "", **kwargs: typing.Any) -> None: if kwargs: allowed = { "scheme": str, @@ -213,7 +213,7 @@ def raw_host(self) -> bytes: return self._uri_reference.host.encode("ascii") @property - def port(self) -> typing.Optional[int]: + def port(self) -> int | None: """ The URL port as an integer. @@ -270,7 +270,7 @@ def query(self) -> bytes: return query.encode("ascii") @property - def params(self) -> "QueryParams": + def params(self) -> QueryParams: """ The URL query parameters, neatly parsed and packaged into an immutable multidict representation. @@ -338,7 +338,7 @@ def is_relative_url(self) -> bool: """ return not self.is_absolute_url - def copy_with(self, **kwargs: typing.Any) -> "URL": + def copy_with(self, **kwargs: typing.Any) -> URL: """ Copy this URL, returning a new URL with some components altered. Accepts the same set of parameters as the components that are made @@ -353,19 +353,19 @@ def copy_with(self, **kwargs: typing.Any) -> "URL": """ return URL(self, **kwargs) - def copy_set_param(self, key: str, value: typing.Any = None) -> "URL": + def copy_set_param(self, key: str, value: typing.Any = None) -> URL: return self.copy_with(params=self.params.set(key, value)) - def copy_add_param(self, key: str, value: typing.Any = None) -> "URL": + def copy_add_param(self, key: str, value: typing.Any = None) -> URL: return self.copy_with(params=self.params.add(key, value)) - def copy_remove_param(self, key: str) -> "URL": + def copy_remove_param(self, key: str) -> URL: return self.copy_with(params=self.params.remove(key)) - def copy_merge_params(self, params: QueryParamTypes) -> "URL": + def copy_merge_params(self, params: QueryParamTypes) -> URL: return self.copy_with(params=self.params.merge(params)) - def join(self, url: URLTypes) -> "URL": + def join(self, url: URLTypes) -> URL: """ Return an absolute URL, using this URL as the base. 
@@ -420,9 +420,7 @@ class QueryParams(typing.Mapping[str, str]): URL query parameters, as a multi-dict. """ - def __init__( - self, *args: typing.Optional[QueryParamTypes], **kwargs: typing.Any - ) -> None: + def __init__(self, *args: QueryParamTypes | None, **kwargs: typing.Any) -> None: assert len(args) < 2, "Too many arguments." assert not (args and kwargs), "Cannot mix named and unnamed arguments." @@ -434,7 +432,7 @@ def __init__( elif isinstance(value, QueryParams): self._dict = {k: list(v) for k, v in value._dict.items()} else: - dict_value: typing.Dict[typing.Any, typing.List[typing.Any]] = {} + dict_value: dict[typing.Any, list[typing.Any]] = {} if isinstance(value, (list, tuple)): # Convert list inputs like: # [("a", "123"), ("a", "456"), ("b", "789")] @@ -495,7 +493,7 @@ def items(self) -> typing.ItemsView[str, str]: """ return {k: v[0] for k, v in self._dict.items()}.items() - def multi_items(self) -> typing.List[typing.Tuple[str, str]]: + def multi_items(self) -> list[tuple[str, str]]: """ Return all items in the query params. Allow duplicate keys to occur. @@ -504,7 +502,7 @@ def multi_items(self) -> typing.List[typing.Tuple[str, str]]: q = httpx.QueryParams("a=123&a=456&b=789") assert list(q.multi_items()) == [("a", "123"), ("a", "456"), ("b", "789")] """ - multi_items: typing.List[typing.Tuple[str, str]] = [] + multi_items: list[tuple[str, str]] = [] for k, v in self._dict.items(): multi_items.extend([(k, i) for i in v]) return multi_items @@ -523,7 +521,7 @@ def get(self, key: typing.Any, default: typing.Any = None) -> typing.Any: return self._dict[str(key)][0] return default - def get_list(self, key: str) -> typing.List[str]: + def get_list(self, key: str) -> list[str]: """ Get all values from the query param for a given key. 
@@ -534,7 +532,7 @@ def get_list(self, key: str) -> typing.List[str]: """ return list(self._dict.get(str(key), [])) - def set(self, key: str, value: typing.Any = None) -> "QueryParams": + def set(self, key: str, value: typing.Any = None) -> QueryParams: """ Return a new QueryParams instance, setting the value of a key. @@ -549,7 +547,7 @@ def set(self, key: str, value: typing.Any = None) -> "QueryParams": q._dict[str(key)] = [primitive_value_to_str(value)] return q - def add(self, key: str, value: typing.Any = None) -> "QueryParams": + def add(self, key: str, value: typing.Any = None) -> QueryParams: """ Return a new QueryParams instance, setting or appending the value of a key. @@ -564,7 +562,7 @@ def add(self, key: str, value: typing.Any = None) -> "QueryParams": q._dict[str(key)] = q.get_list(key) + [primitive_value_to_str(value)] return q - def remove(self, key: str) -> "QueryParams": + def remove(self, key: str) -> QueryParams: """ Return a new QueryParams instance, removing the value of a key. @@ -579,7 +577,7 @@ def remove(self, key: str) -> "QueryParams": q._dict.pop(str(key), None) return q - def merge(self, params: typing.Optional[QueryParamTypes] = None) -> "QueryParams": + def merge(self, params: QueryParamTypes | None = None) -> QueryParams: """ Return a new QueryParams instance, updated with. @@ -635,7 +633,7 @@ def __repr__(self) -> str: query_string = str(self) return f"{class_name}({query_string!r})" - def update(self, params: typing.Optional[QueryParamTypes] = None) -> None: + def update(self, params: QueryParamTypes | None = None) -> None: raise RuntimeError( "QueryParams are immutable since 0.18.0. " "Use `q = q.merge(...)` to create an updated copy." 
diff --git a/httpx/_utils.py b/httpx/_utils.py index bc3cb001dd..a9ece19438 100644 --- a/httpx/_utils.py +++ b/httpx/_utils.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import codecs import email.message import ipaddress @@ -27,9 +29,9 @@ def normalize_header_key( - value: typing.Union[str, bytes], + value: str | bytes, lower: bool, - encoding: typing.Optional[str] = None, + encoding: str | None = None, ) -> bytes: """ Coerce str/bytes into a strictly byte-wise HTTP header key. @@ -42,9 +44,7 @@ def normalize_header_key( return bytes_value.lower() if lower else bytes_value -def normalize_header_value( - value: typing.Union[str, bytes], encoding: typing.Optional[str] = None -) -> bytes: +def normalize_header_value(value: str | bytes, encoding: str | None = None) -> bytes: """ Coerce str/bytes into a strictly byte-wise HTTP header value. """ @@ -53,7 +53,7 @@ def normalize_header_value( return value.encode(encoding or "ascii") -def primitive_value_to_str(value: "PrimitiveData") -> str: +def primitive_value_to_str(value: PrimitiveData) -> str: """ Coerce a primitive data type into a string value. 
@@ -91,7 +91,7 @@ def replacer(match: typing.Match[str]) -> str: return f'{name}="{value}"'.encode() -def get_ca_bundle_from_env() -> typing.Optional[str]: +def get_ca_bundle_from_env() -> str | None: if "SSL_CERT_FILE" in os.environ: ssl_file = Path(os.environ["SSL_CERT_FILE"]) if ssl_file.is_file(): @@ -103,7 +103,7 @@ def get_ca_bundle_from_env() -> typing.Optional[str]: return None -def parse_header_links(value: str) -> typing.List[typing.Dict[str, str]]: +def parse_header_links(value: str) -> list[dict[str, str]]: """ Returns a list of parsed link headers, for more info see: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Link @@ -119,7 +119,7 @@ def parse_header_links(value: str) -> typing.List[typing.Dict[str, str]]: :param value: HTTP Link entity-header field :return: list of parsed link headers """ - links: typing.List[typing.Dict[str, str]] = [] + links: list[dict[str, str]] = [] replace_chars = " '\"" value = value.strip(replace_chars) if not value: @@ -140,7 +140,7 @@ def parse_header_links(value: str) -> typing.List[typing.Dict[str, str]]: return links -def parse_content_type_charset(content_type: str) -> typing.Optional[str]: +def parse_content_type_charset(content_type: str) -> str | None: # We used to use `cgi.parse_header()` here, but `cgi` became a dead battery. 
# See: https://peps.python.org/pep-0594/#cgi msg = email.message.Message() @@ -152,21 +152,21 @@ def parse_content_type_charset(content_type: str) -> typing.Optional[str]: def obfuscate_sensitive_headers( - items: typing.Iterable[typing.Tuple[typing.AnyStr, typing.AnyStr]], -) -> typing.Iterator[typing.Tuple[typing.AnyStr, typing.AnyStr]]: + items: typing.Iterable[tuple[typing.AnyStr, typing.AnyStr]], +) -> typing.Iterator[tuple[typing.AnyStr, typing.AnyStr]]: for k, v in items: if to_str(k.lower()) in SENSITIVE_HEADERS: v = to_bytes_or_str("[secure]", match_type_of=v) yield k, v -def port_or_default(url: "URL") -> typing.Optional[int]: +def port_or_default(url: URL) -> int | None: if url.port is not None: return url.port return {"http": 80, "https": 443}.get(url.scheme) -def same_origin(url: "URL", other: "URL") -> bool: +def same_origin(url: URL, other: URL) -> bool: """ Return 'True' if the given URLs share the same origin. """ @@ -177,7 +177,7 @@ def same_origin(url: "URL", other: "URL") -> bool: ) -def is_https_redirect(url: "URL", location: "URL") -> bool: +def is_https_redirect(url: URL, location: URL) -> bool: """ Return 'True' if 'location' is a HTTPS upgrade of 'url' """ @@ -192,7 +192,7 @@ def is_https_redirect(url: "URL", location: "URL") -> bool: ) -def get_environment_proxies() -> typing.Dict[str, typing.Optional[str]]: +def get_environment_proxies() -> dict[str, str | None]: """Gets proxy information from the environment""" # urllib.request.getproxies() falls back on System @@ -200,7 +200,7 @@ def get_environment_proxies() -> typing.Dict[str, typing.Optional[str]]: # We don't want to propagate non-HTTP proxies into # our configuration such as 'TRAVIS_APT_PROXY'. 
proxy_info = getproxies() - mounts: typing.Dict[str, typing.Optional[str]] = {} + mounts: dict[str, str | None] = {} for scheme in ("http", "https", "all"): if proxy_info.get(scheme): @@ -241,11 +241,11 @@ def get_environment_proxies() -> typing.Dict[str, typing.Optional[str]]: return mounts -def to_bytes(value: typing.Union[str, bytes], encoding: str = "utf-8") -> bytes: +def to_bytes(value: str | bytes, encoding: str = "utf-8") -> bytes: return value.encode(encoding) if isinstance(value, str) else value -def to_str(value: typing.Union[str, bytes], encoding: str = "utf-8") -> str: +def to_str(value: str | bytes, encoding: str = "utf-8") -> str: return value if isinstance(value, str) else value.decode(encoding) @@ -257,13 +257,13 @@ def unquote(value: str) -> str: return value[1:-1] if value[0] == value[-1] == '"' else value -def guess_content_type(filename: typing.Optional[str]) -> typing.Optional[str]: +def guess_content_type(filename: str | None) -> str | None: if filename: return mimetypes.guess_type(filename)[0] or "application/octet-stream" return None -def peek_filelike_length(stream: typing.Any) -> typing.Optional[int]: +def peek_filelike_length(stream: typing.Any) -> int | None: """ Given a file-like stream object, return its length in number of bytes without reading it into memory. 
@@ -373,7 +373,7 @@ def __init__(self, pattern: str) -> None: self.host = "" if url.host == "*" else url.host self.port = url.port if not url.host or url.host == "*": - self.host_regex: typing.Optional[typing.Pattern[str]] = None + self.host_regex: typing.Pattern[str] | None = None elif url.host.startswith("*."): # *.example.com should match "www.example.com", but not "example.com" domain = re.escape(url.host[2:]) @@ -387,7 +387,7 @@ def __init__(self, pattern: str) -> None: domain = re.escape(url.host) self.host_regex = re.compile(f"^{domain}$") - def matches(self, other: "URL") -> bool: + def matches(self, other: URL) -> bool: if self.scheme and self.scheme != other.scheme: return False if ( @@ -401,7 +401,7 @@ def matches(self, other: "URL") -> bool: return True @property - def priority(self) -> typing.Tuple[int, int, int]: + def priority(self) -> tuple[int, int, int]: """ The priority allows URLPattern instances to be sortable, so that we can match from most specific to least specific. 
@@ -417,7 +417,7 @@ def priority(self) -> typing.Tuple[int, int, int]: def __hash__(self) -> int: return hash(self.pattern) - def __lt__(self, other: "URLPattern") -> bool: + def __lt__(self, other: URLPattern) -> bool: return self.priority < other.priority def __eq__(self, other: typing.Any) -> bool: diff --git a/tests/client/test_async_client.py b/tests/client/test_async_client.py index 49664df589..8d7eaa3c58 100644 --- a/tests/client/test_async_client.py +++ b/tests/client/test_async_client.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import typing from datetime import timedelta @@ -181,7 +183,7 @@ async def test_100_continue(server): async def test_context_managed_transport(): class Transport(httpx.AsyncBaseTransport): def __init__(self) -> None: - self.events: typing.List[str] = [] + self.events: list[str] = [] async def aclose(self): # The base implementation of httpx.AsyncBaseTransport just @@ -214,7 +216,7 @@ async def test_context_managed_transport_and_mount(): class Transport(httpx.AsyncBaseTransport): def __init__(self, name: str) -> None: self.name: str = name - self.events: typing.List[str] = [] + self.events: list[str] = [] async def aclose(self): # The base implementation of httpx.AsyncBaseTransport just diff --git a/tests/client/test_client.py b/tests/client/test_client.py index fcc6ec6a08..2951e01b8a 100644 --- a/tests/client/test_client.py +++ b/tests/client/test_client.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import typing from datetime import timedelta @@ -230,7 +232,7 @@ def test_merge_relative_url_with_encoded_slashes(): def test_context_managed_transport(): class Transport(httpx.BaseTransport): def __init__(self) -> None: - self.events: typing.List[str] = [] + self.events: list[str] = [] def close(self): # The base implementation of httpx.BaseTransport just @@ -262,7 +264,7 @@ def test_context_managed_transport_and_mount(): class Transport(httpx.BaseTransport): def __init__(self, name: str) -> None: self.name: str 
= name - self.events: typing.List[str] = [] + self.events: list[str] = [] def close(self): # The base implementation of httpx.BaseTransport just diff --git a/tests/test_decoders.py b/tests/test_decoders.py index 170a93453c..73644e04e6 100644 --- a/tests/test_decoders.py +++ b/tests/test_decoders.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import typing import zlib @@ -224,7 +226,7 @@ def test_text_decoder_empty_cases(): [((b"Hello,", b" world!"), ["Hello,", " world!"])], ) def test_streaming_text_decoder( - data: typing.Iterable[bytes], expected: typing.List[str] + data: typing.Iterable[bytes], expected: list[str] ) -> None: response = httpx.Response(200, content=iter(data)) assert list(response.iter_text()) == expected diff --git a/tests/test_exceptions.py b/tests/test_exceptions.py index 6547ab37a5..60c8721c02 100644 --- a/tests/test_exceptions.py +++ b/tests/test_exceptions.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import typing import httpcore @@ -34,7 +36,7 @@ def test_httpcore_all_exceptions_mapped() -> None: pytest.fail(f"Unmapped httpcore exceptions: {unmapped_exceptions}") -def test_httpcore_exception_mapping(server: "TestServer") -> None: +def test_httpcore_exception_mapping(server: TestServer) -> None: """ HTTPCore exception mapping works as expected. 
""" diff --git a/tests/test_multipart.py b/tests/test_multipart.py index fc283c9cc4..5c4629152c 100644 --- a/tests/test_multipart.py +++ b/tests/test_multipart.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import io import tempfile import typing @@ -148,7 +150,7 @@ def test_multipart_file_tuple(): @pytest.mark.parametrize("file_content_type", [None, "text/plain"]) -def test_multipart_file_tuple_headers(file_content_type: typing.Optional[str]) -> None: +def test_multipart_file_tuple_headers(file_content_type: str | None) -> None: file_name = "test.txt" file_content = io.BytesIO(b"") file_headers = {"Expires": "0"} diff --git a/tests/test_wsgi.py b/tests/test_wsgi.py index a952da6af9..3565a48c92 100644 --- a/tests/test_wsgi.py +++ b/tests/test_wsgi.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import sys import typing import wsgiref.validate @@ -12,7 +14,7 @@ from _typeshed.wsgi import StartResponse, WSGIApplication, WSGIEnvironment -def application_factory(output: typing.Iterable[bytes]) -> "WSGIApplication": +def application_factory(output: typing.Iterable[bytes]) -> WSGIApplication: def application(environ, start_response): status = "200 OK" @@ -29,7 +31,7 @@ def application(environ, start_response): def echo_body( - environ: "WSGIEnvironment", start_response: "StartResponse" + environ: WSGIEnvironment, start_response: StartResponse ) -> typing.Iterable[bytes]: status = "200 OK" output = environ["wsgi.input"].read() @@ -44,7 +46,7 @@ def echo_body( def echo_body_with_response_stream( - environ: "WSGIEnvironment", start_response: "StartResponse" + environ: WSGIEnvironment, start_response: StartResponse ) -> typing.Iterable[bytes]: status = "200 OK" @@ -63,9 +65,9 @@ def output_generator(f: typing.IO[bytes]) -> typing.Iterator[bytes]: def raise_exc( - environ: "WSGIEnvironment", - start_response: "StartResponse", - exc: typing.Type[Exception] = ValueError, + environ: WSGIEnvironment, + start_response: StartResponse, + exc: type[Exception] = 
ValueError, ) -> typing.Iterable[bytes]: status = "500 Server Error" output = b"Nope!" @@ -161,7 +163,7 @@ def test_wsgi_server_port(url: str, expected_server_port: str) -> None: SERVER_PORT is populated correctly from the requested URL. """ hello_world_app = application_factory([b"Hello, World!"]) - server_port: typing.Optional[str] = None + server_port: str | None = None def app(environ, start_response): nonlocal server_port From 37a2901af38f4142ee902c838aaaf92cf8a80f6a Mon Sep 17 00:00:00 2001 From: Richie B2B Date: Tue, 30 Jan 2024 08:01:56 +0100 Subject: [PATCH 21/26] Mention NO_PROXY environment variable on Advanced Usage page (#3066) Co-authored-by: Kar Petrosyan <92274156+karpetrosyan@users.noreply.github.com> --- docs/advanced/transports.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/docs/advanced/transports.md b/docs/advanced/transports.md index fdc58b0841..2f3e00690d 100644 --- a/docs/advanced/transports.md +++ b/docs/advanced/transports.md @@ -329,4 +329,5 @@ mounts = { There are also environment variables that can be used to control the dictionary of the client mounts. They can be used to configure HTTP proxying for clients. -See documentation on [`HTTP_PROXY`, `HTTPS_PROXY`, `ALL_PROXY`](../environment_variables.md#http_proxy-https_proxy-all_proxy) for more information. +See documentation on [`HTTP_PROXY`, `HTTPS_PROXY`, `ALL_PROXY`](../environment_variables.md#http_proxy-https_proxy-all_proxy) +and [`NO_PROXY`](../environment_variables.md#no_proxy) for more information. 
From 6f461522a5c58839c31be3c82b7bd47fba77109b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 1 Feb 2024 16:19:47 +0000 Subject: [PATCH 22/26] Bump the python-packages group with 6 updates (#3077) Bumps the python-packages group with 6 updates: | Package | From | To | | --- | --- | --- | | [mkdocs-material](https://github.com/squidfunk/mkdocs-material) | `9.5.3` | `9.5.6` | | [coverage[toml]](https://github.com/nedbat/coveragepy) | `7.4.0` | `7.4.1` | | [cryptography](https://github.com/pyca/cryptography) | `41.0.7` | `42.0.2` | | [pytest](https://github.com/pytest-dev/pytest) | `7.4.4` | `8.0.0` | | [ruff](https://github.com/astral-sh/ruff) | `0.1.13` | `0.1.15` | | [uvicorn](https://github.com/encode/uvicorn) | `0.25.0` | `0.27.0.post1` | Updates `mkdocs-material` from 9.5.3 to 9.5.6 - [Release notes](https://github.com/squidfunk/mkdocs-material/releases) - [Changelog](https://github.com/squidfunk/mkdocs-material/blob/master/CHANGELOG) - [Commits](https://github.com/squidfunk/mkdocs-material/compare/9.5.3...9.5.6) Updates `coverage[toml]` from 7.4.0 to 7.4.1 - [Release notes](https://github.com/nedbat/coveragepy/releases) - [Changelog](https://github.com/nedbat/coveragepy/blob/master/CHANGES.rst) - [Commits](https://github.com/nedbat/coveragepy/compare/7.4.0...7.4.1) Updates `cryptography` from 41.0.7 to 42.0.2 - [Changelog](https://github.com/pyca/cryptography/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pyca/cryptography/compare/41.0.7...42.0.2) Updates `pytest` from 7.4.4 to 8.0.0 - [Release notes](https://github.com/pytest-dev/pytest/releases) - [Changelog](https://github.com/pytest-dev/pytest/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pytest-dev/pytest/compare/7.4.4...8.0.0) Updates `ruff` from 0.1.13 to 0.1.15 - [Release notes](https://github.com/astral-sh/ruff/releases) - [Changelog](https://github.com/astral-sh/ruff/blob/main/CHANGELOG.md) - 
[Commits](https://github.com/astral-sh/ruff/compare/v0.1.13...v0.1.15) Updates `uvicorn` from 0.25.0 to 0.27.0.post1 - [Release notes](https://github.com/encode/uvicorn/releases) - [Changelog](https://github.com/encode/uvicorn/blob/master/CHANGELOG.md) - [Commits](https://github.com/encode/uvicorn/compare/0.25.0...0.27.0.post1) --- updated-dependencies: - dependency-name: mkdocs-material dependency-type: direct:production update-type: version-update:semver-patch dependency-group: python-packages - dependency-name: coverage[toml] dependency-type: direct:production update-type: version-update:semver-patch dependency-group: python-packages - dependency-name: cryptography dependency-type: direct:production update-type: version-update:semver-major dependency-group: python-packages - dependency-name: pytest dependency-type: direct:production update-type: version-update:semver-major dependency-group: python-packages - dependency-name: ruff dependency-type: direct:production update-type: version-update:semver-patch dependency-group: python-packages - dependency-name: uvicorn dependency-type: direct:production update-type: version-update:semver-minor dependency-group: python-packages ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements.txt | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/requirements.txt b/requirements.txt index 5582acda15..f127064bdd 100644 --- a/requirements.txt +++ b/requirements.txt @@ -11,19 +11,19 @@ chardet==5.2.0 # Documentation mkdocs==1.5.3 mkautodoc==0.2.0 -mkdocs-material==9.5.3 +mkdocs-material==9.5.6 # Packaging build==1.0.3 twine==4.0.2 # Tests & Linting -coverage[toml]==7.4.0 -cryptography==41.0.7 +coverage[toml]==7.4.1 +cryptography==42.0.2 mypy==1.8.0 -pytest==7.4.4 -ruff==0.1.13 +pytest==8.0.0 +ruff==0.1.15 trio==0.24.0 trio-typing==0.10.0 trustme==1.1.0 -uvicorn==0.25.0 +uvicorn==0.27.0.post1 From cabd1c095e52e2f67ab63ec17941d3a539d7c877 Mon Sep 17 00:00:00 2001 From: Tom Christie Date: Fri, 2 Feb 2024 13:29:41 +0000 Subject: [PATCH 23/26] Deprecate `app=...` in favor of explicit `WSGITransport`/`ASGITransport`. (#3050) * Deprecate app=... in favour of explicit WSGITransport/ASGITransport * Linting * Linting * Update WSGITransport and ASGITransport docs * Deprecate app * Drop deprecation tests * Add CHANGELOG * Deprecate 'app=...' shortcut, rather than removing it. * Update CHANGELOG * Fix test_asgi.test_deprecated_shortcut --- CHANGELOG.md | 4 +++ docs/advanced/transports.md | 72 +++++++++++++++++++++++++++++++++++-- docs/async.md | 52 +-------------------------- httpx/_client.py | 16 ++++++++- tests/test_asgi.py | 37 ++++++++++++++----- tests/test_wsgi.py | 36 ++++++++++++++----- 6 files changed, 145 insertions(+), 72 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 47ac88c834..7950a5f320 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,6 +6,10 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). ## Unreleased +### Deprecated + +* The `app=...` shortcut has been deprecated. 
Use the explicit style of `transport=httpx.WSGITransport()` or `transport=httpx.ASGITransport()` instead. + ### Fixed * Respect the `http1` argument while configuring proxy transports. (#3023) diff --git a/docs/advanced/transports.md b/docs/advanced/transports.md index 2f3e00690d..7e0e21c6f9 100644 --- a/docs/advanced/transports.md +++ b/docs/advanced/transports.md @@ -42,7 +42,9 @@ You can configure an `httpx` client to call directly into a Python web applicati This is particularly useful for two main use-cases: * Using `httpx` as a client inside test cases. -* Mocking out external services during tests or in dev/staging environments. +* Mocking out external services during tests or in dev or staging environments. + +### Example Here's an example of integrating against a Flask application: @@ -57,12 +59,15 @@ app = Flask(__name__) def hello(): return "Hello World!" -with httpx.Client(app=app, base_url="http://testserver") as client: +transport = httpx.WSGITransport(app=app) +with httpx.Client(transport=transport, base_url="http://testserver") as client: r = client.get("/") assert r.status_code == 200 assert r.text == "Hello World!" ``` +### Configuration + For some more complex cases you might need to customize the WSGI transport. This allows you to: * Inspect 500 error responses rather than raise exceptions by setting `raise_app_exceptions=False`. @@ -78,6 +83,69 @@ with httpx.Client(transport=transport, base_url="http://testserver") as client: ... ``` +## ASGITransport + +You can configure an `httpx` client to call directly into an async Python web application using the ASGI protocol. + +This is particularly useful for two main use-cases: + +* Using `httpx` as a client inside test cases. +* Mocking out external services during tests or in dev or staging environments. 
+ +### Example + +Let's take this Starlette application as an example: + +```python +from starlette.applications import Starlette +from starlette.responses import HTMLResponse +from starlette.routing import Route + + +async def hello(request): + return HTMLResponse("Hello World!") + + +app = Starlette(routes=[Route("/", hello)]) +``` + +We can make requests directly against the application, like so: + +```python +transport = httpx.ASGITransport(app=app) + +async with httpx.AsyncClient(transport=transport, base_url="http://testserver") as client: + r = await client.get("/") + assert r.status_code == 200 + assert r.text == "Hello World!" +``` + +### Configuration + +For some more complex cases you might need to customise the ASGI transport. This allows you to: + +* Inspect 500 error responses rather than raise exceptions by setting `raise_app_exceptions=False`. +* Mount the ASGI application at a subpath by setting `root_path`. +* Use a given client address for requests by setting `client`. + +For example: + +```python +# Instantiate a client that makes ASGI requests with a client IP of "1.2.3.4", +# on port 123. +transport = httpx.ASGITransport(app=app, client=("1.2.3.4", 123)) +async with httpx.AsyncClient(transport=transport, base_url="http://testserver") as client: + ... +``` + +See [the ASGI documentation](https://asgi.readthedocs.io/en/latest/specs/www.html#connection-scope) for more details on the `client` and `root_path` keys. + +### ASGI startup and shutdown + +It is not in the scope of HTTPX to trigger ASGI lifespan events of your app. + +However it is suggested to use `LifespanManager` from [asgi-lifespan](https://github.com/florimondmanca/asgi-lifespan#usage) in pair with `AsyncClient`. 
+ ## Custom transports A transport instance must implement the low-level Transport API, which deals diff --git a/docs/async.md b/docs/async.md index d54a353d62..089d783191 100644 --- a/docs/async.md +++ b/docs/async.md @@ -191,54 +191,4 @@ anyio.run(main, backend='trio') ## Calling into Python Web Apps -Just as `httpx.Client` allows you to call directly into WSGI web applications, -the `httpx.AsyncClient` class allows you to call directly into ASGI web applications. - -Let's take this Starlette application as an example: - -```python -from starlette.applications import Starlette -from starlette.responses import HTMLResponse -from starlette.routing import Route - - -async def hello(request): - return HTMLResponse("Hello World!") - - -app = Starlette(routes=[Route("/", hello)]) -``` - -We can make requests directly against the application, like so: - -```pycon ->>> import httpx ->>> async with httpx.AsyncClient(app=app, base_url="http://testserver") as client: -... r = await client.get("/") -... assert r.status_code == 200 -... assert r.text == "Hello World!" -``` - -For some more complex cases you might need to customise the ASGI transport. This allows you to: - -* Inspect 500 error responses rather than raise exceptions by setting `raise_app_exceptions=False`. -* Mount the ASGI application at a subpath by setting `root_path`. -* Use a given client address for requests by setting `client`. - -For example: - -```python -# Instantiate a client that makes ASGI requests with a client IP of "1.2.3.4", -# on port 123. -transport = httpx.ASGITransport(app=app, client=("1.2.3.4", 123)) -async with httpx.AsyncClient(transport=transport, base_url="http://testserver") as client: - ... -``` - -See [the ASGI documentation](https://asgi.readthedocs.io/en/latest/specs/www.html#connection-scope) for more details on the `client` and `root_path` keys. - -## Startup/shutdown of ASGI apps - -It is not in the scope of HTTPX to trigger lifespan events of your app. 
- -However it is suggested to use `LifespanManager` from [asgi-lifespan](https://github.com/florimondmanca/asgi-lifespan#usage) in pair with `AsyncClient`. +For details on calling directly into ASGI applications, see [the `ASGITransport` docs](../advanced/transports#asgitransport). \ No newline at end of file diff --git a/httpx/_client.py b/httpx/_client.py index 1f2145d12e..e2c6702e0c 100644 --- a/httpx/_client.py +++ b/httpx/_client.py @@ -672,6 +672,13 @@ def __init__( if proxy: raise RuntimeError("Use either `proxy` or 'proxies', not both.") + if app: + message = ( + "The 'app' shortcut is now deprecated." + " Use the explicit style 'transport=WSGITransport(app=...)' instead." + ) + warnings.warn(message, DeprecationWarning) + allow_env_proxies = trust_env and app is None and transport is None proxy_map = self._get_proxy_map(proxies or proxy, allow_env_proxies) @@ -1411,7 +1418,14 @@ def __init__( if proxy: raise RuntimeError("Use either `proxy` or 'proxies', not both.") - allow_env_proxies = trust_env and app is None and transport is None + if app: + message = ( + "The 'app' shortcut is now deprecated." + " Use the explicit style 'transport=ASGITransport(app=...)' instead." 
+ ) + warnings.warn(message, DeprecationWarning) + + allow_env_proxies = trust_env and transport is None proxy_map = self._get_proxy_map(proxies or proxy, allow_env_proxies) self._transport = self._init_transport( diff --git a/tests/test_asgi.py b/tests/test_asgi.py index 2971506097..ccc5526678 100644 --- a/tests/test_asgi.py +++ b/tests/test_asgi.py @@ -92,7 +92,8 @@ async def test_asgi_transport_no_body(): @pytest.mark.anyio async def test_asgi(): - async with httpx.AsyncClient(app=hello_world) as client: + transport = httpx.ASGITransport(app=hello_world) + async with httpx.AsyncClient(transport=transport) as client: response = await client.get("http://www.example.org/") assert response.status_code == 200 @@ -101,7 +102,8 @@ async def test_asgi(): @pytest.mark.anyio async def test_asgi_urlencoded_path(): - async with httpx.AsyncClient(app=echo_path) as client: + transport = httpx.ASGITransport(app=echo_path) + async with httpx.AsyncClient(transport=transport) as client: url = httpx.URL("http://www.example.org/").copy_with(path="/user@example.org") response = await client.get(url) @@ -111,7 +113,8 @@ async def test_asgi_urlencoded_path(): @pytest.mark.anyio async def test_asgi_raw_path(): - async with httpx.AsyncClient(app=echo_raw_path) as client: + transport = httpx.ASGITransport(app=echo_raw_path) + async with httpx.AsyncClient(transport=transport) as client: url = httpx.URL("http://www.example.org/").copy_with(path="/user@example.org") response = await client.get(url) @@ -124,7 +127,8 @@ async def test_asgi_raw_path_should_not_include_querystring_portion(): """ See https://github.com/encode/httpx/issues/2810 """ - async with httpx.AsyncClient(app=echo_raw_path) as client: + transport = httpx.ASGITransport(app=echo_raw_path) + async with httpx.AsyncClient(transport=transport) as client: url = httpx.URL("http://www.example.org/path?query") response = await client.get(url) @@ -134,7 +138,8 @@ async def test_asgi_raw_path_should_not_include_querystring_portion(): 
@pytest.mark.anyio async def test_asgi_upload(): - async with httpx.AsyncClient(app=echo_body) as client: + transport = httpx.ASGITransport(app=echo_body) + async with httpx.AsyncClient(transport=transport) as client: response = await client.post("http://www.example.org/", content=b"example") assert response.status_code == 200 @@ -143,7 +148,8 @@ async def test_asgi_upload(): @pytest.mark.anyio async def test_asgi_headers(): - async with httpx.AsyncClient(app=echo_headers) as client: + transport = httpx.ASGITransport(app=echo_headers) + async with httpx.AsyncClient(transport=transport) as client: response = await client.get("http://www.example.org/") assert response.status_code == 200 @@ -160,14 +166,16 @@ async def test_asgi_headers(): @pytest.mark.anyio async def test_asgi_exc(): - async with httpx.AsyncClient(app=raise_exc) as client: + transport = httpx.ASGITransport(app=raise_exc) + async with httpx.AsyncClient(transport=transport) as client: with pytest.raises(RuntimeError): await client.get("http://www.example.org/") @pytest.mark.anyio async def test_asgi_exc_after_response(): - async with httpx.AsyncClient(app=raise_exc_after_response) as client: + transport = httpx.ASGITransport(app=raise_exc_after_response) + async with httpx.AsyncClient(transport=transport) as client: with pytest.raises(RuntimeError): await client.get("http://www.example.org/") @@ -199,7 +207,8 @@ async def read_body(scope, receive, send): message = await receive() disconnect = message.get("type") == "http.disconnect" - async with httpx.AsyncClient(app=read_body) as client: + transport = httpx.ASGITransport(app=read_body) + async with httpx.AsyncClient(transport=transport) as client: response = await client.post("http://www.example.org/", content=b"example") assert response.status_code == 200 @@ -213,3 +222,13 @@ async def test_asgi_exc_no_raise(): response = await client.get("http://www.example.org/") assert response.status_code == 500 + + +@pytest.mark.anyio +async def 
test_deprecated_shortcut(): + """ + The `app=...` shortcut is now deprecated. + Use the explicit transport style instead. + """ + with pytest.warns(DeprecationWarning): + httpx.AsyncClient(app=hello_world) diff --git a/tests/test_wsgi.py b/tests/test_wsgi.py index 3565a48c92..0134bee854 100644 --- a/tests/test_wsgi.py +++ b/tests/test_wsgi.py @@ -92,41 +92,47 @@ def log_to_wsgi_log_buffer(environ, start_response): def test_wsgi(): - client = httpx.Client(app=application_factory([b"Hello, World!"])) + transport = httpx.WSGITransport(app=application_factory([b"Hello, World!"])) + client = httpx.Client(transport=transport) response = client.get("http://www.example.org/") assert response.status_code == 200 assert response.text == "Hello, World!" def test_wsgi_upload(): - client = httpx.Client(app=echo_body) + transport = httpx.WSGITransport(app=echo_body) + client = httpx.Client(transport=transport) response = client.post("http://www.example.org/", content=b"example") assert response.status_code == 200 assert response.text == "example" def test_wsgi_upload_with_response_stream(): - client = httpx.Client(app=echo_body_with_response_stream) + transport = httpx.WSGITransport(app=echo_body_with_response_stream) + client = httpx.Client(transport=transport) response = client.post("http://www.example.org/", content=b"example") assert response.status_code == 200 assert response.text == "example" def test_wsgi_exc(): - client = httpx.Client(app=raise_exc) + transport = httpx.WSGITransport(app=raise_exc) + client = httpx.Client(transport=transport) with pytest.raises(ValueError): client.get("http://www.example.org/") def test_wsgi_http_error(): - client = httpx.Client(app=partial(raise_exc, exc=RuntimeError)) + transport = httpx.WSGITransport(app=partial(raise_exc, exc=RuntimeError)) + client = httpx.Client(transport=transport) with pytest.raises(RuntimeError): client.get("http://www.example.org/") def test_wsgi_generator(): output = [b"", b"", b"Some content", b" and more 
content"] - client = httpx.Client(app=application_factory(output)) + transport = httpx.WSGITransport(app=application_factory(output)) + client = httpx.Client(transport=transport) response = client.get("http://www.example.org/") assert response.status_code == 200 assert response.text == "Some content and more content" @@ -134,7 +140,8 @@ def test_wsgi_generator(): def test_wsgi_generator_empty(): output = [b"", b"", b"", b""] - client = httpx.Client(app=application_factory(output)) + transport = httpx.WSGITransport(app=application_factory(output)) + client = httpx.Client(transport=transport) response = client.get("http://www.example.org/") assert response.status_code == 200 assert response.text == "" @@ -170,7 +177,8 @@ def app(environ, start_response): server_port = environ["SERVER_PORT"] return hello_world_app(environ, start_response) - client = httpx.Client(app=app) + transport = httpx.WSGITransport(app=app) + client = httpx.Client(transport=transport) response = client.get(url) assert response.status_code == 200 assert response.text == "Hello, World!" @@ -186,9 +194,19 @@ def app(environ, start_response): start_response("200 OK", [("Content-Type", "text/plain")]) return [b"success"] - with httpx.Client(app=app, base_url="http://testserver") as client: + transport = httpx.WSGITransport(app=app) + with httpx.Client(transport=transport, base_url="http://testserver") as client: response = client.get("/") assert response.status_code == 200 assert response.text == "success" assert server_protocol == "HTTP/1.1" + + +def test_deprecated_shortcut(): + """ + The `app=...` shortcut is now deprecated. + Use the explicit transport style instead. + """ + with pytest.warns(DeprecationWarning): + httpx.Client(app=application_factory([b"Hello, World!"])) From c51af4ba52d4103517a91f770e617cc579803b2d Mon Sep 17 00:00:00 2001 From: Tom Christie Date: Tue, 6 Feb 2024 11:40:35 +0100 Subject: [PATCH 24/26] Extensions docs (#3080) * Deprecate app=... 
in favour of explicit WSGITransport/ASGITransport * Linting * Linting * Update WSGITransport and ASGITransport docs * Deprecate app * Drop deprecation tests * Add CHANGELOG * Deprecate 'app=...' shortcut, rather than removing it. * Update CHANGELOG * Fix test_asgi.test_deprecated_shortcut * Extensions docs * Include 'extensions' in docs index * Update docs/advanced/extensions.md Co-authored-by: Kar Petrosyan <92274156+karpetrosyan@users.noreply.github.com> --------- Co-authored-by: Kar Petrosyan <92274156+karpetrosyan@users.noreply.github.com> --- docs/advanced/extensions.md | 201 ++++++++++++++++++++++++++++++++++++ mkdocs.yml | 1 + 2 files changed, 202 insertions(+) create mode 100644 docs/advanced/extensions.md diff --git a/docs/advanced/extensions.md b/docs/advanced/extensions.md new file mode 100644 index 0000000000..fa317eeb60 --- /dev/null +++ b/docs/advanced/extensions.md @@ -0,0 +1,201 @@ +# Extensions + +Request and response extensions provide an untyped space where additional information may be added. + +Extensions should be used for features that may not be available on all transports, and that do not fit neatly into [the simplified request/response model](https://www.encode.io/httpcore/extensions/) that the underlying `httpcore` package uses as its API. + +Several extensions are supported on the request: + +```python +# Request timeouts are actually implemented as an extension on +# the request, ensuring that they are passed throughout the +# entire call stack. 
+client = httpx.Client() +response = client.get( + "https://www.example.com", + extensions={"timeout": {"connect": 5.0}} +) +response.request.extensions["timeout"] +{"connect": 5.0} +``` + +And on the response: + +```python +client = httpx.Client() +response = client.get("https://www.example.com") +print(response.extensions["http_version"]) # b"HTTP/1.1" +# Other server responses could have been +# b"HTTP/0.9", b"HTTP/1.0", or b"HTTP/1.1" +``` + +## Request Extensions + +### `"trace"` + +The trace extension allows a callback handler to be installed to monitor the internal +flow of events within the underlying `httpcore` transport. + +The simplest way to explain this is with an example: + +```python +import httpx + +def log(event_name, info): + print(event_name, info) + +client = httpx.Client() +response = client.get("https://www.example.com/", extensions={"trace": log}) +# connection.connect_tcp.started {'host': 'www.example.com', 'port': 443, 'local_address': None, 'timeout': None} +# connection.connect_tcp.complete {'return_value': } +# connection.start_tls.started {'ssl_context': , 'server_hostname': b'www.example.com', 'timeout': None} +# connection.start_tls.complete {'return_value': } +# http11.send_request_headers.started {'request': } +# http11.send_request_headers.complete {'return_value': None} +# http11.send_request_body.started {'request': } +# http11.send_request_body.complete {'return_value': None} +# http11.receive_response_headers.started {'request': } +# http11.receive_response_headers.complete {'return_value': (b'HTTP/1.1', 200, b'OK', [(b'Age', b'553715'), (b'Cache-Control', b'max-age=604800'), (b'Content-Type', b'text/html; charset=UTF-8'), (b'Date', b'Thu, 21 Oct 2021 17:08:42 GMT'), (b'Etag', b'"3147526947+ident"'), (b'Expires', b'Thu, 28 Oct 2021 17:08:42 GMT'), (b'Last-Modified', b'Thu, 17 Oct 2019 07:18:26 GMT'), (b'Server', b'ECS (nyb/1DCD)'), (b'Vary', b'Accept-Encoding'), (b'X-Cache', b'HIT'), (b'Content-Length', b'1256')])} +# 
http11.receive_response_body.started {'request': } +# http11.receive_response_body.complete {'return_value': None} +# http11.response_closed.started {} +# http11.response_closed.complete {'return_value': None} +``` + +The `event_name` and `info` arguments here will be one of the following: + +* `{event_type}.{event_name}.started`, `<keyword arguments>` +* `{event_type}.{event_name}.complete`, `{"return_value": <...>}` +* `{event_type}.{event_name}.failed`, `{"exception": <...>}` + +Note that when using async code the handler function passed to `"trace"` must be an `async def ...` function. + +The following event types are currently exposed... + +**Establishing the connection** + +* `"connection.connect_tcp"` +* `"connection.connect_unix_socket"` +* `"connection.start_tls"` + +**HTTP/1.1 events** + +* `"http11.send_request_headers"` +* `"http11.send_request_body"` +* `"http11.receive_response_headers"` +* `"http11.receive_response_body"` +* `"http11.response_closed"` + +**HTTP/2 events** + +* `"http2.send_connection_init"` +* `"http2.send_request_headers"` +* `"http2.send_request_body"` +* `"http2.receive_response_headers"` +* `"http2.receive_response_body"` +* `"http2.response_closed"` + +The exact set of trace events may be subject to change across different versions of `httpcore`. If you need to rely on a particular set of events it is recommended that you pin installation of the package to a fixed version. + +### `"sni_hostname"` + +The server's hostname, which is used to confirm the hostname supplied by the SSL certificate. + +If you want to connect to an explicit IP address rather than using the standard DNS hostname lookup, then you'll need to use this request extension. + +For example: + +``` python +# Connect to '185.199.108.153' but use 'www.encode.io' in the Host header, +# and use 'www.encode.io' when SSL verifying the server hostname. 
+client = httpx.Client() +headers = {"Host": "www.encode.io"} +extensions = {"sni_hostname": "www.encode.io"} +response = client.get( + "https://185.199.108.153/path", + headers=headers, + extensions=extensions +) +``` + +### `"timeout"` + +A dictionary of `str: Optional[float]` timeout values. + +May include values for `'connect'`, `'read'`, `'write'`, or `'pool'`. + +For example: + +```python +# Timeout if a connection takes more than 5 seconds to be established, or if +# we are blocked waiting on the connection pool for more than 10 seconds. +client = httpx.Client() +response = client.get( + "https://www.example.com", + extensions={"timeout": {"connect": 5.0, "pool": 10.0}} +) +``` + +This extension is how the `httpx` timeouts are implemented, ensuring that the timeout values are associated with the request instance and passed throughout the stack. You shouldn't typically be working with this extension directly, but use the higher level `timeout` API instead. + +## Response Extensions + +### `"http_version"` + +The HTTP version, as bytes. Eg. `b"HTTP/1.1"`. + +When using HTTP/1.1 the response line includes an explicit version, and the value of this key could feasibly be one of `b"HTTP/0.9"`, `b"HTTP/1.0"`, or `b"HTTP/1.1"`. + +When using HTTP/2 there is no further response versioning included in the protocol, and the value of this key will always be `b"HTTP/2"`. + +### `"reason_phrase"` + +The reason-phrase of the HTTP response, as bytes. For example `b"OK"`. Some servers may include a custom reason phrase, although this is not recommended. + +HTTP/2 onwards does not include a reason phrase on the wire. + +When no key is included, a default based on the status code may be used. + +### `"stream_id"` + +When HTTP/2 is being used the `"stream_id"` response extension can be accessed to determine the ID of the data stream that the response was sent on. 
+ +### `"network_stream"` + +The `"network_stream"` extension allows developers to handle HTTP `CONNECT` and `Upgrade` requests, by providing an API that steps outside the standard request/response model, and can directly read or write to the network. + +The interface provided by the network stream: + +* `read(max_bytes, timeout = None) -> bytes` +* `write(buffer, timeout = None)` +* `close()` +* `start_tls(ssl_context, server_hostname = None, timeout = None) -> NetworkStream` +* `get_extra_info(info) -> Any` + +This API can be used as the foundation for working with HTTP proxies, WebSocket upgrades, and other advanced use-cases. + +See the [network backends documentation](https://www.encode.io/httpcore/network-backends/) for more information on working directly with network streams. + +**Extra network information** + +The network stream abstraction also allows access to various low-level information that may be exposed by the underlying socket: + +```python +response = httpx.get("https://www.example.com") +network_stream = response.extensions["network_stream"] + +client_addr = network_stream.get_extra_info("client_addr") +server_addr = network_stream.get_extra_info("server_addr") +print("Client address", client_addr) +print("Server address", server_addr) +``` + +The socket SSL information is also available through this interface, although you need to ensure that the underlying connection is still open, in order to access it... 
+ +```python +with httpx.stream("GET", "https://www.example.com") as response: + network_stream = response.extensions["network_stream"] + + ssl_object = network_stream.get_extra_info("ssl_object") + print("TLS version", ssl_object.version()) +``` \ No newline at end of file diff --git a/mkdocs.yml b/mkdocs.yml index 7c6fcbd56d..f6e4dfde9b 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -34,6 +34,7 @@ nav: - Event Hooks: 'advanced/event-hooks.md' - Transports: 'advanced/transports.md' - Text Encodings: 'advanced/text-encodings.md' + - Extensions: 'advanced/extensions.md' - Guides: - Async Support: 'async.md' - HTTP/2 Support: 'http2.md' From 3faa4a8f2e0d406167b913bbfd3afab087662d03 Mon Sep 17 00:00:00 2001 From: Tom Christie Date: Wed, 14 Feb 2024 11:14:02 +0000 Subject: [PATCH 25/26] Improve 'Custom transports' docs (#3081) --- docs/advanced/transports.md | 85 ++++++++++++++++++++++++++++++------- 1 file changed, 69 insertions(+), 16 deletions(-) diff --git a/docs/advanced/transports.md b/docs/advanced/transports.md index 7e0e21c6f9..d4e7615d38 100644 --- a/docs/advanced/transports.md +++ b/docs/advanced/transports.md @@ -2,7 +2,7 @@ HTTPX's `Client` also accepts a `transport` argument. This argument allows you to provide a custom Transport object that will be used to perform the actual sending of the requests. -## HTTPTransport +## HTTP Transport For some advanced configuration you might need to instantiate a transport class directly, and pass it to the client instance. One example is the @@ -83,7 +83,7 @@ with httpx.Client(transport=transport, base_url="http://testserver") as client: ... ``` -## ASGITransport +## ASGI Transport You can configure an `httpx` client to call directly into an async Python web application using the ASGI protocol. 
@@ -148,7 +148,7 @@ However it is suggested to use `LifespanManager` from [asgi-lifespan](https://gi ## Custom transports -A transport instance must implement the low-level Transport API, which deals +A transport instance must implement the low-level Transport API which deals with sending a single request, and returning a response. You should either subclass `httpx.BaseTransport` to implement a transport to use with `Client`, or subclass `httpx.AsyncBaseTransport` to implement a transport to @@ -166,28 +166,81 @@ A complete example of a custom transport implementation would be: import json import httpx - class HelloWorldTransport(httpx.BaseTransport): """ A mock transport that always returns a JSON "Hello, world!" response. """ def handle_request(self, request): - message = {"text": "Hello, world!"} - content = json.dumps(message).encode("utf-8") - stream = httpx.ByteStream(content) - headers = [(b"content-type", b"application/json")] - return httpx.Response(200, headers=headers, stream=stream) + return httpx.Response(200, json={"text": "Hello, world!"}) ``` -Which we can use in the same way: +Or this example, which uses a custom transport and `httpx.Mounts` to always redirect `http://` requests. -```pycon ->>> import httpx ->>> client = httpx.Client(transport=HelloWorldTransport()) ->>> response = client.get("https://example.org/") ->>> response.json() -{"text": "Hello, world!"} +```python +class HTTPSRedirect(httpx.BaseTransport): + """ + A transport that always redirects to HTTPS. + """ + def handle_request(self, request): + url = request.url.copy_with(scheme="https") + return httpx.Response(303, headers={"Location": str(url)}) + +# A client where any `http` requests are always redirected to `https` +transport = httpx.Mounts({ + 'http://': HTTPSRedirect(), + 'https://': httpx.HTTPTransport() +}) +client = httpx.Client(transport=transport) +``` + +A useful pattern here is custom transport classes that wrap the default HTTP implementation. For example... 
+ +```python +class DebuggingTransport(httpx.BaseTransport): + def __init__(self, **kwargs): + self._wrapper = httpx.HTTPTransport(**kwargs) + + def handle_request(self, request): + print(f">>> {request}") + response = self._wrapper.handle_request(request) + print(f"<<< {response}") + return response + + def close(self): + self._wrapper.close() + +transport = DebuggingTransport() +client = httpx.Client(transport=transport) +``` + +Here's another case, where we're using a round-robin across a number of different proxies... + +```python +class ProxyRoundRobin(httpx.BaseTransport): + def __init__(self, proxies, **kwargs): + self._transports = [ + httpx.HTTPTransport(proxy=proxy, **kwargs) + for proxy in proxies + ] + self._idx = 0 + + def handle_request(self, request): + transport = self._transports[self._idx] + self._idx = (self._idx + 1) % len(self._transports) + return transport.handle_request(request) + + def close(self): + for transport in self._transports: + transport.close() + +proxies = [ + httpx.Proxy("http://127.0.0.1:8081"), + httpx.Proxy("http://127.0.0.1:8082"), + httpx.Proxy("http://127.0.0.1:8083"), +] +transport = ProxyRoundRobin(proxies=proxies) +client = httpx.Client(transport=transport) ``` ## Mock transports From 326b9431c761e1ef1e00b9f760d1f654c8db48c6 Mon Sep 17 00:00:00 2001 From: Tom Christie Date: Wed, 21 Feb 2024 13:06:19 +0000 Subject: [PATCH 26/26] Version 0.27.0 (#3095) * Version 0.27.0 * Update CHANGELOG.md (#3097) wrong year I think? 
I'm new to github so idk if I'm doing this right Co-authored-by: ReadyRainFor <119354484+ReadyRainFor@users.noreply.github.com> * Update CHANGELOG.md * Update CHANGELOG.md --------- Co-authored-by: Rain <119354484+Rainkenstein@users.noreply.github.com> Co-authored-by: ReadyRainFor <119354484+ReadyRainFor@users.noreply.github.com> --- CHANGELOG.md | 2 ++ httpx/__version__.py | 2 +- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 7950a5f320..c063c0814a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,6 +6,8 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). ## Unreleased +## 0.27.0 (21st February, 2024) + ### Deprecated * The `app=...` shortcut has been deprecated. Use the explicit style of `transport=httpx.WSGITransport()` or `transport=httpx.ASGITransport()` instead. diff --git a/httpx/__version__.py b/httpx/__version__.py index 3edc842c69..c121a898de 100644 --- a/httpx/__version__.py +++ b/httpx/__version__.py @@ -1,3 +1,3 @@ __title__ = "httpx" __description__ = "A next generation HTTP client, for Python 3." -__version__ = "0.26.0" +__version__ = "0.27.0"