diff --git a/.github/CONTRIBUTING.md b/.github/CONTRIBUTING.md
new file mode 100644
index 0000000000..e1a953dc97
--- /dev/null
+++ b/.github/CONTRIBUTING.md
@@ -0,0 +1,235 @@
+# Contributing
+
+Thank you for being interested in contributing to HTTPX.
+There are many ways you can contribute to the project:
+
+- Try HTTPX and [report bugs/issues you find](https://github.com/encode/httpx/issues/new)
+- [Implement new features](https://github.com/encode/httpx/issues?q=is%3Aissue+is%3Aopen+label%3A%22good+first+issue%22)
+- [Review Pull Requests of others](https://github.com/encode/httpx/pulls)
+- Write documentation
+- Participate in discussions
+
+## Reporting Bugs or Other Issues
+
+Found something that HTTPX should support?
+Stumbled upon some unexpected behaviour?
+
+Contributions should generally start out with [a discussion](https://github.com/encode/httpx/discussions).
+Possible bugs may be raised as a "Potential Issue" discussion, feature requests may
+be raised as an "Ideas" discussion. We can then determine if the discussion needs
+to be escalated into an "Issue" or not, or if we'd consider a pull request.
+
+Try to be as descriptive as you can, and in the case of a bug report,
+provide as much information as possible, like:
+
+- OS platform
+- Python version
+- Installed dependencies and versions (`python -m pip freeze`)
+- Code snippet
+- Error traceback
+
+You should always try to reduce any examples to the *simplest possible case*
+that demonstrates the issue.
+
+Some possibly useful tips for narrowing down potential issues...
+
+- Does the issue exist on HTTP/1.1, or HTTP/2, or both?
+- Does the issue exist with `Client`, `AsyncClient`, or both?
+- When using `AsyncClient` does the issue exist when using `asyncio` or `trio`, or both?
+
+## Development
+
+To start developing HTTPX create a **fork** of the
+[HTTPX repository](https://github.com/encode/httpx) on GitHub.
+
+Then clone your fork with the following command replacing `YOUR-USERNAME` with
+your GitHub username:
+
+```shell
+$ git clone https://github.com/YOUR-USERNAME/httpx
+```
+
+You can now install the project and its dependencies using:
+
+```shell
+$ cd httpx
+$ scripts/install
+```
+
+## Testing and Linting
+
+We use custom shell scripts to automate testing, linting,
+and documentation building workflow.
+
+To run the tests, use:
+
+```shell
+$ scripts/test
+```
+
+!!! warning
+ The test suite spawns testing servers on ports **8000** and **8001**.
+ Make sure these are not in use, so the tests can run properly.
+
+You can run a single test script like this:
+
+```shell
+$ scripts/test -- tests/test_multipart.py
+```
+
+To run the code auto-formatting:
+
+```shell
+$ scripts/lint
+```
+
+Lastly, to run code checks separately (they are also run as part of `scripts/test`), run:
+
+```shell
+$ scripts/check
+```
+
+## Documenting
+
+Documentation pages are located under the `docs/` folder.
+
+To run the documentation site locally (useful for previewing changes), use:
+
+```shell
+$ scripts/docs
+```
+
+## Resolving Build / CI Failures
+
+Once you've submitted your pull request, the test suite will automatically run, and the results will show up in GitHub.
+If the test suite fails, you'll want to click through to the "Details" link, and try to identify why the test suite failed.
+
+
+
+
+
+Here are some common ways the test suite can fail:
+
+### Check Job Failed
+
+
+
+
+
+This job failing means there is either a code formatting issue or type-annotation issue.
+You can look at the job output to figure out why it failed, or run the following within a shell:
+
+```shell
+$ scripts/check
+```
+
+It may be worth it to run `$ scripts/lint` to attempt auto-formatting the code
+and if that job succeeds commit the changes.
+
+### Docs Job Failed
+
+This job failing means the documentation failed to build. This can happen for
+a variety of reasons like invalid markdown or missing configuration within `mkdocs.yml`.
+
+### Python 3.X Job Failed
+
+
+
+
+
+This job failing means the unit tests failed or not all code paths are covered by unit tests.
+
+If tests are failing you will see this message under the coverage report:
+
+`=== 1 failed, 435 passed, 1 skipped, 1 xfailed in 11.09s ===`
+
+If tests succeed but coverage doesn't reach our current threshold, you will see this
+message under the coverage report:
+
+`FAIL Required test coverage of 100% not reached. Total coverage: 99.00%`
+
+## Releasing
+
+*This section is targeted at HTTPX maintainers.*
+
+Before releasing a new version, create a pull request that includes:
+
+- **An update to the changelog**:
+ - We follow the format from [keepachangelog](https://keepachangelog.com/en/1.0.0/).
+ - [Compare](https://github.com/encode/httpx/compare/) `master` with the tag of the latest release, and list all entries that are of interest to our users:
+ - Things that **must** go in the changelog: added, changed, deprecated or removed features, and bug fixes.
+ - Things that **should not** go in the changelog: changes to documentation, tests or tooling.
+ - Try sorting entries in descending order of impact / importance.
+ - Keep it concise and to-the-point. 🎯
+- **A version bump**: see `__version__.py`.
+
+For an example, see [#1006](https://github.com/encode/httpx/pull/1006).
+
+Once the release PR is merged, create a
+[new release](https://github.com/encode/httpx/releases/new) including:
+
+- Tag version like `0.13.3`.
+- Release title `Version 0.13.3`
+- Description copied from the changelog.
+
+Once created this release will be automatically uploaded to PyPI.
+
+If something goes wrong with the PyPI job the release can be published using the
+`scripts/publish` script.
+
+## Development proxy setup
+
+To test and debug requests via a proxy it's best to run a proxy server locally.
+Any server should do, but HTTPCore's test suite uses
+[`mitmproxy`](https://mitmproxy.org/), which is written in Python, fully
+featured, and has excellent UI and tools for introspection of requests.
+
+You can install `mitmproxy` using `pip install mitmproxy` or [several
+other ways](https://docs.mitmproxy.org/stable/overview-installation/).
+
+`mitmproxy` does require setting up local TLS certificates for HTTPS requests,
+as its main purpose is to allow developers to inspect requests that pass through
+it. We can set them up as follows:
+
+1. [`pip install trustme-cli`](https://github.com/sethmlarson/trustme-cli/).
+2. `trustme-cli -i example.org www.example.org`, assuming you want to test
+connecting to that domain, this will create three files: `server.pem`,
+`server.key` and `client.pem`.
+3. `mitmproxy` requires a PEM file that includes the private key and the
+certificate so we need to concatenate them:
+`cat server.key server.pem > server.withkey.pem`.
+4. Start the proxy server `mitmproxy --certs server.withkey.pem`, or use the
+[other mitmproxy commands](https://docs.mitmproxy.org/stable/) with different
+UI options.
+
+At this point the server is ready to start serving requests, you'll need to
+configure HTTPX as described in the
+[proxy section](https://www.python-httpx.org/advanced/#http-proxying) and
+the [SSL certificates section](https://www.python-httpx.org/advanced/#ssl-certificates),
+this is where our previously generated `client.pem` comes in:
+
+```
+import httpx
+
+proxies = {"all": "http://127.0.0.1:8080/"}
+
+with httpx.Client(proxies=proxies, verify="/path/to/client.pem") as client:
+ response = client.get("https://example.org")
+ print(response.status_code) # should print 200
+```
+
+Note, however, that HTTPS requests will only succeed to the host specified
+in the SSL/TLS certificate we generated; HTTPS requests to other hosts will
+raise an error like:
+
+```
+ssl.SSLCertVerificationError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate
+verify failed: Hostname mismatch, certificate is not valid for
+'duckduckgo.com'. (_ssl.c:1108)
+```
+
+If you want to make requests to more hosts you'll need to regenerate the
+certificates and include all the hosts you intend to connect to in the
+second step, i.e.
+
+`trustme-cli -i example.org www.example.org duckduckgo.com www.duckduckgo.com`
diff --git a/.github/FUNDING.yml b/.github/FUNDING.yml
new file mode 100644
index 0000000000..2f87d94ca1
--- /dev/null
+++ b/.github/FUNDING.yml
@@ -0,0 +1 @@
+github: encode
diff --git a/.github/ISSUE_TEMPLATE/1-issue.md b/.github/ISSUE_TEMPLATE/1-issue.md
new file mode 100644
index 0000000000..5c0f8af677
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/1-issue.md
@@ -0,0 +1,16 @@
+---
+name: Issue
+about: Please only raise an issue if you've been advised to do so after discussion. Thanks! 🙏
+---
+
+The starting point for issues should usually be a discussion...
+
+https://github.com/encode/httpx/discussions
+
+Possible bugs may be raised as a "Potential Issue" discussion, feature requests may be raised as an "Ideas" discussion. We can then determine if the discussion needs to be escalated into an "Issue" or not.
+
+This will help us ensure that the "Issues" list properly reflects ongoing or needed work on the project.
+
+---
+
+- [ ] Initially raised as discussion #...
diff --git a/.github/ISSUE_TEMPLATE/2-bug-report.md b/.github/ISSUE_TEMPLATE/2-bug-report.md
deleted file mode 100644
index a206030729..0000000000
--- a/.github/ISSUE_TEMPLATE/2-bug-report.md
+++ /dev/null
@@ -1,61 +0,0 @@
----
-name: Bug report
-about: Report a bug to help improve this project
----
-
-### Checklist
-
-
-
-- [ ] The bug is reproducible against the latest release and/or `master`.
-- [ ] There are no similar issues or pull requests to fix it yet.
-
-### Describe the bug
-
-
-
-### To reproduce
-
-
-
-### Expected behavior
-
-
-
-### Actual behavior
-
-
-
-### Debugging material
-
-
-
-### Environment
-
-- OS:
-- Python version:
-- HTTPX version:
-- Async environment:
-- HTTP proxy:
-- Custom certificates:
-
-### Additional context
-
-
diff --git a/.github/ISSUE_TEMPLATE/3-feature-request.md b/.github/ISSUE_TEMPLATE/3-feature-request.md
deleted file mode 100644
index a4237e2840..0000000000
--- a/.github/ISSUE_TEMPLATE/3-feature-request.md
+++ /dev/null
@@ -1,34 +0,0 @@
----
-name: Feature request
-about: Suggest an idea for this project.
----
-
-### Checklist
-
-
-
-- [ ] There are no similar issues or pull requests for this yet.
-- [ ] I discussed this idea on the [community chat](https://gitter.im/encode/community) and feedback is positive.
-
-### Is your feature related to a problem? Please describe.
-
-
-
-## Describe the solution you would like.
-
-
-
-## Describe alternatives you considered
-
-
-
-## Additional context
-
-
-
diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml
index 2ad6e8e270..a491aa3502 100644
--- a/.github/ISSUE_TEMPLATE/config.yml
+++ b/.github/ISSUE_TEMPLATE/config.yml
@@ -1,7 +1,11 @@
# Ref: https://help.github.com/en/github/building-a-strong-community/configuring-issue-templates-for-your-repository#configuring-the-template-chooser
-blank_issues_enabled: true
+blank_issues_enabled: false
contact_links:
-- name: Question
+- name: Discussions
+ url: https://github.com/encode/httpx/discussions
+ about: >
+ The "Discussions" forum is where you want to start. 💖
+- name: Chat
url: https://gitter.im/encode/community
about: >
- Ask a question
+ Our community chat forum.
diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md
new file mode 100644
index 0000000000..13b7dfe1da
--- /dev/null
+++ b/.github/PULL_REQUEST_TEMPLATE.md
@@ -0,0 +1,9 @@
+The starting point for contributions should usually be [a discussion](https://github.com/encode/httpx/discussions)
+
+Simple documentation typos may be raised as stand-alone pull requests, but otherwise
+please ensure you've discussed your proposal prior to issuing a pull request.
+
+This will help us direct work appropriately, and ensure that any suggested changes
+have been okayed by the maintainers.
+
+- [ ] Initially raised as discussion #...
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 992f4e4e38..46384c5963 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,14 +4,56 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
-## 0.17.1
+## 0.18.0 (27th April, 2021)
+
+The 0.18.x release series formalises our low-level Transport API, introducing the base classes `httpx.BaseTransport` and `httpx.AsyncBaseTransport`.
+
+See the "[Writing custom transports](https://www.python-httpx.org/advanced/#writing-custom-transports)" documentation and the [`httpx.BaseTransport.handle_request()`](https://github.com/encode/httpx/blob/397aad98fdc8b7580a5fc3e88f1578b4302c6382/httpx/_transports/base.py#L77-L147) docstring for more complete details on implementing custom transports.
+
+Pull request #1522 includes a checklist of differences from the previous `httpcore` transport API, for developers implementing custom transports.
+
+The following API changes have been issuing deprecation warnings since 0.17.0 onwards, and are now fully deprecated...
+
+* You should now use `httpx.codes` consistently instead of `httpx.StatusCodes`.
+* Use `limits=...` instead of `pool_limits=...`.
+* Use `proxies={"http://": ...}` instead of `proxies={"http": ...}` for scheme-specific mounting.
+
+### Changed
+
+* Transport instances now inherit from `httpx.BaseTransport` or `httpx.AsyncBaseTransport`,
+ and should implement either the `handle_request` method or `handle_async_request` method. (Pull #1522, #1550)
+* The `response.ext` property and `Response(ext=...)` argument are now named `extensions`. (Pull #1522)
+* The recommendation to not use `data=` in favour of `content=` has now been escalated to a deprecation warning. (Pull #1573)
+* Drop `Response(on_close=...)` from API, since it was a bit of leaking implementation detail. (Pull #1572)
+* When using a client instance, cookies should always be set on the client, rather than on a per-request basis. We prefer enforcing a stricter API here because it provides clearer expectations around cookie persistence, particularly when redirects occur. (Pull #1574)
+* The runtime exception `httpx.ResponseClosed` is now named `httpx.StreamClosed`. (#1584)
+* The `httpx.QueryParams` model now presents an immutable interface. There is a discussion on [the design and motivation here](https://github.com/encode/httpx/discussions/1599). Use `client.params = client.params.merge(...)` instead of `client.params.update(...)`. The basic query manipulation methods are `query.set(...)`, `query.add(...)`, and `query.remove()`. (#1600)
+
+### Added
+
+* The `Request` and `Response` classes can now be serialized using pickle. (#1579)
+* Handle `data={"key": [None|int|float|bool]}` cases. (Pull #1539)
+* Support `httpx.URL(**kwargs)`, for example `httpx.URL(scheme="https", host="www.example.com", path="/")`, or `httpx.URL("https://www.example.com/", username="tom@gmail.com", password="123 456")`. (Pull #1601)
+* Support `url.copy_with(params=...)`. (Pull #1601)
+* Add `url.params` parameter, returning an immutable `QueryParams` instance. (Pull #1601)
+* Support query manipulation methods on the URL class. These are `url.copy_set_param()`, `url.copy_add_param()`, `url.copy_remove_param()`, `url.copy_merge_params()`. (Pull #1601)
+* The `httpx.URL` class now performs port normalization, so `:80` ports are stripped from `http` URLs and `:443` ports are stripped from `https` URLs. (Pull #1603)
+* The `URL.host` property returns unicode strings for internationalized domain names. The `URL.raw_host` property returns byte strings with IDNA escaping applied. (Pull #1590)
+
+### Fixed
+
+* Fix Content-Length for cases of `files=...` where unicode string is used as the file content. (Pull #1537)
+* Fix some cases of merging relative URLs against `Client(base_url=...)`. (Pull #1532)
+* The `request.content` attribute is now always available except for streaming content, which requires an explicit `.read()`. (Pull #1583)
+
+## 0.17.1 (March 15th, 2021)
### Fixed
* Type annotation on `CertTypes` allows `keyfile` and `password` to be optional. (Pull #1503)
* Fix httpcore pinned version. (Pull #1495)
-## 0.17.0
+## 0.17.0 (February 28th, 2021)
### Added
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
deleted file mode 100644
index 73a8b3db48..0000000000
--- a/CONTRIBUTING.md
+++ /dev/null
@@ -1,5 +0,0 @@
-#### Thanks for considering contributing to HTTPX!
-
-Our [documentation on contributing to HTTPX](https://www.encode.io/httpx/contributing/)
-contains information on how to report bugs, write and test new features, and
-debug issues with your own changes.
diff --git a/LICENSE.md b/LICENSE.md
index 8963b9f219..ab79d16a3f 100644
--- a/LICENSE.md
+++ b/LICENSE.md
@@ -1,27 +1,12 @@
Copyright © 2019, [Encode OSS Ltd](https://www.encode.io/).
All rights reserved.
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions are met:
+Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
-* Redistributions of source code must retain the above copyright notice, this
- list of conditions and the following disclaimer.
+* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
-* Redistributions in binary form must reproduce the above copyright notice,
- this list of conditions and the following disclaimer in the documentation
- and/or other materials provided with the distribution.
+* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
-* Neither the name of the copyright holder nor the names of its
- contributors may be used to endorse or promote products derived from
- this software without specific prior written permission.
+* Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
-AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
-IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
-FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
-DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
-SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
-CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
-OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/README.md b/README.md
index 66b2f8688f..e85a0142c9 100644
--- a/README.md
+++ b/README.md
@@ -16,7 +16,7 @@
HTTPX is a fully featured HTTP client for Python 3, which provides sync and async APIs, and support for both HTTP/1.1 and HTTP/2.
**Note**: _HTTPX should be considered in beta. We believe we've got the public API to
-a stable point now, but would strongly recommend pinning your dependencies to the `0.17.*`
+a stable point now, but would strongly recommend pinning your dependencies to the `0.18.*`
release, so that you're able to properly review [API changes between package updates](https://github.com/encode/httpx/blob/master/CHANGELOG.md). A 1.0 release is expected to be issued sometime in 2021._
---
@@ -122,6 +122,7 @@ The HTTPX project relies on these excellent libraries:
* `rfc3986` - URL parsing & normalization.
* `idna` - Internationalized domain name support.
* `sniffio` - Async library autodetection.
+* `async_generator` - Backport support for `contextlib.asynccontextmanager`. *(Only required for Python 3.6)*
* `brotlipy` - Decoding for "brotli" compressed responses. *(Optional)*
A huge amount of credit is due to `requests` for the API layout that
diff --git a/docs/advanced.md b/docs/advanced.md
index 61bf4c1938..4438cb2d6f 100644
--- a/docs/advanced.md
+++ b/docs/advanced.md
@@ -667,7 +667,7 @@ You can control the connection pool size using the `limits` keyword
argument on the client. It takes instances of `httpx.Limits` which define:
- `max_keepalive`, number of allowable keep-alive connections, or `None` to always
-allow. (Defaults 10)
+allow. (Default 20)
- `max_connections`, maximum number of allowable connections, or` None` for no limits.
(Default 100)
@@ -945,6 +945,32 @@ client = httpx.Client(verify=False)
The `client.get(...)` method and other request methods *do not* support changing the SSL settings on a per-request basis. If you need different SSL settings in different cases you should use more that one client instance, with different settings on each. Each client will then be using an isolated connection pool with a specific fixed SSL configuration on all connections within that pool.
+### Client Side Certificates
+
+You can also specify a local cert to use as a client-side certificate, either a path to an SSL certificate file, or a two-tuple of (certificate file, key file), or a three-tuple of (certificate file, key file, password).
+
+```python
+import httpx
+
+r = httpx.get("https://example.org", cert="path/to/client.pem")
+```
+
+Alternatively,
+
+```pycon
+>>> cert = ("path/to/client.pem", "path/to/client.key")
+>>> httpx.get("https://example.org", cert=cert)
+
+```
+
+or
+
+```pycon
+>>> cert = ("path/to/client.pem", "path/to/client.key", "password")
+>>> httpx.get("https://example.org", cert=cert)
+
+```
+
### Making HTTPS requests to a local server
When making requests to local servers, such as a development server running on `localhost`, you will typically be using unencrypted HTTP connections.
@@ -1015,31 +1041,39 @@ This [public gist](https://gist.github.com/florimondmanca/d56764d78d748eb9f73165
### Writing custom transports
-A transport instance must implement the Transport API defined by
-[`httpcore`](https://www.encode.io/httpcore/api/). You
-should either subclass `httpcore.AsyncHTTPTransport` to implement a transport to
-use with `AsyncClient`, or subclass `httpcore.SyncHTTPTransport` to implement a
-transport to use with `Client`.
+A transport instance must implement the low-level Transport API, which deals
+with sending a single request, and returning a response. You should either
+subclass `httpx.BaseTransport` to implement a transport to use with `Client`,
+or subclass `httpx.AsyncBaseTransport` to implement a transport to
+use with `AsyncClient`.
+
+At the layer of the transport API we're using plain primitives.
+No `Request` or `Response` models, no fancy `URL` or `Header` handling.
+This strict point of cut-off provides a clear design separation between the
+HTTPX API, and the low-level network handling.
+
+See the `handle_request` and `handle_async_request` docstrings for more details
+on the specifics of the Transport API.
A complete example of a custom transport implementation would be:
```python
import json
-import httpcore
+import httpx
-class HelloWorldTransport(httpcore.SyncHTTPTransport):
+class HelloWorldTransport(httpx.BaseTransport):
"""
A mock transport that always returns a JSON "Hello, world!" response.
"""
- def request(self, method, url, headers=None, stream=None, ext=None):
+ def handle_request(self, method, url, headers, stream, extensions):
message = {"text": "Hello, world!"}
content = json.dumps(message).encode("utf-8")
- stream = httpcore.PlainByteStream(content)
+ stream = httpx.ByteStream(content)
headers = [(b"content-type", b"application/json")]
- ext = {"http_version": b"HTTP/1.1"}
- return 200, headers, stream, ext
+ extensions = {}
+ return 200, headers, stream, extensions
```
Which we can use in the same way:
@@ -1084,24 +1118,23 @@ which transport an outgoing request should be routed via, with [the same style
used for specifying proxy routing](#routing).
```python
-import httpcore
import httpx
-class HTTPSRedirectTransport(httpcore.SyncHTTPTransport):
+class HTTPSRedirectTransport(httpx.BaseTransport):
"""
A transport that always redirects to HTTPS.
"""
- def request(self, method, url, headers=None, stream=None, ext=None):
+ def handle_request(self, method, url, headers, stream, extensions):
scheme, host, port, path = url
if port is None:
location = b"https://%s%s" % (host, path)
else:
location = b"https://%s:%d%s" % (host, port, path)
- stream = httpcore.PlainByteStream(b"")
+ stream = httpx.ByteStream(b"")
headers = [(b"location", location)]
- ext = {"http_version": b"HTTP/1.1"}
- return 303, headers, stream, ext
+ extensions = {}
+ return 303, headers, stream, extensions
# A client where any `http` requests are always redirected to `https`
diff --git a/docs/async.md b/docs/async.md
index 8ddee956ae..360be8feaa 100644
--- a/docs/async.md
+++ b/docs/async.md
@@ -237,3 +237,9 @@ async with httpx.AsyncClient(transport=transport, base_url="http://testserver")
```
See [the ASGI documentation](https://asgi.readthedocs.io/en/latest/specs/www.html#connection-scope) for more details on the `client` and `root_path` keys.
+
+## Startup/shutdown of ASGI apps
+
+It is not in the scope of HTTPX to trigger lifespan events of your app.
+
+However, it is suggested to use `LifespanManager` from [asgi-lifespan](https://github.com/florimondmanca/asgi-lifespan#usage) together with `AsyncClient`.
diff --git a/docs/code_of_conduct.md b/docs/code_of_conduct.md
new file mode 100644
index 0000000000..1647289871
--- /dev/null
+++ b/docs/code_of_conduct.md
@@ -0,0 +1,56 @@
+# Code of Conduct
+
+We expect contributors to our projects and online spaces to follow [the Python Software Foundation’s Code of Conduct](https://www.python.org/psf/conduct/).
+
+The Python community is made up of members from around the globe with a diverse set of skills, personalities, and experiences. It is through these differences that our community experiences great successes and continued growth. When you're working with members of the community, this Code of Conduct will help steer your interactions and keep Python a positive, successful, and growing community.
+
+## Our Community
+
+Members of the Python community are **open, considerate, and respectful**. Behaviours that reinforce these values contribute to a positive environment, and include:
+
+* **Being open.** Members of the community are open to collaboration, whether it's on PEPs, patches, problems, or otherwise.
+* **Focusing on what is best for the community.** We're respectful of the processes set forth in the community, and we work within them.
+* **Acknowledging time and effort.** We're respectful of the volunteer efforts that permeate the Python community. We're thoughtful when addressing the efforts of others, keeping in mind that often times the labor was completed simply for the good of the community.
+* **Being respectful of differing viewpoints and experiences.** We're receptive to constructive comments and criticism, as the experiences and skill sets of other members contribute to the whole of our efforts.
+* **Showing empathy towards other community members.** We're attentive in our communications, whether in person or online, and we're tactful when approaching differing views.
+* **Being considerate.** Members of the community are considerate of their peers -- other Python users.
+* **Being respectful.** We're respectful of others, their positions, their skills, their commitments, and their efforts.
+* **Gracefully accepting constructive criticism.** When we disagree, we are courteous in raising our issues.
+* **Using welcoming and inclusive language.** We're accepting of all who wish to take part in our activities, fostering an environment where anyone can participate and everyone can make a difference.
+
+## Our Standards
+
+Every member of our community has the right to have their identity respected. The Python community is dedicated to providing a positive experience for everyone, regardless of age, gender identity and expression, sexual orientation, disability, physical appearance, body size, ethnicity, nationality, race, or religion (or lack thereof), education, or socio-economic status.
+
+## Inappropriate Behavior
+
+Examples of unacceptable behavior by participants include:
+
+* Harassment of any participants in any form
+* Deliberate intimidation, stalking, or following
+* Logging or taking screenshots of online activity for harassment purposes
+* Publishing others' private information, such as a physical or electronic address, without explicit permission
+* Violent threats or language directed against another person
+* Incitement of violence or harassment towards any individual, including encouraging a person to commit suicide or to engage in self-harm
+* Creating additional online accounts in order to harass another person or circumvent a ban
+* Sexual language and imagery in online communities or in any conference venue, including talks
+* Insults, put downs, or jokes that are based upon stereotypes, that are exclusionary, or that hold others up for ridicule
+* Excessive swearing
+* Unwelcome sexual attention or advances
+* Unwelcome physical contact, including simulated physical contact (eg, textual descriptions like "hug" or "backrub") without consent or after a request to stop
+* Pattern of inappropriate social contact, such as requesting/assuming inappropriate levels of intimacy with others
+* Sustained disruption of online community discussions, in-person presentations, or other in-person events
+* Continued one-on-one communication after requests to cease
+* Other conduct that is inappropriate for a professional audience including people of many different backgrounds
+
+Community members asked to stop any inappropriate behavior are expected to comply immediately.
+
+## Enforcement
+
+We take Code of Conduct violations seriously, and will act to ensure our spaces are welcoming, inclusive, and professional environments to communicate in.
+
+If you need to raise a Code of Conduct report, you may do so privately by email to tom@tomchristie.com.
+
+Reports will be treated confidentially.
+
+Alternately you may [make a report to the Python Software Foundation](https://www.python.org/psf/conduct/reporting/).
diff --git a/docs/compatibility.md b/docs/compatibility.md
index 99faf43232..7aed9dc1ed 100644
--- a/docs/compatibility.md
+++ b/docs/compatibility.md
@@ -28,8 +28,8 @@ And using `data=...` to send form data:
httpx.post(..., data={"message": "Hello, world"})
```
-If you're using a type checking tool such as `mypy`, you'll see warnings issues if using test/byte content with the `data` argument.
-However, for compatibility reasons with `requests`, we do still handle the case where `data=...` is used with raw binary and text contents.
+Using the `data=` argument will raise a deprecation warning,
+and is expected to be fully removed with the HTTPX 1.0 release.
## Content encoding
@@ -37,6 +37,26 @@ HTTPX uses `utf-8` for encoding `str` request bodies. For example, when using `c
For response bodies, assuming the server didn't send an explicit encoding then HTTPX will do its best to figure out an appropriate encoding. Unlike Requests which uses the `chardet` library, HTTPX relies on a plainer fallback strategy (basically attempting UTF-8, or using Windows-1252 as a fallback). This strategy should be robust enough to handle the vast majority of use cases.
+## Cookies
+
+If using a client instance, then cookies should always be set on the client rather than on a per-request basis.
+
+This usage is supported:
+
+```python
+client = httpx.Client(cookies=...)
+client.post(...)
+```
+
+This usage is **not** supported:
+
+```python
+client = httpx.Client()
+client.post(..., cookies=...)
+```
+
+We prefer enforcing a stricter API here because it provides clearer expectations around cookie persistence, particularly when redirects occur.
+
## Status Codes
In our documentation we prefer the uppercased versions, such as `codes.NOT_FOUND`, but also provide lower-cased versions for API compatibility with `requests`.
@@ -124,6 +144,11 @@ On the other hand, HTTPX uses [HTTPCore](https://github.com/encode/httpcore) as
`requests` omits `params` whose values are `None` (e.g. `requests.get(..., params={"foo": None})`). This is not supported by HTTPX.
+## HEAD redirection
+
+In `requests`, all top-level APIs follow redirects by default, except for `HEAD` requests.
+For consistency, HTTPX makes `HEAD` follow redirects by default as well.
+
## Determining the next redirect request
When using `allow_redirects=False`, the `requests` library exposes an attribute `response.next`, which can be used to obtain the next redirect request.
@@ -142,6 +167,6 @@ while request is not None:
`requests` allows event hooks to mutate `Request` and `Response` objects. See [examples](https://requests.readthedocs.io/en/master/user/advanced/#event-hooks) given in the documentation for `requests`.
-In HTTPX, event hooks may access properties of requests and responses, but event hook callbacks cannot mutate the original request/response.
+In HTTPX, event hooks may access properties of requests and responses, but event hook callbacks cannot mutate the original request/response.
If you are looking for more control, consider checking out [Custom Transports](advanced.md#custom-transports).
diff --git a/docs/contributing.md b/docs/contributing.md
index 9732c81059..e1a953dc97 100644
--- a/docs/contributing.md
+++ b/docs/contributing.md
@@ -12,10 +12,13 @@ There are many ways you can contribute to the project:
## Reporting Bugs or Other Issues
Found something that HTTPX should support?
-Stumbled upon some unexpected behavior?
+Stumbled upon some unexpected behaviour?
+
+Contributions should generally start out with [a discussion](https://github.com/encode/httpx/discussions).
+Possible bugs may be raised as a "Potential Issue" discussion, feature requests may
+be raised as an "Ideas" discussion. We can then determine if the discussion needs
+to be escalated into an "Issue" or not, or if we'd consider a pull request.
-Feel free to open an issue at the
-[issue tracker](https://github.com/encode/httpx/issues).
Try to be more descriptive as you can and in case of a bug report,
provide as much information as possible like:
@@ -25,6 +28,15 @@ provide as much information as possible like:
- Code snippet
- Error traceback
+You should always try to reduce any examples to the *simplest possible case*
+that demonstrates the issue.
+
+Some possibly useful tips for narrowing down potential issues...
+
+- Does the issue exist on HTTP/1.1, or HTTP/2, or both?
+- Does the issue exist with `Client`, `AsyncClient`, or both?
+- When using `AsyncClient` does the issue exist when using `asyncio` or `trio`, or both?
+
## Development
To start developing HTTPX create a **fork** of the
diff --git a/docs/exceptions.md b/docs/exceptions.md
index 949ac47a19..3de8fc6b57 100644
--- a/docs/exceptions.md
+++ b/docs/exceptions.md
@@ -162,11 +162,11 @@ except httpx.HTTPStatusError as exc:
::: httpx.StreamConsumed
:docstring:
-::: httpx.ResponseNotRead
+::: httpx.StreamClosed
:docstring:
-::: httpx.RequestNotRead
+::: httpx.ResponseNotRead
:docstring:
-::: httpx.ResponseClosed
+::: httpx.RequestNotRead
:docstring:
diff --git a/docs/index.md b/docs/index.md
index 3da239f7e8..8a41dca3b1 100644
--- a/docs/index.md
+++ b/docs/index.md
@@ -27,7 +27,7 @@ HTTPX is a fully featured HTTP client for Python 3, which provides sync and asyn
!!! note
HTTPX should currently be considered in beta.
- We believe we've got the public API to a stable point now, but would strongly recommend pinning your dependencies to the `0.17.*` release, so that you're able to properly review [API changes between package updates](https://github.com/encode/httpx/blob/master/CHANGELOG.md).
+ We believe we've got the public API to a stable point now, but would strongly recommend pinning your dependencies to the `0.18.*` release, so that you're able to properly review [API changes between package updates](https://github.com/encode/httpx/blob/master/CHANGELOG.md).
A 1.0 release is expected to be issued sometime in 2021.
@@ -101,7 +101,7 @@ the [async support](async.md) section, or the [HTTP/2](http2.md) section.
The [Developer Interface](api.md) provides a comprehensive API reference.
-To find out about tools that integrate with HTTPX, see [Third Party Packages](third-party-packages.md).
+To find out about tools that integrate with HTTPX, see [Third Party Packages](third_party_packages.md).
## Dependencies
@@ -114,6 +114,7 @@ The HTTPX project relies on these excellent libraries:
* `rfc3986` - URL parsing & normalization.
* `idna` - Internationalized domain name support.
* `sniffio` - Async library autodetection.
+* `async_generator` - Backport support for `contextlib.asynccontextmanager`. *(Only required for Python 3.6)*
* `brotlipy` - Decoding for "brotli" compressed responses. *(Optional)*
A huge amount of credit is due to `requests` for the API layout that
diff --git a/docs/quickstart.md b/docs/quickstart.md
index 4f15549e71..4afaff2430 100644
--- a/docs/quickstart.md
+++ b/docs/quickstart.md
@@ -408,7 +408,8 @@ with additional API for accessing cookies by their domain or path.
## Redirection and History
-By default, HTTPX will follow redirects for anything except `HEAD` requests.
+By default, HTTPX will follow redirects for all HTTP methods.
+
The `history` property of the response can be used to inspect any followed redirects.
It contains a list of any redirect responses that were followed, in the order
@@ -436,16 +437,6 @@ You can modify the default redirection handling with the allow_redirects paramet
[]
```
-If you’re making a `HEAD` request, you can use this to enable redirection:
-
-```pycon
->>> r = httpx.head('http://github.com/', allow_redirects=True)
->>> r.url
-'https://github.com/'
->>> r.history
-[]
-```
-
## Timeouts
HTTPX defaults to including reasonable timeouts for all network operations,
diff --git a/docs/third-party-packages.md b/docs/third_party_packages.md
similarity index 78%
rename from docs/third-party-packages.md
rename to docs/third_party_packages.md
index 02a00e70a1..8c60b11d91 100644
--- a/docs/third-party-packages.md
+++ b/docs/third_party_packages.md
@@ -18,6 +18,18 @@ The ultimate Python library in building OAuth and OpenID Connect clients and ser
An asynchronous GitHub API library. Includes [HTTPX support](https://gidgethub.readthedocs.io/en/latest/httpx.html).
+### HTTPX-Auth
+
+[GitHub](https://github.com/Colin-b/httpx_auth) - [Documentation](https://colin-b.github.io/httpx_auth/)
+
+Provides authentication classes to be used with HTTPX [authentication parameter](advanced.md#customizing-authentication).
+
+### pytest-HTTPX
+
+[GitHub](https://github.com/Colin-b/pytest_httpx) - [Documentation](https://colin-b.github.io/pytest_httpx/)
+
+Provides `httpx_mock` [pytest](https://docs.pytest.org/en/latest/) fixture to mock HTTPX within test cases.
+
### RESPX
[GitHub](https://github.com/lundberg/respx) - [Documentation](https://lundberg.github.io/respx/)
diff --git a/httpx/__init__.py b/httpx/__init__.py
index 96d9e0c2f8..9a27790f4c 100644
--- a/httpx/__init__.py
+++ b/httpx/__init__.py
@@ -3,6 +3,7 @@
from ._auth import Auth, BasicAuth, DigestAuth
from ._client import AsyncClient, Client
from ._config import Limits, Proxy, Timeout, create_ssl_context
+from ._content import ByteStream
from ._exceptions import (
CloseError,
ConnectError,
@@ -22,8 +23,8 @@
RemoteProtocolError,
RequestError,
RequestNotRead,
- ResponseClosed,
ResponseNotRead,
+ StreamClosed,
StreamConsumed,
StreamError,
TimeoutException,
@@ -34,8 +35,14 @@
WriteTimeout,
)
from ._models import URL, Cookies, Headers, QueryParams, Request, Response
-from ._status_codes import StatusCode, codes
+from ._status_codes import codes
from ._transports.asgi import ASGITransport
+from ._transports.base import (
+ AsyncBaseTransport,
+ AsyncByteStream,
+ BaseTransport,
+ SyncByteStream,
+)
from ._transports.default import AsyncHTTPTransport, HTTPTransport
from ._transports.mock import MockTransport
from ._transports.wsgi import WSGITransport
@@ -45,10 +52,14 @@
"__title__",
"__version__",
"ASGITransport",
+ "AsyncBaseTransport",
+ "AsyncByteStream",
"AsyncClient",
"AsyncHTTPTransport",
"Auth",
+ "BaseTransport",
"BasicAuth",
+ "ByteStream",
"Client",
"CloseError",
"codes",
@@ -88,12 +99,12 @@
"RequestError",
"RequestNotRead",
"Response",
- "ResponseClosed",
"ResponseNotRead",
- "StatusCode",
"stream",
+ "StreamClosed",
"StreamConsumed",
"StreamError",
+ "SyncByteStream",
"Timeout",
"TimeoutException",
"TooManyRedirects",
diff --git a/httpx/__version__.py b/httpx/__version__.py
index 90fae6b2fb..b847686501 100644
--- a/httpx/__version__.py
+++ b/httpx/__version__.py
@@ -1,3 +1,3 @@
__title__ = "httpx"
__description__ = "A next generation HTTP client, for Python 3."
-__version__ = "0.17.1"
+__version__ = "0.18.0"
diff --git a/httpx/_api.py b/httpx/_api.py
index 8cfaf6dfda..ff40ce65e1 100644
--- a/httpx/_api.py
+++ b/httpx/_api.py
@@ -1,8 +1,9 @@
import typing
+from contextlib import contextmanager
-from ._client import Client, StreamContextManager
+from ._client import Client
from ._config import DEFAULT_TIMEOUT_CONFIG
-from ._models import Request, Response
+from ._models import Response
from ._types import (
AuthTypes,
CertTypes,
@@ -68,7 +69,8 @@ def request(
* **allow_redirects** - *(optional)* Enables or disables HTTP redirects.
* **verify** - *(optional)* SSL certificates (a.k.a CA bundle) used to
verify the identity of requested hosts. Either `True` (default CA bundle),
- a path to an SSL certificate file, or `False` (disable verification).
+ a path to an SSL certificate file, an `ssl.SSLContext`, or `False`
+ (which will disable verification).
* **cert** - *(optional)* An SSL certificate used by the requested host
to authenticate the client. Either a path to an SSL certificate file, or
two-tuple of (certificate file, key file), or a three-tuple of (certificate
@@ -88,7 +90,12 @@ def request(
```
"""
with Client(
- proxies=proxies, cert=cert, verify=verify, timeout=timeout, trust_env=trust_env
+ cookies=cookies,
+ proxies=proxies,
+ cert=cert,
+ verify=verify,
+ timeout=timeout,
+ trust_env=trust_env,
) as client:
return client.request(
method=method,
@@ -99,12 +106,12 @@ def request(
json=json,
params=params,
headers=headers,
- cookies=cookies,
auth=auth,
allow_redirects=allow_redirects,
)
+@contextmanager
def stream(
method: str,
url: URLTypes,
@@ -123,7 +130,7 @@ def stream(
verify: VerifyTypes = True,
cert: CertTypes = None,
trust_env: bool = True,
-) -> StreamContextManager:
+) -> typing.Iterator[Response]:
"""
Alternative to `httpx.request()` that streams the response body
instead of loading it into memory at once.
@@ -134,26 +141,22 @@ def stream(
[0]: /quickstart#streaming-responses
"""
- client = Client(proxies=proxies, cert=cert, verify=verify, trust_env=trust_env)
- request = Request(
- method=method,
- url=url,
- params=params,
- content=content,
- data=data,
- files=files,
- json=json,
- headers=headers,
- cookies=cookies,
- )
- return StreamContextManager(
- client=client,
- request=request,
- auth=auth,
- timeout=timeout,
- allow_redirects=allow_redirects,
- close_client=True,
- )
+ with Client(
+ cookies=cookies, proxies=proxies, cert=cert, verify=verify, trust_env=trust_env
+ ) as client:
+ with client.stream(
+ method=method,
+ url=url,
+ content=content,
+ data=data,
+ files=files,
+ json=json,
+ params=params,
+ headers=headers,
+ auth=auth,
+ allow_redirects=allow_redirects,
+ ) as response:
+ yield response
def get(
diff --git a/httpx/_client.py b/httpx/_client.py
index 3465a10b75..ae42e9eac6 100644
--- a/httpx/_client.py
+++ b/httpx/_client.py
@@ -2,12 +2,12 @@
import enum
import typing
import warnings
+from contextlib import contextmanager
from types import TracebackType
-import httpcore
-
from .__version__ import __version__
from ._auth import Auth, BasicAuth, FunctionAuth
+from ._compat import asynccontextmanager
from ._config import (
DEFAULT_LIMITS,
DEFAULT_MAX_REDIRECTS,
@@ -20,20 +20,24 @@
)
from ._decoders import SUPPORTED_DECODERS
from ._exceptions import (
- HTTPCORE_EXC_MAP,
InvalidURL,
RemoteProtocolError,
TooManyRedirects,
- map_exceptions,
+ request_context,
)
from ._models import URL, Cookies, Headers, QueryParams, Request, Response
from ._status_codes import codes
from ._transports.asgi import ASGITransport
+from ._transports.base import (
+ AsyncBaseTransport,
+ AsyncByteStream,
+ BaseTransport,
+ SyncByteStream,
+)
from ._transports.default import AsyncHTTPTransport, HTTPTransport
from ._transports.wsgi import WSGITransport
from ._types import (
AuthTypes,
- ByteStream,
CertTypes,
CookieTypes,
HeaderTypes,
@@ -53,7 +57,6 @@
get_environment_proxies,
get_logger,
same_origin,
- warn_deprecated,
)
# The type annotation for @classmethod and context managers here follows PEP 484
@@ -71,11 +74,65 @@
class ClientState(enum.Enum):
+ # UNOPENED:
+ # The client has been instantiated, but has not been used to send a request,
+ # or been opened by entering the context of a `with` block.
UNOPENED = 1
+ # OPENED:
+ # The client has either sent a request, or is within a `with` block.
OPENED = 2
+ # CLOSED:
+ # The client has either exited the `with` block, or `close()` has
+ # been called explicitly.
CLOSED = 3
+class BoundSyncStream(SyncByteStream):
+ """
+ A byte stream that is bound to a given response instance, and that
+ ensures the `response.elapsed` is set once the response is closed.
+ """
+
+ def __init__(
+ self, stream: SyncByteStream, response: Response, timer: Timer
+ ) -> None:
+ self._stream = stream
+ self._response = response
+ self._timer = timer
+
+ def __iter__(self) -> typing.Iterator[bytes]:
+ for chunk in self._stream:
+ yield chunk
+
+ def close(self) -> None:
+ seconds = self._timer.sync_elapsed()
+ self._response.elapsed = datetime.timedelta(seconds=seconds)
+ self._stream.close()
+
+
+class BoundAsyncStream(AsyncByteStream):
+ """
+ An async byte stream that is bound to a given response instance, and that
+ ensures the `response.elapsed` is set once the response is closed.
+ """
+
+ def __init__(
+ self, stream: AsyncByteStream, response: Response, timer: Timer
+ ) -> None:
+ self._stream = stream
+ self._response = response
+ self._timer = timer
+
+ async def __aiter__(self) -> typing.AsyncIterator[bytes]:
+ async for chunk in self._stream:
+ yield chunk
+
+ async def aclose(self) -> None:
+ seconds = await self._timer.async_elapsed()
+ self._response.elapsed = datetime.timedelta(seconds=seconds)
+ await self._stream.aclose()
+
+
class BaseClient:
def __init__(
self,
@@ -233,51 +290,6 @@ def params(self) -> QueryParams:
def params(self, params: QueryParamTypes) -> None:
self._params = QueryParams(params)
- def stream(
- self,
- method: str,
- url: URLTypes,
- *,
- content: RequestContent = None,
- data: RequestData = None,
- files: RequestFiles = None,
- json: typing.Any = None,
- params: QueryParamTypes = None,
- headers: HeaderTypes = None,
- cookies: CookieTypes = None,
- auth: typing.Union[AuthTypes, UnsetType] = UNSET,
- allow_redirects: bool = True,
- timeout: typing.Union[TimeoutTypes, UnsetType] = UNSET,
- ) -> "StreamContextManager":
- """
- Alternative to `httpx.request()` that streams the response body
- instead of loading it into memory at once.
-
- **Parameters**: See `httpx.request`.
-
- See also: [Streaming Responses][0]
-
- [0]: /quickstart#streaming-responses
- """
- request = self.build_request(
- method=method,
- url=url,
- content=content,
- data=data,
- files=files,
- json=json,
- params=params,
- headers=headers,
- cookies=cookies,
- )
- return StreamContextManager(
- client=self,
- request=request,
- auth=auth,
- allow_redirects=allow_redirects,
- timeout=timeout,
- )
-
def build_request(
self,
method: str,
@@ -325,10 +337,19 @@ def _merge_url(self, url: URLTypes) -> URL:
"""
merge_url = URL(url)
if merge_url.is_relative_url:
- # We always ensure the base_url paths include the trailing '/',
- # and always strip any leading '/' from the merge URL.
- merge_url = merge_url.copy_with(raw_path=merge_url.raw_path.lstrip(b"/"))
- return self.base_url.join(merge_url)
+ # To merge URLs we always append to the base URL. To get this
+ # behaviour correct we always ensure the base URL ends in a '/'
+        # separator, and strip any leading '/' from the merge URL.
+ #
+ # So, eg...
+ #
+ # >>> client = Client(base_url="https://www.example.com/subpath")
+ # >>> client.base_url
+ # URL('https://www.example.com/subpath/')
+ # >>> client.build_request("GET", "/path").url
+ # URL('https://www.example.com/subpath/path')
+ merge_raw_path = self.base_url.raw_path + merge_url.raw_path.lstrip(b"/")
+ return self.base_url.copy_with(raw_path=merge_raw_path)
return merge_url
def _merge_cookies(
@@ -364,7 +385,7 @@ def _merge_queryparams(
"""
if params or self.params:
merged_queryparams = QueryParams(self.params)
- merged_queryparams.update(params)
+ merged_queryparams = merged_queryparams.merge(params)
return merged_queryparams
return params
@@ -494,7 +515,7 @@ def _redirect_headers(self, request: Request, url: URL, method: str) -> Headers:
def _redirect_stream(
self, request: Request, method: str
- ) -> typing.Optional[ByteStream]:
+ ) -> typing.Optional[typing.Union[SyncByteStream, AsyncByteStream]]:
"""
Return the body that should be used for the redirect request.
"""
@@ -527,7 +548,8 @@ class Client(BaseClient):
sending requests.
* **verify** - *(optional)* SSL certificates (a.k.a CA bundle) used to
verify the identity of requested hosts. Either `True` (default CA bundle),
- a path to an SSL certificate file, or `False` (disable verification).
+ a path to an SSL certificate file, an `ssl.SSLContext`, or `False`
+ (which will disable verification).
* **cert** - *(optional)* An SSL certificate used by the requested host
to authenticate the client. Either a path to an SSL certificate file, or
two-tuple of (certificate file, key file), or a three-tuple of (certificate
@@ -560,14 +582,13 @@ def __init__(
cert: CertTypes = None,
http2: bool = False,
proxies: ProxiesTypes = None,
- mounts: typing.Mapping[str, httpcore.SyncHTTPTransport] = None,
+ mounts: typing.Mapping[str, BaseTransport] = None,
timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG,
limits: Limits = DEFAULT_LIMITS,
- pool_limits: Limits = None,
max_redirects: int = DEFAULT_MAX_REDIRECTS,
event_hooks: typing.Mapping[str, typing.List[typing.Callable]] = None,
base_url: URLTypes = "",
- transport: httpcore.SyncHTTPTransport = None,
+ transport: BaseTransport = None,
app: typing.Callable = None,
trust_env: bool = True,
):
@@ -592,13 +613,6 @@ def __init__(
"Make sure to install httpx using `pip install httpx[http2]`."
) from None
- if pool_limits is not None:
- warn_deprecated(
- "Client(..., pool_limits=...) is deprecated and will raise "
- "errors in the future. Use Client(..., limits=...) instead."
- )
- limits = pool_limits
-
allow_env_proxies = trust_env and app is None and transport is None
proxy_map = self._get_proxy_map(proxies, allow_env_proxies)
@@ -611,9 +625,7 @@ def __init__(
app=app,
trust_env=trust_env,
)
- self._mounts: typing.Dict[
- URLPattern, typing.Optional[httpcore.SyncHTTPTransport]
- ] = {
+ self._mounts: typing.Dict[URLPattern, typing.Optional[BaseTransport]] = {
URLPattern(key): None
if proxy is None
else self._init_proxy_transport(
@@ -639,10 +651,10 @@ def _init_transport(
cert: CertTypes = None,
http2: bool = False,
limits: Limits = DEFAULT_LIMITS,
- transport: httpcore.SyncHTTPTransport = None,
+ transport: BaseTransport = None,
app: typing.Callable = None,
trust_env: bool = True,
- ) -> httpcore.SyncHTTPTransport:
+ ) -> BaseTransport:
if transport is not None:
return transport
@@ -661,7 +673,7 @@ def _init_proxy_transport(
http2: bool = False,
limits: Limits = DEFAULT_LIMITS,
trust_env: bool = True,
- ) -> httpcore.SyncHTTPTransport:
+ ) -> BaseTransport:
return HTTPTransport(
verify=verify,
cert=cert,
@@ -671,7 +683,7 @@ def _init_proxy_transport(
proxy=proxy,
)
- def _transport_for_url(self, url: URL) -> httpcore.SyncHTTPTransport:
+ def _transport_for_url(self, url: URL) -> BaseTransport:
"""
Returns the transport instance that should be used for a given URL.
This will either be the standard connection pool, or a proxy.
@@ -714,6 +726,14 @@ def request(
[0]: /advanced/#merging-of-configuration
"""
+ if cookies is not None:
+ message = (
+ "Setting per-request cookies=<...> is being deprecated, because "
+ "the expected behaviour on cookie persistence is ambiguous. Set "
+ "cookies directly on the client instance instead."
+ )
+ warnings.warn(message, DeprecationWarning)
+
request = self.build_request(
method=method,
url=url,
@@ -729,6 +749,56 @@ def request(
request, auth=auth, allow_redirects=allow_redirects, timeout=timeout
)
+ @contextmanager
+ def stream(
+ self,
+ method: str,
+ url: URLTypes,
+ *,
+ content: RequestContent = None,
+ data: RequestData = None,
+ files: RequestFiles = None,
+ json: typing.Any = None,
+ params: QueryParamTypes = None,
+ headers: HeaderTypes = None,
+ cookies: CookieTypes = None,
+ auth: typing.Union[AuthTypes, UnsetType] = UNSET,
+ allow_redirects: bool = True,
+ timeout: typing.Union[TimeoutTypes, UnsetType] = UNSET,
+ ) -> typing.Iterator[Response]:
+ """
+ Alternative to `httpx.request()` that streams the response body
+ instead of loading it into memory at once.
+
+ **Parameters**: See `httpx.request`.
+
+ See also: [Streaming Responses][0]
+
+ [0]: /quickstart#streaming-responses
+ """
+ request = self.build_request(
+ method=method,
+ url=url,
+ content=content,
+ data=data,
+ files=files,
+ json=json,
+ params=params,
+ headers=headers,
+ cookies=cookies,
+ )
+ response = self.send(
+ request=request,
+ auth=auth,
+ allow_redirects=allow_redirects,
+ timeout=timeout,
+ stream=True,
+ )
+ try:
+ yield response
+ finally:
+ response.close()
+
def send(
self,
request: Request,
@@ -766,21 +836,18 @@ def send(
allow_redirects=allow_redirects,
history=[],
)
-
- if not stream:
- try:
+ try:
+ if not stream:
response.read()
- finally:
- response.close()
- try:
for hook in self._event_hooks["response"]:
hook(response)
- except Exception:
- response.close()
- raise
- return response
+ return response
+
+ except Exception as exc:
+ response.close()
+ raise exc
def _send_handling_auth(
self,
@@ -804,18 +871,20 @@ def _send_handling_auth(
history=history,
)
try:
- next_request = auth_flow.send(response)
- except StopIteration:
- return response
- except BaseException as exc:
- response.close()
- raise exc from None
- else:
+ try:
+ next_request = auth_flow.send(response)
+ except StopIteration:
+ return response
+
response.history = list(history)
response.read()
request = next_request
history.append(response)
+ except Exception as exc:
+ response.close()
+ raise exc
+
def _send_handling_redirects(
self,
request: Request,
@@ -830,19 +899,24 @@ def _send_handling_redirects(
)
response = self._send_single_request(request, timeout)
- response.history = list(history)
+ try:
+ response.history = list(history)
- if not response.is_redirect:
- return response
+ if not response.is_redirect:
+ return response
- if allow_redirects:
- response.read()
- request = self._build_redirect_request(request, response)
- history = history + [response]
+ request = self._build_redirect_request(request, response)
+ history = history + [response]
- if not allow_redirects:
- response.next_request = request
- return response
+ if allow_redirects:
+ response.read()
+ else:
+ response.next_request = request
+ return response
+
+ except Exception as exc:
+ response.close()
+ raise exc
def _send_single_request(self, request: Request, timeout: Timeout) -> Response:
"""
@@ -852,29 +926,29 @@ def _send_single_request(self, request: Request, timeout: Timeout) -> Response:
timer = Timer()
timer.sync_start()
- with map_exceptions(HTTPCORE_EXC_MAP, request=request):
- (status_code, headers, stream, ext) = transport.request(
+ if not isinstance(request.stream, SyncByteStream):
+ raise RuntimeError(
+ "Attempted to send an async request with a sync Client instance."
+ )
+
+ with request_context(request=request):
+ (status_code, headers, stream, extensions) = transport.handle_request(
request.method.encode(),
request.url.raw,
headers=request.headers.raw,
- stream=request.stream, # type: ignore
- ext={"timeout": timeout.as_dict()},
+ stream=request.stream,
+ extensions={"timeout": timeout.as_dict()},
)
- def on_close(response: Response) -> None:
- response.elapsed = datetime.timedelta(seconds=timer.sync_elapsed())
- if hasattr(stream, "close"):
- stream.close()
-
response = Response(
status_code,
headers=headers,
- stream=stream, # type: ignore
- ext=ext,
+ stream=stream,
+ extensions=extensions,
request=request,
- on_close=on_close,
)
+ response.stream = BoundSyncStream(stream, response=response, timer=timer)
self.cookies.extract_cookies(response)
status = f"{response.status_code} {response.reason_phrase}"
@@ -1131,7 +1205,8 @@ def __exit__(
transport.__exit__(exc_type, exc_value, traceback)
def __del__(self) -> None:
- self.close()
+ if self._state == ClientState.OPENED:
+ self.close()
class AsyncClient(BaseClient):
@@ -1193,14 +1268,13 @@ def __init__(
cert: CertTypes = None,
http2: bool = False,
proxies: ProxiesTypes = None,
- mounts: typing.Mapping[str, httpcore.AsyncHTTPTransport] = None,
+ mounts: typing.Mapping[str, AsyncBaseTransport] = None,
timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG,
limits: Limits = DEFAULT_LIMITS,
- pool_limits: Limits = None,
max_redirects: int = DEFAULT_MAX_REDIRECTS,
event_hooks: typing.Mapping[str, typing.List[typing.Callable]] = None,
base_url: URLTypes = "",
- transport: httpcore.AsyncHTTPTransport = None,
+ transport: AsyncBaseTransport = None,
app: typing.Callable = None,
trust_env: bool = True,
):
@@ -1225,13 +1299,6 @@ def __init__(
"Make sure to install httpx using `pip install httpx[http2]`."
) from None
- if pool_limits is not None:
- warn_deprecated(
- "AsyncClient(..., pool_limits=...) is deprecated and will raise "
- "errors in the future. Use AsyncClient(..., limits=...) instead."
- )
- limits = pool_limits
-
allow_env_proxies = trust_env and app is None and transport is None
proxy_map = self._get_proxy_map(proxies, allow_env_proxies)
@@ -1245,9 +1312,7 @@ def __init__(
trust_env=trust_env,
)
- self._mounts: typing.Dict[
- URLPattern, typing.Optional[httpcore.AsyncHTTPTransport]
- ] = {
+ self._mounts: typing.Dict[URLPattern, typing.Optional[AsyncBaseTransport]] = {
URLPattern(key): None
if proxy is None
else self._init_proxy_transport(
@@ -1272,10 +1337,10 @@ def _init_transport(
cert: CertTypes = None,
http2: bool = False,
limits: Limits = DEFAULT_LIMITS,
- transport: httpcore.AsyncHTTPTransport = None,
+ transport: AsyncBaseTransport = None,
app: typing.Callable = None,
trust_env: bool = True,
- ) -> httpcore.AsyncHTTPTransport:
+ ) -> AsyncBaseTransport:
if transport is not None:
return transport
@@ -1294,7 +1359,7 @@ def _init_proxy_transport(
http2: bool = False,
limits: Limits = DEFAULT_LIMITS,
trust_env: bool = True,
- ) -> httpcore.AsyncHTTPTransport:
+ ) -> AsyncBaseTransport:
return AsyncHTTPTransport(
verify=verify,
cert=cert,
@@ -1304,7 +1369,7 @@ def _init_proxy_transport(
proxy=proxy,
)
- def _transport_for_url(self, url: URL) -> httpcore.AsyncHTTPTransport:
+ def _transport_for_url(self, url: URL) -> AsyncBaseTransport:
"""
Returns the transport instance that should be used for a given URL.
This will either be the standard connection pool, or a proxy.
@@ -1363,6 +1428,56 @@ async def request(
)
return response
+ @asynccontextmanager
+ async def stream(
+ self,
+ method: str,
+ url: URLTypes,
+ *,
+ content: RequestContent = None,
+ data: RequestData = None,
+ files: RequestFiles = None,
+ json: typing.Any = None,
+ params: QueryParamTypes = None,
+ headers: HeaderTypes = None,
+ cookies: CookieTypes = None,
+ auth: typing.Union[AuthTypes, UnsetType] = UNSET,
+ allow_redirects: bool = True,
+ timeout: typing.Union[TimeoutTypes, UnsetType] = UNSET,
+ ) -> typing.AsyncIterator[Response]:
+ """
+ Alternative to `httpx.request()` that streams the response body
+ instead of loading it into memory at once.
+
+ **Parameters**: See `httpx.request`.
+
+ See also: [Streaming Responses][0]
+
+ [0]: /quickstart#streaming-responses
+ """
+ request = self.build_request(
+ method=method,
+ url=url,
+ content=content,
+ data=data,
+ files=files,
+ json=json,
+ params=params,
+ headers=headers,
+ cookies=cookies,
+ )
+ response = await self.send(
+ request=request,
+ auth=auth,
+ allow_redirects=allow_redirects,
+ timeout=timeout,
+ stream=True,
+ )
+ try:
+ yield response
+ finally:
+ await response.aclose()
+
async def send(
self,
request: Request,
@@ -1400,21 +1515,18 @@ async def send(
allow_redirects=allow_redirects,
history=[],
)
-
- if not stream:
- try:
+ try:
+ if not stream:
await response.aread()
- finally:
- await response.aclose()
- try:
for hook in self._event_hooks["response"]:
await hook(response)
- except Exception:
- await response.aclose()
- raise
- return response
+ return response
+
+ except Exception as exc:
+ await response.aclose()
+ raise exc
async def _send_handling_auth(
self,
@@ -1438,18 +1550,20 @@ async def _send_handling_auth(
history=history,
)
try:
- next_request = await auth_flow.asend(response)
- except StopAsyncIteration:
- return response
- except BaseException as exc:
- await response.aclose()
- raise exc from None
- else:
+ try:
+ next_request = await auth_flow.asend(response)
+ except StopAsyncIteration:
+ return response
+
response.history = list(history)
await response.aread()
request = next_request
history.append(response)
+ except Exception as exc:
+ await response.aclose()
+ raise exc
+
async def _send_handling_redirects(
self,
request: Request,
@@ -1464,19 +1578,24 @@ async def _send_handling_redirects(
)
response = await self._send_single_request(request, timeout)
- response.history = list(history)
+ try:
+ response.history = list(history)
- if not response.is_redirect:
- return response
+ if not response.is_redirect:
+ return response
- if allow_redirects:
- await response.aread()
- request = self._build_redirect_request(request, response)
- history = history + [response]
+ request = self._build_redirect_request(request, response)
+ history = history + [response]
- if not allow_redirects:
- response.next_request = request
- return response
+ if allow_redirects:
+ await response.aread()
+ else:
+ response.next_request = request
+ return response
+
+ except Exception as exc:
+ await response.aclose()
+ raise exc
async def _send_single_request(
self, request: Request, timeout: Timeout
@@ -1488,30 +1607,34 @@ async def _send_single_request(
timer = Timer()
await timer.async_start()
- with map_exceptions(HTTPCORE_EXC_MAP, request=request):
- (status_code, headers, stream, ext) = await transport.arequest(
+ if not isinstance(request.stream, AsyncByteStream):
+ raise RuntimeError(
+ "Attempted to send a sync request with an AsyncClient instance."
+ )
+
+ with request_context(request=request):
+ (
+ status_code,
+ headers,
+ stream,
+ extensions,
+ ) = await transport.handle_async_request(
request.method.encode(),
request.url.raw,
headers=request.headers.raw,
- stream=request.stream, # type: ignore
- ext={"timeout": timeout.as_dict()},
+ stream=request.stream,
+ extensions={"timeout": timeout.as_dict()},
)
- async def on_close(response: Response) -> None:
- response.elapsed = datetime.timedelta(seconds=await timer.async_elapsed())
- if hasattr(stream, "aclose"):
- with map_exceptions(HTTPCORE_EXC_MAP, request=request):
- await stream.aclose()
-
response = Response(
status_code,
headers=headers,
- stream=stream, # type: ignore
- ext=ext,
+ stream=stream,
+ extensions=extensions,
request=request,
- on_close=on_close,
)
+ response.stream = BoundAsyncStream(stream, response=response, timer=timer)
self.cookies.extract_cookies(response)
status = f"{response.status_code} {response.reason_phrase}"
@@ -1769,69 +1892,28 @@ async def __aexit__(
def __del__(self) -> None:
if self._state == ClientState.OPENED:
+ # Unlike the sync case, we cannot silently close the client when
+ # it is garbage collected, because `.aclose()` is an async operation,
+ # but `__del__` is not.
+ #
+ # For this reason we require explicit close management for
+ # `AsyncClient`, and issue a warning on unclosed clients.
+ #
+ # The context managed style is usually preferable, because it neatly
+ # ensures proper resource cleanup:
+ #
+ # async with httpx.AsyncClient() as client:
+ # ...
+ #
+ # However, an explicit call to `aclose()` is also sufficient:
+ #
+ # client = httpx.AsyncClient()
+ # try:
+ # ...
+ # finally:
+ # await client.aclose()
warnings.warn(
f"Unclosed {self!r}. "
"See https://www.python-httpx.org/async/#opening-and-closing-clients "
"for details."
)
-
-
-class StreamContextManager:
- def __init__(
- self,
- client: BaseClient,
- request: Request,
- *,
- auth: typing.Union[AuthTypes, UnsetType] = UNSET,
- allow_redirects: bool = True,
- timeout: typing.Union[TimeoutTypes, UnsetType] = UNSET,
- close_client: bool = False,
- ) -> None:
- self.client = client
- self.request = request
- self.auth = auth
- self.allow_redirects = allow_redirects
- self.timeout = timeout
- self.close_client = close_client
-
- def __enter__(self) -> "Response":
- assert isinstance(self.client, Client)
- self.response = self.client.send(
- request=self.request,
- auth=self.auth,
- allow_redirects=self.allow_redirects,
- timeout=self.timeout,
- stream=True,
- )
- return self.response
-
- def __exit__(
- self,
- exc_type: typing.Type[BaseException] = None,
- exc_value: BaseException = None,
- traceback: TracebackType = None,
- ) -> None:
- assert isinstance(self.client, Client)
- self.response.close()
- if self.close_client:
- self.client.close()
-
- async def __aenter__(self) -> "Response":
- assert isinstance(self.client, AsyncClient)
- self.response = await self.client.send(
- request=self.request,
- auth=self.auth,
- allow_redirects=self.allow_redirects,
- timeout=self.timeout,
- stream=True,
- )
- return self.response
-
- async def __aexit__(
- self,
- exc_type: typing.Type[BaseException] = None,
- exc_value: BaseException = None,
- traceback: TracebackType = None,
- ) -> None:
- assert isinstance(self.client, AsyncClient)
- await self.response.aclose()
diff --git a/httpx/_compat.py b/httpx/_compat.py
new file mode 100644
index 0000000000..47c12ba199
--- /dev/null
+++ b/httpx/_compat.py
@@ -0,0 +1,6 @@
+# `contextlib.asynccontextmanager` exists from Python 3.7 onwards.
+# For 3.6 we require the `async_generator` package for a backported version.
+try:
+ from contextlib import asynccontextmanager # type: ignore
+except ImportError: # pragma: no cover
+ from async_generator import asynccontextmanager # type: ignore # noqa
diff --git a/httpx/_content.py b/httpx/_content.py
index bf402c9e29..9c7c1ff225 100644
--- a/httpx/_content.py
+++ b/httpx/_content.py
@@ -1,4 +1,5 @@
import inspect
+import warnings
from json import dumps as json_dumps
from typing import (
Any,
@@ -12,93 +13,86 @@
)
from urllib.parse import urlencode
-from ._exceptions import StreamConsumed
+from ._exceptions import StreamClosed, StreamConsumed
from ._multipart import MultipartStream
-from ._types import (
- ByteStream,
- RequestContent,
- RequestData,
- RequestFiles,
- ResponseContent,
-)
-
+from ._transports.base import AsyncByteStream, SyncByteStream
+from ._types import RequestContent, RequestData, RequestFiles, ResponseContent
+from ._utils import primitive_value_to_str
-class PlainByteStream:
- """
- Request content encoded as plain bytes.
- """
- def __init__(self, body: bytes) -> None:
- self._body = body
+class ByteStream(AsyncByteStream, SyncByteStream):
+ def __init__(self, stream: bytes) -> None:
+ self._stream = stream
def __iter__(self) -> Iterator[bytes]:
- yield self._body
+ yield self._stream
async def __aiter__(self) -> AsyncIterator[bytes]:
- yield self._body
+ yield self._stream
-class GeneratorStream:
- """
- Request content encoded as plain bytes, using an byte generator.
- """
-
- def __init__(self, generator: Iterable[bytes]) -> None:
- self._generator = generator
+class IteratorByteStream(SyncByteStream):
+ def __init__(self, stream: Iterable[bytes]):
+ self._stream = stream
self._is_stream_consumed = False
+ self._is_generator = inspect.isgenerator(stream)
def __iter__(self) -> Iterator[bytes]:
- if self._is_stream_consumed:
+ if self._is_stream_consumed and self._is_generator:
raise StreamConsumed()
self._is_stream_consumed = True
- for part in self._generator:
+ for part in self._stream:
yield part
-class AsyncGeneratorStream:
- """
- Request content encoded as plain bytes, using an async byte iterator.
- """
-
- def __init__(self, agenerator: AsyncIterable[bytes]) -> None:
- self._agenerator = agenerator
+class AsyncIteratorByteStream(AsyncByteStream):
+ def __init__(self, stream: AsyncIterable[bytes]):
+ self._stream = stream
self._is_stream_consumed = False
+ self._is_generator = inspect.isasyncgen(stream)
async def __aiter__(self) -> AsyncIterator[bytes]:
- if self._is_stream_consumed:
+ if self._is_stream_consumed and self._is_generator:
raise StreamConsumed()
self._is_stream_consumed = True
- async for part in self._agenerator:
+ async for part in self._stream:
yield part
+class UnattachedStream(AsyncByteStream, SyncByteStream):
+ """
+ If a request or response is serialized using pickle, then it is no longer
+ attached to a stream for I/O purposes. Any stream operations should result
+ in `httpx.StreamClosed`.
+ """
+
+ def __iter__(self) -> Iterator[bytes]:
+ raise StreamClosed()
+
+ async def __aiter__(self) -> AsyncIterator[bytes]:
+ raise StreamClosed()
+ yield b"" # pragma: nocover
+
+
def encode_content(
- content: Union[str, bytes, ByteStream]
-) -> Tuple[Dict[str, str], ByteStream]:
- if isinstance(content, (str, bytes)):
+ content: Union[str, bytes, Iterable[bytes], AsyncIterable[bytes]]
+) -> Tuple[Dict[str, str], Union[SyncByteStream, AsyncByteStream]]:
+
+ if isinstance(content, (bytes, str)):
body = content.encode("utf-8") if isinstance(content, str) else content
content_length = str(len(body))
headers = {"Content-Length": content_length} if body else {}
- stream = PlainByteStream(body)
- return headers, stream
+ return headers, ByteStream(body)
- elif isinstance(content, (Iterable, AsyncIterable)):
+ elif isinstance(content, Iterable):
headers = {"Transfer-Encoding": "chunked"}
+ return headers, IteratorByteStream(content) # type: ignore
- # Generators should be wrapped in GeneratorStream/AsyncGeneratorStream
- # which will raise `StreamConsumed` if the stream is accessed more
- # than once. (Eg. Following HTTP 307 or HTTP 308 redirects.)
- if inspect.isgenerator(content):
- generator_stream = GeneratorStream(content) # type: ignore
- return headers, generator_stream
- if inspect.isasyncgen(content):
- agenerator_stream = AsyncGeneratorStream(content) # type: ignore
- return headers, agenerator_stream
-
- # Other iterables may be passed through as-is.
- return headers, content # type: ignore
+ elif isinstance(content, AsyncIterable):
+ headers = {"Transfer-Encoding": "chunked"}
+ return headers, AsyncIteratorByteStream(content)
raise TypeError(f"Unexpected type for 'content', {type(content)!r}")
@@ -106,19 +100,25 @@ def encode_content(
def encode_urlencoded_data(
data: dict,
) -> Tuple[Dict[str, str], ByteStream]:
- body = urlencode(data, doseq=True).encode("utf-8")
+ plain_data = []
+ for key, value in data.items():
+ if isinstance(value, (list, tuple)):
+ plain_data.extend([(key, primitive_value_to_str(item)) for item in value])
+ else:
+ plain_data.append((key, primitive_value_to_str(value)))
+ body = urlencode(plain_data, doseq=True).encode("utf-8")
content_length = str(len(body))
content_type = "application/x-www-form-urlencoded"
headers = {"Content-Length": content_length, "Content-Type": content_type}
- return headers, PlainByteStream(body)
+ return headers, ByteStream(body)
def encode_multipart_data(
data: dict, files: RequestFiles, boundary: bytes = None
-) -> Tuple[Dict[str, str], ByteStream]:
- stream = MultipartStream(data=data, files=files, boundary=boundary)
- headers = stream.get_headers()
- return headers, stream
+) -> Tuple[Dict[str, str], MultipartStream]:
+ multipart = MultipartStream(data=data, files=files, boundary=boundary)
+ headers = multipart.get_headers()
+ return headers, multipart
def encode_text(text: str) -> Tuple[Dict[str, str], ByteStream]:
@@ -126,7 +126,7 @@ def encode_text(text: str) -> Tuple[Dict[str, str], ByteStream]:
content_length = str(len(body))
content_type = "text/plain; charset=utf-8"
headers = {"Content-Length": content_length, "Content-Type": content_type}
- return headers, PlainByteStream(body)
+ return headers, ByteStream(body)
def encode_html(html: str) -> Tuple[Dict[str, str], ByteStream]:
@@ -134,7 +134,7 @@ def encode_html(html: str) -> Tuple[Dict[str, str], ByteStream]:
content_length = str(len(body))
content_type = "text/html; charset=utf-8"
headers = {"Content-Length": content_length, "Content-Type": content_type}
- return headers, PlainByteStream(body)
+ return headers, ByteStream(body)
def encode_json(json: Any) -> Tuple[Dict[str, str], ByteStream]:
@@ -142,7 +142,7 @@ def encode_json(json: Any) -> Tuple[Dict[str, str], ByteStream]:
content_length = str(len(body))
content_type = "application/json"
headers = {"Content-Length": content_length, "Content-Type": content_type}
- return headers, PlainByteStream(body)
+ return headers, ByteStream(body)
def encode_request(
@@ -151,7 +151,7 @@ def encode_request(
files: RequestFiles = None,
json: Any = None,
boundary: bytes = None,
-) -> Tuple[Dict[str, str], ByteStream]:
+) -> Tuple[Dict[str, str], Union[SyncByteStream, AsyncByteStream]]:
"""
Handles encoding the given `content`, `data`, `files`, and `json`,
 returning a two-tuple of (<headers>, <stream>).
@@ -164,6 +164,8 @@ def encode_request(
# However for compat with requests, we *do* still support
# `data=` usages. We deal with that case here, treating it
# as if `content=<...>` had been supplied instead.
+ message = "Use 'content=<...>' to upload raw bytes/text content."
+ warnings.warn(message, DeprecationWarning)
return encode_content(data)
if content is not None:
@@ -175,7 +177,7 @@ def encode_request(
elif json is not None:
return encode_json(json)
- return {}, PlainByteStream(b"")
+ return {}, ByteStream(b"")
def encode_response(
@@ -183,7 +185,7 @@ def encode_response(
text: str = None,
html: str = None,
json: Any = None,
-) -> Tuple[Dict[str, str], ByteStream]:
+) -> Tuple[Dict[str, str], Union[SyncByteStream, AsyncByteStream]]:
"""
Handles encoding the given `content`, returning a two-tuple of
 (<headers>, <stream>).
@@ -197,4 +199,4 @@ def encode_response(
elif json is not None:
return encode_json(json)
- return {}, PlainByteStream(b"")
+ return {}, ByteStream(b"")
diff --git a/httpx/_decoders.py b/httpx/_decoders.py
index 8ef0157e6f..c0d51a4cdc 100644
--- a/httpx/_decoders.py
+++ b/httpx/_decoders.py
@@ -8,6 +8,8 @@
import typing
import zlib
+from ._exceptions import DecodingError
+
try:
import brotli
except ImportError: # pragma: nocover
@@ -54,13 +56,13 @@ def decode(self, data: bytes) -> bytes:
if was_first_attempt:
self.decompressor = zlib.decompressobj(-zlib.MAX_WBITS)
return self.decode(data)
- raise ValueError(str(exc))
+ raise DecodingError(str(exc)) from exc
def flush(self) -> bytes:
try:
return self.decompressor.flush()
except zlib.error as exc: # pragma: nocover
- raise ValueError(str(exc))
+ raise DecodingError(str(exc)) from exc
class GZipDecoder(ContentDecoder):
@@ -77,13 +79,13 @@ def decode(self, data: bytes) -> bytes:
try:
return self.decompressor.decompress(data)
except zlib.error as exc:
- raise ValueError(str(exc))
+ raise DecodingError(str(exc)) from exc
def flush(self) -> bytes:
try:
return self.decompressor.flush()
except zlib.error as exc: # pragma: nocover
- raise ValueError(str(exc))
+ raise DecodingError(str(exc)) from exc
class BrotliDecoder(ContentDecoder):
@@ -118,7 +120,7 @@ def decode(self, data: bytes) -> bytes:
try:
return self._decompress(data)
except brotli.error as exc:
- raise ValueError(str(exc))
+ raise DecodingError(str(exc)) from exc
def flush(self) -> bytes:
if not self.seen_data:
@@ -128,7 +130,7 @@ def flush(self) -> bytes:
self.decompressor.finish()
return b""
except brotli.error as exc: # pragma: nocover
- raise ValueError(str(exc))
+ raise DecodingError(str(exc)) from exc
class MultiDecoder(ContentDecoder):
diff --git a/httpx/_exceptions.py b/httpx/_exceptions.py
index bade9f9b81..b6e59aa059 100644
--- a/httpx/_exceptions.py
+++ b/httpx/_exceptions.py
@@ -27,15 +27,13 @@
* CookieConflict
* StreamError
x StreamConsumed
+ x StreamClosed
x ResponseNotRead
x RequestNotRead
- x ResponseClosed
"""
import contextlib
import typing
-import httpcore
-
if typing.TYPE_CHECKING:
from ._models import Request, Response # pragma: nocover
@@ -58,9 +56,8 @@ class HTTPError(Exception):
```
"""
- def __init__(self, message: str, *, request: "Request") -> None:
+ def __init__(self, message: str) -> None:
super().__init__(message)
- self.request = request
class RequestError(HTTPError):
@@ -68,15 +65,30 @@ class RequestError(HTTPError):
Base class for all exceptions that may occur when issuing a `.request()`.
"""
- def __init__(self, message: str, *, request: "Request") -> None:
- super().__init__(message, request=request)
+ def __init__(self, message: str, *, request: "Request" = None) -> None:
+ super().__init__(message)
+ # At the point an exception is raised we won't typically have a request
+ # instance to associate it with.
+ #
+ # The 'request_context' context manager is used within the Client and
+ # Response methods in order to ensure that any raised exceptions
+ # have a `.request` property set on them.
+ self._request = request
+
+ @property
+ def request(self) -> "Request":
+ if self._request is None:
+ raise RuntimeError("The .request property has not been set.")
+ return self._request
+
+ @request.setter
+ def request(self, request: "Request") -> None:
+ self._request = request
class TransportError(RequestError):
"""
Base class for all exceptions that occur at the level of the Transport API.
-
- All of these exceptions also have an equivelent mapping in `httpcore`.
"""
@@ -219,7 +231,8 @@ class HTTPStatusError(HTTPError):
def __init__(
self, message: str, *, request: "Request", response: "Response"
) -> None:
- super().__init__(message, request=request)
+ super().__init__(message)
+ self.request = request
self.response = response
@@ -249,7 +262,7 @@ def __init__(self, message: str) -> None:
# the request/response stream in an invalid manner.
-class StreamError(Exception):
+class StreamError(RuntimeError):
"""
The base class for stream exceptions.
@@ -279,84 +292,48 @@ def __init__(self) -> None:
super().__init__(message)
-class ResponseNotRead(StreamError):
+class StreamClosed(StreamError):
"""
- Attempted to access response content, without having called `read()`
- after a streaming response.
+ Attempted to read or stream response content, but the request has been
+ closed.
"""
def __init__(self) -> None:
message = (
- "Attempted to access response content, without having called `read()` "
- "after a streaming response."
+ "Attempted to read or stream content, but the stream has " "been closed."
)
super().__init__(message)
-class RequestNotRead(StreamError):
+class ResponseNotRead(StreamError):
"""
- Attempted to access request content, without having called `read()`.
+ Attempted to access streaming response content, without having called `read()`.
"""
def __init__(self) -> None:
- message = "Attempted to access request content, without having called `read()`."
+ message = "Attempted to access streaming response content, without having called `read()`."
super().__init__(message)
-class ResponseClosed(StreamError):
+class RequestNotRead(StreamError):
"""
- Attempted to read or stream response content, but the request has been
- closed.
+ Attempted to access streaming request content, without having called `read()`.
"""
def __init__(self) -> None:
- message = (
- "Attempted to read or stream response content, but the request has "
- "been closed."
- )
+ message = "Attempted to access streaming request content, without having called `read()`."
super().__init__(message)
@contextlib.contextmanager
-def map_exceptions(
- mapping: typing.Mapping[typing.Type[Exception], typing.Type[Exception]],
- **kwargs: typing.Any,
-) -> typing.Iterator[None]:
+def request_context(request: "Request" = None) -> typing.Iterator[None]:
+ """
+ A context manager that can be used to attach the given request context
+ to any `RequestError` exceptions that are raised within the block.
+ """
try:
yield
- except Exception as exc:
- mapped_exc = None
-
- for from_exc, to_exc in mapping.items():
- if not isinstance(exc, from_exc):
- continue
- # We want to map to the most specific exception we can find.
- # Eg if `exc` is an `httpcore.ReadTimeout`, we want to map to
- # `httpx.ReadTimeout`, not just `httpx.TimeoutException`.
- if mapped_exc is None or issubclass(to_exc, mapped_exc):
- mapped_exc = to_exc
-
- if mapped_exc is None:
- raise
-
- message = str(exc)
- raise mapped_exc(message, **kwargs) from exc # type: ignore
-
-
-HTTPCORE_EXC_MAP = {
- httpcore.TimeoutException: TimeoutException,
- httpcore.ConnectTimeout: ConnectTimeout,
- httpcore.ReadTimeout: ReadTimeout,
- httpcore.WriteTimeout: WriteTimeout,
- httpcore.PoolTimeout: PoolTimeout,
- httpcore.NetworkError: NetworkError,
- httpcore.ConnectError: ConnectError,
- httpcore.ReadError: ReadError,
- httpcore.WriteError: WriteError,
- httpcore.CloseError: CloseError,
- httpcore.ProxyError: ProxyError,
- httpcore.UnsupportedProtocol: UnsupportedProtocol,
- httpcore.ProtocolError: ProtocolError,
- httpcore.LocalProtocolError: LocalProtocolError,
- httpcore.RemoteProtocolError: RemoteProtocolError,
-}
+ except RequestError as exc:
+ if request is not None:
+ exc.request = request
+ raise exc
diff --git a/httpx/_models.py b/httpx/_models.py
index 2d11888254..a6157a8728 100644
--- a/httpx/_models.py
+++ b/httpx/_models.py
@@ -1,5 +1,4 @@
import cgi
-import contextlib
import datetime
import email.message
import json as jsonlib
@@ -7,12 +6,13 @@
import urllib.request
from collections.abc import MutableMapping
from http.cookiejar import Cookie, CookieJar
-from urllib.parse import parse_qsl, quote, unquote, urlencode
+from urllib.parse import parse_qs, quote, unquote, urlencode
+import idna
import rfc3986
import rfc3986.exceptions
-from ._content import PlainByteStream, encode_request, encode_response
+from ._content import ByteStream, UnattachedStream, encode_request, encode_response
from ._decoders import (
SUPPORTED_DECODERS,
ByteChunker,
@@ -24,20 +24,18 @@
TextDecoder,
)
from ._exceptions import (
- HTTPCORE_EXC_MAP,
CookieConflict,
- DecodingError,
HTTPStatusError,
InvalidURL,
RequestNotRead,
- ResponseClosed,
ResponseNotRead,
+ StreamClosed,
StreamConsumed,
- map_exceptions,
+ request_context,
)
from ._status_codes import codes
+from ._transports.base import AsyncByteStream, SyncByteStream
from ._types import (
- ByteStream,
CookieTypes,
HeaderTypes,
PrimitiveData,
@@ -50,28 +48,28 @@
URLTypes,
)
from ._utils import (
- flatten_queryparams,
guess_json_utf,
is_known_encoding,
normalize_header_key,
normalize_header_value,
obfuscate_sensitive_headers,
parse_header_links,
- str_query_param,
+ primitive_value_to_str,
)
class URL:
"""
- url = httpx.URL("HTTPS://jo%40email.com:a%20secret@example.com:1234/pa%20th?search=ab#anchorlink")
+ url = httpx.URL("HTTPS://jo%40email.com:a%20secret@müller.de:1234/pa%20th?search=ab#anchorlink")
assert url.scheme == "https"
assert url.username == "jo@email.com"
assert url.password == "a secret"
assert url.userinfo == b"jo%40email.com:a%20secret"
- assert url.host == "example.com"
+ assert url.host == "müller.de"
+ assert url.raw_host == b"xn--mller-kva.de"
assert url.port == 1234
- assert url.netloc == "example.com:1234"
+ assert url.netloc == b"xn--mller-kva.de:1234"
assert url.path == "/pa th"
assert url.query == b"?search=ab"
assert url.raw_path == b"/pa%20th?search=ab"
@@ -79,17 +77,35 @@ class URL:
The components of a URL are broken down like this:
- https://jo%40email.com:a%20secret@example.com:1234/pa%20th?search=ab#anchorlink
- [scheme][ username ] [password] [ host ][port][ path ] [ query ] [fragment]
- [ userinfo ] [ netloc ][ raw_path ]
+ https://jo%40email.com:a%20secret@müller.de:1234/pa%20th?search=ab#anchorlink
+ [scheme] [ username ] [password] [ host ][port][ path ] [ query ] [fragment]
+ [ userinfo ] [ netloc ][ raw_path ]
Note that:
* `url.scheme` is normalized to always be lowercased.
- * `url.host` is normalized to always be lowercased, and is IDNA encoded. For instance:
- url = httpx.URL("http://中国.icom.museum")
- assert url.host == "xn--fiqs8s.icom.museum"
+ * `url.host` is normalized to always be lowercased. Internationalized domain
+ names are represented in unicode, without IDNA encoding applied. For instance:
+
+ url = httpx.URL("http://中国.icom.museum")
+ assert url.host == "中国.icom.museum"
+ url = httpx.URL("http://xn--fiqs8s.icom.museum")
+ assert url.host == "中国.icom.museum"
+
+ * `url.raw_host` is normalized to always be lowercased, and is IDNA encoded.
+
+ url = httpx.URL("http://中国.icom.museum")
+ assert url.raw_host == b"xn--fiqs8s.icom.museum"
+ url = httpx.URL("http://xn--fiqs8s.icom.museum")
+ assert url.raw_host == b"xn--fiqs8s.icom.museum"
+
+ * `url.port` is either None or an integer. URLs that include the default port for
+ "http", "https", "ws", "wss", and "ftp" schemes have their port normalized to `None`.
+
+ assert httpx.URL("http://example.com") == httpx.URL("http://example.com:80")
+ assert httpx.URL("http://example.com").port is None
+ assert httpx.URL("http://example.com:80").port is None
* `url.userinfo` is raw bytes, without URL escaping. Usually you'll want to work with
`url.username` and `url.password` instead, which handle the URL escaping.
@@ -103,7 +119,7 @@ class URL:
"""
def __init__(
- self, url: typing.Union["URL", str, RawURL] = "", params: QueryParamTypes = None
+ self, url: typing.Union["URL", str, RawURL] = "", **kwargs: typing.Any
) -> None:
if isinstance(url, (str, tuple)):
if isinstance(url, tuple):
@@ -135,15 +151,26 @@ def __init__(
f"Invalid type for url. Expected str or httpx.URL, got {type(url)}: {url!r}"
)
- # Add any query parameters, merging with any in the URL if needed.
- if params:
- if self._uri_reference.query:
- url_params = QueryParams(self._uri_reference.query)
- url_params.update(params)
- query_string = str(url_params)
- else:
- query_string = str(QueryParams(params))
- self._uri_reference = self._uri_reference.copy_with(query=query_string)
+ # Perform port normalization, following the WHATWG spec for default ports.
+ #
+ # See:
+ # * https://tools.ietf.org/html/rfc3986#section-3.2.3
+ # * https://url.spec.whatwg.org/#url-miscellaneous
+ # * https://url.spec.whatwg.org/#scheme-state
+ default_port = {
+ "ftp": ":21",
+ "http": ":80",
+ "https": ":443",
+ "ws": ":80",
+ "wss": ":443",
+ }.get(self._uri_reference.scheme, "")
+ authority = self._uri_reference.authority or ""
+ if default_port and authority.endswith(default_port):
+ authority = authority[: -len(default_port)]
+ self._uri_reference = self._uri_reference.copy_with(authority=authority)
+
+ if kwargs:
+ self._uri_reference = self.copy_with(**kwargs)._uri_reference
@property
def scheme(self) -> str:
@@ -153,6 +180,14 @@ def scheme(self) -> str:
"""
return self._uri_reference.scheme or ""
+ @property
+ def raw_scheme(self) -> bytes:
+ """
+ The raw bytes representation of the URL scheme, such as b"http", b"https".
+ Always normalised to lowercase.
+ """
+ return self.scheme.encode("ascii")
+
@property
def userinfo(self) -> bytes:
"""
@@ -184,7 +219,7 @@ def password(self) -> str:
def host(self) -> str:
"""
The URL host as a string.
- Always normlized to lowercase, and IDNA encoded.
+ Always normalized to lowercase, with IDNA hosts decoded into unicode.
Examples:
@@ -192,36 +227,85 @@ def host(self) -> str:
assert url.host == "www.example.org"
url = httpx.URL("http://中国.icom.museum")
- assert url.host == "xn--fiqs8s.icom.museum"
+ assert url.host == "中国.icom.museum"
+
+ url = httpx.URL("http://xn--fiqs8s.icom.museum")
+ assert url.host == "中国.icom.museum"
url = httpx.URL("https://[::ffff:192.168.0.1]")
assert url.host == "::ffff:192.168.0.1"
"""
- host: str = self._uri_reference.host
+ host: str = self._uri_reference.host or ""
+
+ if host and ":" in host and host[0] == "[":
+ # it's an IPv6 address
+ host = host.lstrip("[").rstrip("]")
+
+ if host.startswith("xn--"):
+ host = idna.decode(host)
+
+ return host
+
+ @property
+ def raw_host(self) -> bytes:
+ """
+ The raw bytes representation of the URL host.
+ Always normalized to lowercase, and IDNA encoded.
+
+ Examples:
+
+ url = httpx.URL("http://www.EXAMPLE.org")
+ assert url.raw_host == b"www.example.org"
+
+ url = httpx.URL("http://中国.icom.museum")
+ assert url.raw_host == b"xn--fiqs8s.icom.museum"
+
+ url = httpx.URL("http://xn--fiqs8s.icom.museum")
+ assert url.raw_host == b"xn--fiqs8s.icom.museum"
+
+ url = httpx.URL("https://[::ffff:192.168.0.1]")
+ assert url.raw_host == b"::ffff:192.168.0.1"
+ """
+ host: str = self._uri_reference.host or ""
if host and ":" in host and host[0] == "[":
# it's an IPv6 address
host = host.lstrip("[").rstrip("]")
- return host or ""
+ return host.encode("ascii")
@property
def port(self) -> typing.Optional[int]:
"""
The URL port as an integer.
+
+ Note that the URL class performs port normalization as per the WHATWG spec.
+ Default ports for "http", "https", "ws", "wss", and "ftp" schemes are always
+ treated as `None`.
+
+ For example:
+
+ assert httpx.URL("http://www.example.com") == httpx.URL("http://www.example.com:80")
+ assert httpx.URL("http://www.example.com:80").port is None
"""
port = self._uri_reference.port
return int(port) if port else None
@property
- def netloc(self) -> str:
+ def netloc(self) -> bytes:
"""
- Either `<host>` or `<host>:<port>` as a string.
- Always normlized to lowercase, and IDNA encoded.
+ Either `<host>` or `<host>:<port>` as bytes.
+ Always normalized to lowercase, and IDNA encoded.
+
+ This property may be used for generating the value of a request
+ "Host" header.
"""
host = self._uri_reference.host or ""
port = self._uri_reference.port
- return host if port is None else f"{host}:{port}"
+ netloc = host.encode("ascii")
+ if port:
+ netloc = netloc + b":" + port.encode("ascii")
+ return netloc
@property
def path(self) -> str:
@@ -240,12 +324,27 @@ def path(self) -> str:
def query(self) -> bytes:
"""
The URL query string, as raw bytes, excluding the leading b"?".
- Note that URL decoding can only be applied on URL query strings
- at the point of decoding the individual parameter names/values.
+
+ This is necessarily a bytewise interface, because we cannot
+ perform URL decoding of this representation until we've parsed
+ the keys and values into a QueryParams instance.
+
+ For example:
+
+ url = httpx.URL("https://example.com/?filter=some%20search%20terms")
+ assert url.query == b"filter=some%20search%20terms"
"""
query = self._uri_reference.query or ""
return query.encode("ascii")
+ @property
+ def params(self) -> "QueryParams":
+ """
+ The URL query parameters, neatly parsed and packaged into an immutable
+ multidict representation.
+ """
+ return QueryParams(self._uri_reference.query)
+
@property
def raw_path(self) -> bytes:
"""
@@ -269,19 +368,19 @@ def fragment(self) -> str:
The URL fragments, as used in HTML anchors.
As a string, without the leading '#'.
"""
- return self._uri_reference.fragment or ""
+ return unquote(self._uri_reference.fragment or "")
@property
def raw(self) -> RawURL:
"""
The URL in the raw representation used by the low level
- transport API. For example, see `httpcore`.
+ transport API. See `BaseTransport.handle_request`.
Provides the (scheme, host, port, target) for the outgoing request.
"""
return (
- self.scheme.encode("ascii"),
- self.host.encode("ascii"),
+ self.raw_scheme,
+ self.raw_host,
self.port,
self.raw_path,
)
@@ -296,7 +395,7 @@ def is_absolute_url(self) -> bool:
# URLs with a fragment portion as not absolute.
# What we actually care about is if the URL provides
# a scheme and hostname to which connections should be made.
- return bool(self.scheme and self.host)
+ return bool(self._uri_reference.scheme and self._uri_reference.host)
@property
def is_relative_url(self) -> bool:
@@ -324,12 +423,18 @@ def copy_with(self, **kwargs: typing.Any) -> "URL":
"userinfo": bytes,
"host": str,
"port": int,
- "netloc": str,
+ "netloc": bytes,
"path": str,
"query": bytes,
"raw_path": bytes,
"fragment": str,
+ "params": object,
}
+
+ # Step 1
+ # ======
+ #
+ # Perform type checking for all supported keyword arguments.
for key, value in kwargs.items():
if key not in allowed:
message = f"{key!r} is an invalid keyword argument for copy_with()"
@@ -340,49 +445,107 @@ def copy_with(self, **kwargs: typing.Any) -> "URL":
message = f"Argument {key!r} must be {expected} but got {seen}"
raise TypeError(message)
- # Replace username, password, userinfo, host, port, netloc with "authority" for rfc3986
+ # Step 2
+ # ======
+ #
+ # Consolidate "username", "password", "userinfo", "host", "port" and "netloc"
+ # into a single "authority" keyword, for `rfc3986`.
if "username" in kwargs or "password" in kwargs:
- # Consolidate username and password into userinfo.
+ # Consolidate "username" and "password" into "userinfo".
username = quote(kwargs.pop("username", self.username) or "")
password = quote(kwargs.pop("password", self.password) or "")
userinfo = f"{username}:{password}" if password else username
kwargs["userinfo"] = userinfo.encode("ascii")
if "host" in kwargs or "port" in kwargs:
- # Consolidate host and port into netloc.
+ # Consolidate "host" and "port" into "netloc".
host = kwargs.pop("host", self.host) or ""
port = kwargs.pop("port", self.port)
if host and ":" in host and host[0] != "[":
- # it's an IPv6 address, so it should be hidden under bracket
+ # IPv6 addresses need to be escaped within sqaure brackets.
host = f"[{host}]"
- kwargs["netloc"] = f"{host}:{port}" if port is not None else host
+ kwargs["netloc"] = (
+ f"{host}:{port}".encode("ascii")
+ if port is not None
+ else host.encode("ascii")
+ )
if "userinfo" in kwargs or "netloc" in kwargs:
- # Consolidate userinfo and netloc into authority.
+ # Consolidate "userinfo" and "netloc" into authority.
userinfo = (kwargs.pop("userinfo", self.userinfo) or b"").decode("ascii")
- netloc = kwargs.pop("netloc", self.netloc) or ""
+ netloc = (kwargs.pop("netloc", self.netloc) or b"").decode("ascii")
authority = f"{userinfo}@{netloc}" if userinfo else netloc
kwargs["authority"] = authority
+ # Step 3
+ # ======
+ #
+ # Wrangle any "path", "query", "raw_path" and "params" keywords into
+ # "query" and "path" keywords for `rfc3986`.
if "raw_path" in kwargs:
+ # If "raw_path" is included, then split it into "path" and "query" components.
raw_path = kwargs.pop("raw_path") or b""
path, has_query, query = raw_path.decode("ascii").partition("?")
kwargs["path"] = path
kwargs["query"] = query if has_query else None
else:
- # Ensure path= for rfc3986
if kwargs.get("path") is not None:
+ # Ensure `kwargs["path"] = ` for `rfc3986`.
kwargs["path"] = quote(kwargs["path"])
- # Ensure query= for rfc3986
if kwargs.get("query") is not None:
+ # Ensure `kwargs["query"] = ` for `rfc3986`.
+ #
+ # Note that `.copy_with(query=None)` and `.copy_with(query=b"")`
+ # are subtly different. The `None` style will not include an empty
+ # trailing "?" character.
kwargs["query"] = kwargs["query"].decode("ascii")
+ if "params" in kwargs:
+ # Replace any "params" keyword with the raw "query" instead.
+ #
+ # Ensure that empty params use `kwargs["query"] = None` rather
+ # than `kwargs["query"] = ""`, so that generated URLs do not
+ # include an empty trailing "?".
+ params = kwargs.pop("params")
+ kwargs["query"] = None if not params else str(QueryParams(params))
+
+ # Step 4
+ # ======
+ #
+ # Ensure any fragment component is quoted.
+ if kwargs.get("fragment") is not None:
+ kwargs["fragment"] = quote(kwargs["fragment"])
+
+ # Step 5
+ # ======
+ #
+ # At this point kwargs may include keys for "scheme", "authority", "path",
+ # "query" and "fragment". Together these constitute the entire URL.
+ #
+ # See https://tools.ietf.org/html/rfc3986#section-3
+ #
+ # foo://example.com:8042/over/there?name=ferret#nose
+ # \_/ \______________/\_________/ \_________/ \__/
+ # | | | | |
+ # scheme authority path query fragment
return URL(self._uri_reference.copy_with(**kwargs).unsplit())
+ def copy_set_param(self, key: str, value: typing.Any = None) -> "URL":
+ return self.copy_with(params=self.params.set(key, value))
+
+ def copy_add_param(self, key: str, value: typing.Any = None) -> "URL":
+ return self.copy_with(params=self.params.add(key, value))
+
+ def copy_remove_param(self, key: str) -> "URL":
+ return self.copy_with(params=self.params.remove(key))
+
+ def copy_merge_params(self, params: QueryParamTypes) -> "URL":
+ return self.copy_with(params=self.params.merge(params))
+
def join(self, url: URLTypes) -> "URL":
"""
Return an absolute URL, using this URL as the base.
@@ -391,10 +554,17 @@ def join(self, url: URLTypes) -> "URL":
url = httpx.URL("https://www.example.com/test")
url = url.join("/new/path")
- assert url == "https://www.example.com/test/new/path"
+ assert url == "https://www.example.com/new/path"
"""
if self.is_relative_url:
- return URL(url)
+ # Workaround to handle relative URLs, which otherwise raise
+ # rfc3986.exceptions.ResolutionError when used as an argument
+ # in `.resolve_with`.
+ return (
+ self.copy_with(scheme="http", host="example.com")
+ .join(url)
+ .copy_with(scheme=None, host=None)
+ )
# We drop any fragment portion, because RFC 3986 strictly
# treats URLs with a fragment portion as not being absolute URLs.
@@ -406,7 +576,7 @@ def __hash__(self) -> int:
return hash(str(self))
def __eq__(self, other: typing.Any) -> bool:
- return isinstance(other, (URL, str)) and str(self) == str(other)
+ return isinstance(other, (URL, str)) and str(self) == str(URL(other))
def __str__(self) -> str:
return self._uri_reference.unsplit()
@@ -415,6 +585,8 @@ def __repr__(self) -> str:
class_name = self.__class__.__name__
url_str = str(self)
if self._uri_reference.userinfo:
+ # Mask any password component in the URL representation, to lower the
+ # risk of unintended leakage, such as in debug information and logging.
username = quote(self.username)
url_str = (
rfc3986.urlparse(url_str)
@@ -438,83 +610,175 @@ def __init__(self, *args: QueryParamTypes, **kwargs: typing.Any) -> None:
items: typing.Sequence[typing.Tuple[str, PrimitiveData]]
if value is None or isinstance(value, (str, bytes)):
value = value.decode("ascii") if isinstance(value, bytes) else value
- items = parse_qsl(value)
+ self._dict = parse_qs(value)
elif isinstance(value, QueryParams):
- items = value.multi_items()
- elif isinstance(value, (list, tuple)):
- items = value
+ self._dict = {k: list(v) for k, v in value._dict.items()}
else:
- items = flatten_queryparams(value)
-
- self._list = [(str(k), str_query_param(v)) for k, v in items]
- self._dict = {str(k): str_query_param(v) for k, v in items}
+ dict_value: typing.Dict[typing.Any, typing.List[typing.Any]] = {}
+ if isinstance(value, (list, tuple)):
+ # Convert list inputs like:
+ # [("a", "123"), ("a", "456"), ("b", "789")]
+ # To a dict representation, like:
+ # {"a": ["123", "456"], "b": ["789"]}
+ for item in value:
+ dict_value.setdefault(item[0], []).append(item[1])
+ else:
+ # Convert dict inputs like:
+ # {"a": "123", "b": ["456", "789"]}
+ # To dict inputs where values are always lists, like:
+ # {"a": ["123"], "b": ["456", "789"]}
+ dict_value = {
+ k: list(v) if isinstance(v, (list, tuple)) else [v]
+ for k, v in value.items()
+ }
+
+ # Ensure that keys and values are neatly coerced to strings.
+ # We coerce values `True` and `False` to JSON-like "true" and "false"
+ # representations, and coerce `None` values to the empty string.
+ self._dict = {
+ str(k): [primitive_value_to_str(item) for item in v]
+ for k, v in dict_value.items()
+ }
def keys(self) -> typing.KeysView:
+ """
+ Return all the keys in the query params.
+
+ Usage:
+
+ q = httpx.QueryParams("a=123&a=456&b=789")
+ assert list(q.keys()) == ["a", "b"]
+ """
return self._dict.keys()
def values(self) -> typing.ValuesView:
- return self._dict.values()
+ """
+ Return all the values in the query params. If a key occurs more than once
+ only the first item for that key is returned.
+
+ Usage:
+
+ q = httpx.QueryParams("a=123&a=456&b=789")
+ assert list(q.values()) == ["123", "789"]
+ """
+ return {k: v[0] for k, v in self._dict.items()}.values()
def items(self) -> typing.ItemsView:
"""
Return all items in the query params. If a key occurs more than once
only the first item for that key is returned.
+
+ Usage:
+
+ q = httpx.QueryParams("a=123&a=456&b=789")
+ assert list(q.items()) == [("a", "123"), ("b", "789")]
"""
- return self._dict.items()
+ return {k: v[0] for k, v in self._dict.items()}.items()
def multi_items(self) -> typing.List[typing.Tuple[str, str]]:
"""
Return all items in the query params. Allow duplicate keys to occur.
+
+ Usage:
+
+ q = httpx.QueryParams("a=123&a=456&b=789")
+ assert list(q.multi_items()) == [("a", "123"), ("a", "456"), ("b", "789")]
"""
- return list(self._list)
+ multi_items: typing.List[typing.Tuple[str, str]] = []
+ for k, v in self._dict.items():
+ multi_items.extend([(k, i) for i in v])
+ return multi_items
def get(self, key: typing.Any, default: typing.Any = None) -> typing.Any:
"""
Get a value from the query param for a given key. If the key occurs
more than once, then only the first value is returned.
+
+ Usage:
+
+ q = httpx.QueryParams("a=123&a=456&b=789")
+ assert q.get("a") == "123"
"""
if key in self._dict:
- return self._dict[key]
+ return self._dict[str(key)][0]
return default
- def get_list(self, key: typing.Any) -> typing.List[str]:
+ def get_list(self, key: str) -> typing.List[str]:
"""
Get all values from the query param for a given key.
+
+ Usage:
+
+ q = httpx.QueryParams("a=123&a=456&b=789")
+ assert q.get_list("a") == ["123", "456"]
"""
- return [item_value for item_key, item_value in self._list if item_key == key]
+ return list(self._dict.get(str(key), []))
- def update(self, params: QueryParamTypes = None) -> None:
- if not params:
- return
-
- params = QueryParams(params)
- for param in params:
- item, *extras = params.get_list(param)
- self[param] = item
- if extras:
- self._list.extend((param, e) for e in extras)
- # ensure getter matches merged QueryParams getter
- self._dict[param] = params[param]
+ def set(self, key: str, value: typing.Any = None) -> "QueryParams":
+ """
+ Return a new QueryParams instance, setting the value of a key.
- def __getitem__(self, key: typing.Any) -> str:
- return self._dict[key]
+ Usage:
- def __setitem__(self, key: str, value: str) -> None:
- self._dict[key] = value
+ q = httpx.QueryParams("a=123")
+ q = q.set("a", "456")
+ assert q == httpx.QueryParams("a=456")
+ """
+ q = QueryParams()
+ q._dict = dict(self._dict)
+ q._dict[str(key)] = [primitive_value_to_str(value)]
+ return q
- found_indexes = []
- for idx, (item_key, _) in enumerate(self._list):
- if item_key == key:
- found_indexes.append(idx)
+ def add(self, key: str, value: typing.Any = None) -> "QueryParams":
+ """
+ Return a new QueryParams instance, setting or appending the value of a key.
- for idx in reversed(found_indexes[1:]):
- del self._list[idx]
+ Usage:
- if found_indexes:
- idx = found_indexes[0]
- self._list[idx] = (key, value)
- else:
- self._list.append((key, value))
+ q = httpx.QueryParams("a=123")
+ q = q.add("a", "456")
+ assert q == httpx.QueryParams("a=123&a=456")
+ """
+ q = QueryParams()
+ q._dict = dict(self._dict)
+ q._dict[str(key)] = q.get_list(key) + [primitive_value_to_str(value)]
+ return q
+
+ def remove(self, key: str) -> "QueryParams":
+ """
+ Return a new QueryParams instance, removing the value of a key.
+
+ Usage:
+
+ q = httpx.QueryParams("a=123")
+ q = q.remove("a")
+ assert q == httpx.QueryParams("")
+ """
+ q = QueryParams()
+ q._dict = dict(self._dict)
+ q._dict.pop(str(key), None)
+ return q
+
+ def merge(self, params: QueryParamTypes = None) -> "QueryParams":
+ """
+ Return a new QueryParams instance, updated with the given query params.
+
+ Usage:
+
+ q = httpx.QueryParams("a=123")
+ q = q.merge({"b": "456"})
+ assert q == httpx.QueryParams("a=123&b=456")
+
+ q = httpx.QueryParams("a=123")
+ q = q.merge({"a": "456", "b": "789"})
+ assert q == httpx.QueryParams("a=456&b=789")
+ """
+ q = QueryParams(params)
+ q._dict = {**self._dict, **q._dict}
+ return q
+
+ def __getitem__(self, key: typing.Any) -> str:
+ return self._dict[key][0]
def __contains__(self, key: typing.Any) -> bool:
return key in self._dict
@@ -525,19 +789,37 @@ def __iter__(self) -> typing.Iterator[typing.Any]:
def __len__(self) -> int:
return len(self._dict)
+ def __bool__(self) -> bool:
+ return bool(self._dict)
+
+ def __hash__(self) -> int:
+ return hash(str(self))
+
def __eq__(self, other: typing.Any) -> bool:
if not isinstance(other, self.__class__):
return False
- return sorted(self._list) == sorted(other._list)
+ return sorted(self.multi_items()) == sorted(other.multi_items())
def __str__(self) -> str:
- return urlencode(self._list)
+ return urlencode(self.multi_items())
def __repr__(self) -> str:
class_name = self.__class__.__name__
query_string = str(self)
return f"{class_name}({query_string!r})"
+ def update(self, params: QueryParamTypes = None) -> None:
+ raise RuntimeError(
+ "QueryParams are immutable since 0.18.0. "
+ "Use `q = q.merge(...)` to create an updated copy."
+ )
+
+ def __setitem__(self, key: str, value: str) -> None:
+ raise RuntimeError(
+ "QueryParams are immutable since 0.18.0. "
+ "Use `q = q.set(key, value)` to create an updated copy."
+ )
+
class Headers(typing.MutableMapping[str, str]):
"""
@@ -794,13 +1076,15 @@ def __init__(
data: RequestData = None,
files: RequestFiles = None,
json: typing.Any = None,
- stream: ByteStream = None,
+ stream: typing.Union[SyncByteStream, AsyncByteStream] = None,
):
if isinstance(method, bytes):
self.method = method.decode("ascii").upper()
else:
self.method = method.upper()
- self.url = URL(url, params=params)
+ self.url = URL(url)
+ if params is not None:
+ self.url = self.url.copy_merge_params(params=params)
self.headers = Headers(headers)
if cookies:
Cookies(cookies).set_cookie_header(self)
@@ -825,6 +1109,9 @@ def __init__(
headers, stream = encode_request(content, data, files, json)
self._prepare(headers)
self.stream = stream
+ # Load the request body, except for streaming content.
+ if isinstance(stream, ByteStream):
+ self.read()
def _prepare(self, default_headers: typing.Dict[str, str]) -> None:
for key, value in default_headers.items():
@@ -841,12 +1128,7 @@ def _prepare(self, default_headers: typing.Dict[str, str]) -> None:
)
if not has_host and self.url.host:
- default_port = {"http": 80, "https": 443}.get(self.url.scheme)
- if self.url.port is None or self.url.port == default_port:
- host_header = self.url.host.encode("ascii")
- else:
- host_header = self.url.netloc.encode("ascii")
- auto_headers.append((b"Host", host_header))
+ auto_headers.append((b"Host", self.url.netloc))
if not has_content_length and self.method in ("POST", "PUT", "PATCH"):
auto_headers.append((b"Content-Length", b"0"))
@@ -865,10 +1147,11 @@ def read(self) -> bytes:
if not hasattr(self, "_content"):
assert isinstance(self.stream, typing.Iterable)
self._content = b"".join(self.stream)
- # If a streaming request has been read entirely into memory, then
- # we can replace the stream with a raw bytes implementation,
- # to ensure that any non-replayable streams can still be used.
- self.stream = PlainByteStream(self._content)
+ if not isinstance(self.stream, ByteStream):
+ # If a streaming request has been read entirely into memory, then
+ # we can replace the stream with a raw bytes implementation,
+ # to ensure that any non-replayable streams can still be used.
+ self.stream = ByteStream(self._content)
return self._content
async def aread(self) -> bytes:
@@ -878,10 +1161,11 @@ async def aread(self) -> bytes:
if not hasattr(self, "_content"):
assert isinstance(self.stream, typing.AsyncIterable)
self._content = b"".join([part async for part in self.stream])
- # If a streaming request has been read entirely into memory, then
- # we can replace the stream with a raw bytes implementation,
- # to ensure that any non-replayable streams can still be used.
- self.stream = PlainByteStream(self._content)
+ if not isinstance(self.stream, ByteStream):
+ # If a streaming request has been read entirely into memory, then
+ # we can replace the stream with a raw bytes implementation,
+ # to ensure that any non-replayable streams can still be used.
+ self.stream = ByteStream(self._content)
return self._content
def __repr__(self) -> str:
@@ -889,6 +1173,18 @@ def __repr__(self) -> str:
url = str(self.url)
return f"<{class_name}({self.method!r}, {url!r})>"
+ def __getstate__(self) -> typing.Dict[str, typing.Any]:
+ return {
+ name: value
+ for name, value in self.__dict__.items()
+ if name not in ["stream"]
+ }
+
+ def __setstate__(self, state: typing.Dict[str, typing.Any]) -> None:
+ for name, value in state.items():
+ setattr(self, name, value)
+ self.stream = UnattachedStream()
+
class Response:
def __init__(
@@ -900,11 +1196,10 @@ def __init__(
text: str = None,
html: str = None,
json: typing.Any = None,
- stream: ByteStream = None,
+ stream: typing.Union[SyncByteStream, AsyncByteStream] = None,
request: Request = None,
- ext: dict = None,
+ extensions: dict = None,
history: typing.List["Response"] = None,
- on_close: typing.Callable = None,
):
self.status_code = status_code
self.headers = Headers(headers)
@@ -915,11 +1210,8 @@ def __init__(
# the client will set `response.next_request`.
self.next_request: typing.Optional[Request] = None
- self.call_next: typing.Optional[typing.Callable] = None
-
- self.ext = {} if ext is None else ext
+ self.extensions = {} if extensions is None else extensions
self.history = [] if history is None else list(history)
- self._on_close = on_close
self.is_closed = False
self.is_stream_consumed = False
@@ -941,7 +1233,7 @@ def __init__(
headers, stream = encode_response(content, text, html, json)
self._prepare(headers)
self.stream = stream
- if content is None or isinstance(content, (bytes, str)):
+ if isinstance(stream, ByteStream):
# Load the response body, except for streaming content.
self.read()
@@ -988,11 +1280,17 @@ def request(self, value: Request) -> None:
@property
def http_version(self) -> str:
- return self.ext.get("http_version", "HTTP/1.1")
+ try:
+ return self.extensions["http_version"].decode("ascii", errors="ignore")
+ except KeyError:
+ return "HTTP/1.1"
@property
def reason_phrase(self) -> str:
- return self.ext.get("reason", codes.get_reason_phrase(self.status_code))
+ try:
+ return self.extensions["reason_phrase"].decode("ascii", errors="ignore")
+ except KeyError:
+ return codes.get_reason_phrase(self.status_code)
@property
def url(self) -> typing.Optional[URL]:
@@ -1145,16 +1443,18 @@ def num_bytes_downloaded(self) -> int:
def __repr__(self) -> str:
return f"<Response [{self.status_code} {self.reason_phrase}]>"
- @contextlib.contextmanager
- def _wrap_decoder_errors(self) -> typing.Iterator[None]:
- # If the response has an associated request instance, we want decoding
- # errors to be raised as proper `httpx.DecodingError` exceptions.
- try:
- yield
- except ValueError as exc:
- if self._request is None:
- raise exc
- raise DecodingError(message=str(exc), request=self.request) from exc
+ def __getstate__(self) -> typing.Dict[str, typing.Any]:
+ return {
+ name: value
+ for name, value in self.__dict__.items()
+ if name not in ["stream", "is_closed", "_decoder"]
+ }
+
+ def __setstate__(self, state: typing.Dict[str, typing.Any]) -> None:
+ for name, value in state.items():
+ setattr(self, name, value)
+ self.is_closed = True
+ self.stream = UnattachedStream()
def read(self) -> bytes:
"""
@@ -1176,7 +1476,7 @@ def iter_bytes(self, chunk_size: int = None) -> typing.Iterator[bytes]:
else:
decoder = self._get_content_decoder()
chunker = ByteChunker(chunk_size=chunk_size)
- with self._wrap_decoder_errors():
+ with request_context(request=self._request):
for raw_bytes in self.iter_raw():
decoded = decoder.decode(raw_bytes)
for chunk in chunker.decode(decoded):
@@ -1195,7 +1495,7 @@ def iter_text(self, chunk_size: int = None) -> typing.Iterator[str]:
"""
decoder = TextDecoder(encoding=self.encoding)
chunker = TextChunker(chunk_size=chunk_size)
- with self._wrap_decoder_errors():
+ with request_context(request=self._request):
for byte_content in self.iter_bytes():
text_content = decoder.decode(byte_content)
for chunk in chunker.decode(text_content):
@@ -1208,7 +1508,7 @@ def iter_text(self, chunk_size: int = None) -> typing.Iterator[str]:
def iter_lines(self) -> typing.Iterator[str]:
decoder = LineDecoder()
- with self._wrap_decoder_errors():
+ with request_context(request=self._request):
for text in self.iter_text():
for line in decoder.decode(text):
yield line
@@ -1222,15 +1522,15 @@ def iter_raw(self, chunk_size: int = None) -> typing.Iterator[bytes]:
if self.is_stream_consumed:
raise StreamConsumed()
if self.is_closed:
- raise ResponseClosed()
- if not isinstance(self.stream, typing.Iterable):
+ raise StreamClosed()
+ if not isinstance(self.stream, SyncByteStream):
raise RuntimeError("Attempted to call a sync iterator on an async stream.")
self.is_stream_consumed = True
self._num_bytes_downloaded = 0
chunker = ByteChunker(chunk_size=chunk_size)
- with map_exceptions(HTTPCORE_EXC_MAP, request=self._request):
+ with request_context(request=self._request):
for raw_stream_bytes in self.stream:
self._num_bytes_downloaded += len(raw_stream_bytes)
for chunk in chunker.decode(raw_stream_bytes):
@@ -1246,10 +1546,13 @@ def close(self) -> None:
Close the response and release the connection.
Automatically called if the response body is read to completion.
"""
+ if not isinstance(self.stream, SyncByteStream):
+ raise RuntimeError("Attempted to call an sync close on an async stream.")
+
if not self.is_closed:
self.is_closed = True
- if self._on_close is not None:
- self._on_close(self)
+ with request_context(request=self._request):
+ self.stream.close()
async def aread(self) -> bytes:
"""
@@ -1271,7 +1574,7 @@ async def aiter_bytes(self, chunk_size: int = None) -> typing.AsyncIterator[byte
else:
decoder = self._get_content_decoder()
chunker = ByteChunker(chunk_size=chunk_size)
- with self._wrap_decoder_errors():
+ with request_context(request=self._request):
async for raw_bytes in self.aiter_raw():
decoded = decoder.decode(raw_bytes)
for chunk in chunker.decode(decoded):
@@ -1290,7 +1593,7 @@ async def aiter_text(self, chunk_size: int = None) -> typing.AsyncIterator[str]:
"""
decoder = TextDecoder(encoding=self.encoding)
chunker = TextChunker(chunk_size=chunk_size)
- with self._wrap_decoder_errors():
+ with request_context(request=self._request):
async for byte_content in self.aiter_bytes():
text_content = decoder.decode(byte_content)
for chunk in chunker.decode(text_content):
@@ -1303,7 +1606,7 @@ async def aiter_text(self, chunk_size: int = None) -> typing.AsyncIterator[str]:
async def aiter_lines(self) -> typing.AsyncIterator[str]:
decoder = LineDecoder()
- with self._wrap_decoder_errors():
+ with request_context(request=self._request):
async for text in self.aiter_text():
for line in decoder.decode(text):
yield line
@@ -1317,15 +1620,15 @@ async def aiter_raw(self, chunk_size: int = None) -> typing.AsyncIterator[bytes]
if self.is_stream_consumed:
raise StreamConsumed()
if self.is_closed:
- raise ResponseClosed()
- if not isinstance(self.stream, typing.AsyncIterable):
- raise RuntimeError("Attempted to call a async iterator on a sync stream.")
+ raise StreamClosed()
+ if not isinstance(self.stream, AsyncByteStream):
+ raise RuntimeError("Attempted to call an async iterator on an sync stream.")
self.is_stream_consumed = True
self._num_bytes_downloaded = 0
chunker = ByteChunker(chunk_size=chunk_size)
- with map_exceptions(HTTPCORE_EXC_MAP, request=self._request):
+ with request_context(request=self._request):
async for raw_stream_bytes in self.stream:
self._num_bytes_downloaded += len(raw_stream_bytes)
for chunk in chunker.decode(raw_stream_bytes):
@@ -1341,10 +1644,13 @@ async def aclose(self) -> None:
Close the response and release the connection.
Automatically called if the response body is read to completion.
"""
+ if not isinstance(self.stream, AsyncByteStream):
+ raise RuntimeError("Attempted to call an async close on an sync stream.")
+
if not self.is_closed:
self.is_closed = True
- if self._on_close is not None:
- await self._on_close(self)
+ with request_context(request=self._request):
+ await self.stream.aclose()
class Cookies(MutableMapping):
diff --git a/httpx/_multipart.py b/httpx/_multipart.py
index f690afc9ae..cb23d0cfa5 100644
--- a/httpx/_multipart.py
+++ b/httpx/_multipart.py
@@ -3,11 +3,13 @@
import typing
from pathlib import Path
+from ._transports.base import AsyncByteStream, SyncByteStream
from ._types import FileContent, FileTypes, RequestFiles
from ._utils import (
format_form_param,
guess_content_type,
peek_filelike_length,
+ primitive_value_to_str,
to_bytes,
)
@@ -17,17 +19,21 @@ class DataField:
A single form field item, within a multipart form field.
"""
- def __init__(self, name: str, value: typing.Union[str, bytes]) -> None:
+ def __init__(
+ self, name: str, value: typing.Union[str, bytes, int, float, None]
+ ) -> None:
if not isinstance(name, str):
raise TypeError(
f"Invalid type for name. Expected str, got {type(name)}: {name!r}"
)
- if not isinstance(value, (str, bytes)):
+ if value is not None and not isinstance(value, (str, bytes, int, float)):
raise TypeError(
- f"Invalid type for value. Expected str or bytes, got {type(value)}: {value!r}"
+ f"Invalid type for value. Expected primitive type, got {type(value)}: {value!r}"
)
self.name = name
- self.value = value
+ self.value: typing.Union[str, bytes] = (
+ value if isinstance(value, bytes) else primitive_value_to_str(value)
+ )
def render_headers(self) -> bytes:
if not hasattr(self, "_headers"):
@@ -40,11 +46,7 @@ def render_headers(self) -> bytes:
def render_data(self) -> bytes:
if not hasattr(self, "_data"):
- self._data = (
- self.value
- if isinstance(self.value, bytes)
- else self.value.encode("utf-8")
- )
+ self._data = to_bytes(self.value)
return self._data
@@ -88,7 +90,7 @@ def get_length(self) -> int:
headers = self.render_headers()
if isinstance(self.file, (str, bytes)):
- return len(headers) + len(self.file)
+ return len(headers) + len(to_bytes(self.file))
# Let's do our best not to read `file` into memory.
try:
@@ -140,7 +142,7 @@ def render(self) -> typing.Iterator[bytes]:
yield from self.render_data()
-class MultipartStream:
+class MultipartStream(SyncByteStream, AsyncByteStream):
"""
Request content as streaming multipart encoded form data.
"""
diff --git a/httpx/_status_codes.py b/httpx/_status_codes.py
index f7ee6b64a9..100aec641b 100644
--- a/httpx/_status_codes.py
+++ b/httpx/_status_codes.py
@@ -1,4 +1,3 @@
-import warnings
from enum import IntEnum
@@ -142,23 +141,3 @@ def is_server_error(cls, value: int) -> bool:
# Include lower-case styles for `requests` compatibility.
for code in codes:
setattr(codes, code._name_.lower(), int(code))
-
-
-class StatusCodeCompat:
- def __call__(self, *args, **kwargs): # type: ignore
- message = "`httpx.StatusCode` is deprecated. Use `httpx.codes` instead."
- warnings.warn(message, DeprecationWarning)
- return codes(*args, **kwargs)
-
- def __getattr__(self, attr): # type: ignore
- message = "`httpx.StatusCode` is deprecated. Use `httpx.codes` instead."
- warnings.warn(message, DeprecationWarning)
- return getattr(codes, attr)
-
- def __getitem__(self, item): # type: ignore
- message = "`httpx.StatusCode` is deprecated. Use `httpx.codes` instead."
- warnings.warn(message, DeprecationWarning)
- return codes[item]
-
-
-StatusCode = StatusCodeCompat()
diff --git a/httpx/_transports/asgi.py b/httpx/_transports/asgi.py
index 758d8375b2..24c5452dc9 100644
--- a/httpx/_transports/asgi.py
+++ b/httpx/_transports/asgi.py
@@ -1,15 +1,16 @@
-from typing import TYPE_CHECKING, Callable, List, Optional, Tuple, Union
+import typing
from urllib.parse import unquote
-import httpcore
import sniffio
-if TYPE_CHECKING: # pragma: no cover
+from .base import AsyncBaseTransport, AsyncByteStream
+
+if typing.TYPE_CHECKING: # pragma: no cover
import asyncio
import trio
- Event = Union[asyncio.Event, trio.Event]
+ Event = typing.Union[asyncio.Event, trio.Event]
def create_event() -> "Event":
@@ -23,7 +24,15 @@ def create_event() -> "Event":
return asyncio.Event()
-class ASGITransport(httpcore.AsyncHTTPTransport):
+class ASGIResponseStream(AsyncByteStream):
+ def __init__(self, body: typing.List[bytes]) -> None:
+ self._body = body
+
+ async def __aiter__(self) -> typing.AsyncIterator[bytes]:
+ yield b"".join(self._body)
+
+
+class ASGITransport(AsyncBaseTransport):
"""
A custom AsyncTransport that handles sending requests directly to an ASGI app.
The simplest way to use this functionality is to use the `app` argument.
@@ -58,27 +67,26 @@ class ASGITransport(httpcore.AsyncHTTPTransport):
def __init__(
self,
- app: Callable,
+ app: typing.Callable,
raise_app_exceptions: bool = True,
root_path: str = "",
- client: Tuple[str, int] = ("127.0.0.1", 123),
+ client: typing.Tuple[str, int] = ("127.0.0.1", 123),
) -> None:
self.app = app
self.raise_app_exceptions = raise_app_exceptions
self.root_path = root_path
self.client = client
- async def arequest(
+ async def handle_async_request(
self,
method: bytes,
- url: Tuple[bytes, bytes, Optional[int], bytes],
- headers: List[Tuple[bytes, bytes]] = None,
- stream: httpcore.AsyncByteStream = None,
- ext: dict = None,
- ) -> Tuple[int, List[Tuple[bytes, bytes]], httpcore.AsyncByteStream, dict]:
- headers = [] if headers is None else headers
- stream = httpcore.PlainByteStream(content=b"") if stream is None else stream
-
+ url: typing.Tuple[bytes, bytes, typing.Optional[int], bytes],
+ headers: typing.List[typing.Tuple[bytes, bytes]],
+ stream: AsyncByteStream,
+ extensions: dict,
+ ) -> typing.Tuple[
+ int, typing.List[typing.Tuple[bytes, bytes]], AsyncByteStream, dict
+ ]:
# ASGI scope.
scheme, host, port, full_path = url
path, _, query = full_path.partition(b"?")
@@ -155,7 +163,7 @@ async def send(message: dict) -> None:
assert status_code is not None
assert response_headers is not None
- stream = httpcore.PlainByteStream(content=b"".join(body_parts))
- ext = {}
+ stream = ASGIResponseStream(body_parts)
+ extensions = {}
- return (status_code, response_headers, stream, ext)
+ return (status_code, response_headers, stream, extensions)
diff --git a/httpx/_transports/base.py b/httpx/_transports/base.py
new file mode 100644
index 0000000000..eb51926970
--- /dev/null
+++ b/httpx/_transports/base.py
@@ -0,0 +1,183 @@
+import typing
+from types import TracebackType
+
+T = typing.TypeVar("T", bound="BaseTransport")
+A = typing.TypeVar("A", bound="AsyncBaseTransport")
+
+
+class SyncByteStream:
+ def __iter__(self) -> typing.Iterator[bytes]:
+ raise NotImplementedError(
+ "The '__iter__' method must be implemented."
+ ) # pragma: nocover
+ yield b"" # pragma: nocover
+
+ def close(self) -> None:
+ """
+ Subclasses can override this method to release any network resources
+ after a request/response cycle is complete.
+
+ Streaming cases should use a `try...finally` block to ensure that
+ the stream `close()` method is always called.
+
+ Example:
+
+ status_code, headers, stream, extensions = transport.handle_request(...)
+ try:
+ ...
+ finally:
+ stream.close()
+ """
+
+ def read(self) -> bytes:
+ """
+ Simple cases can use `.read()` as a convenience method for consuming
+ the entire stream and then closing it.
+
+ Example:
+
+ status_code, headers, stream, extensions = transport.handle_request(...)
+ body = stream.read()
+ """
+ try:
+ return b"".join([part for part in self])
+ finally:
+ self.close()
+
+
+class AsyncByteStream:
+ async def __aiter__(self) -> typing.AsyncIterator[bytes]:
+ raise NotImplementedError(
+ "The '__aiter__' method must be implemented."
+ ) # pragma: nocover
+ yield b"" # pragma: nocover
+
+ async def aclose(self) -> None:
+ pass
+
+ async def aread(self) -> bytes:
+ try:
+ return b"".join([part async for part in self])
+ finally:
+ await self.aclose()
+
+
+class BaseTransport:
+ def __enter__(self: T) -> T:
+ return self
+
+ def __exit__(
+ self,
+ exc_type: typing.Type[BaseException] = None,
+ exc_value: BaseException = None,
+ traceback: TracebackType = None,
+ ) -> None:
+ self.close()
+
+ def handle_request(
+ self,
+ method: bytes,
+ url: typing.Tuple[bytes, bytes, typing.Optional[int], bytes],
+ headers: typing.List[typing.Tuple[bytes, bytes]],
+ stream: SyncByteStream,
+ extensions: dict,
+ ) -> typing.Tuple[
+ int, typing.List[typing.Tuple[bytes, bytes]], SyncByteStream, dict
+ ]:
+ """
+ Send a single HTTP request and return a response.
+
+ At this layer of API we're simply using plain primitives. No `Request` or
+ `Response` models, no fancy `URL` or `Header` handling. This strict point
+ of cut-off provides a clear design separation between the HTTPX API,
+ and the low-level network handling.
+
+ Developers shouldn't typically ever need to call into this API directly,
+ since the Client class provides all the higher level user-facing API
+ niceties.
+
+ In order to properly release any network resources, the response stream
+ should *either* be consumed immediately, with a call to `stream.read()`,
+ or else the `handle_request` call should be followed with a try/finally
+ block to ensure the stream is always closed.
+
+ Example usage:
+
+ with httpx.HTTPTransport() as transport:
+ status_code, headers, stream, extensions = transport.handle_request(
+ method=b'GET',
+ url=(b'https', b'www.example.com', 443, b'/'),
+ headers=[(b'Host', b'www.example.com')],
+ stream=[],
+ extensions={}
+ )
+ body = stream.read()
+ print(status_code, headers, body)
+
+ Arguments:
+
+ method: The request method as bytes. Eg. b'GET'.
+ url: The components of the request URL, as a tuple of `(scheme, host, port, target)`.
+ The target will usually be the URL path, but also allows for alternative
+ formulations, such as proxy requests which include the complete URL in
+ the target portion of the HTTP request, or for "OPTIONS *" requests, which
+ cannot be expressed in a URL string.
+ headers: The request headers as a list of byte pairs.
+ stream: The request body as a bytes iterator.
+ extensions: An open ended dictionary, including optional extensions to the
+ core request/response API. Keys may include:
+ timeout: A dictionary of str:Optional[float] timeout values.
+ May include values for 'connect', 'read', 'write', or 'pool'.
+
+ Returns a tuple of:
+
+ status_code: The response status code as an integer. Should be in the range 1xx-5xx.
+ headers: The response headers as a list of byte pairs.
+ stream: The response body as a bytes iterator.
+ extensions: An open ended dictionary, including optional extensions to the
+ core request/response API. Keys are plain strings, and may include:
+ reason_phrase: The reason-phrase of the HTTP response, as bytes. Eg b'OK'.
+ HTTP/2 onwards does not include a reason phrase on the wire.
+ When no key is included, a default based on the status code may
+ be used. An empty-string reason phrase should not be substituted
+ for a default, as it indicates the server left the portion blank
+ eg. the leading response bytes were b"HTTP/1.1 200 ".
+ http_version: The HTTP version, as bytes. Eg. b"HTTP/1.1".
+ When no http_version key is included, HTTP/1.1 may be assumed.
+ """
+ raise NotImplementedError(
+ "The 'handle_request' method must be implemented."
+ ) # pragma: nocover
+
+ def close(self) -> None:
+ pass
+
+
+class AsyncBaseTransport:
+ async def __aenter__(self: A) -> A:
+ return self
+
+ async def __aexit__(
+ self,
+ exc_type: typing.Type[BaseException] = None,
+ exc_value: BaseException = None,
+ traceback: TracebackType = None,
+ ) -> None:
+ await self.aclose()
+
+ async def handle_async_request(
+ self,
+ method: bytes,
+ url: typing.Tuple[bytes, bytes, typing.Optional[int], bytes],
+ headers: typing.List[typing.Tuple[bytes, bytes]],
+ stream: AsyncByteStream,
+ extensions: dict,
+ ) -> typing.Tuple[
+ int, typing.List[typing.Tuple[bytes, bytes]], AsyncByteStream, dict
+ ]:
+ raise NotImplementedError(
+ "The 'handle_async_request' method must be implemented."
+ ) # pragma: nocover
+
+ async def aclose(self) -> None:
+ pass
diff --git a/httpx/_transports/default.py b/httpx/_transports/default.py
index 84aeb26be8..39e410824d 100644
--- a/httpx/_transports/default.py
+++ b/httpx/_transports/default.py
@@ -24,21 +24,94 @@
transport = httpx.HTTPTransport(uds="socket.uds")
client = httpx.Client(transport=transport)
"""
+import contextlib
import typing
from types import TracebackType
import httpcore
from .._config import DEFAULT_LIMITS, Limits, Proxy, create_ssl_context
+from .._exceptions import (
+ CloseError,
+ ConnectError,
+ ConnectTimeout,
+ LocalProtocolError,
+ NetworkError,
+ PoolTimeout,
+ ProtocolError,
+ ProxyError,
+ ReadError,
+ ReadTimeout,
+ RemoteProtocolError,
+ TimeoutException,
+ UnsupportedProtocol,
+ WriteError,
+ WriteTimeout,
+)
from .._types import CertTypes, VerifyTypes
+from .base import AsyncBaseTransport, AsyncByteStream, BaseTransport, SyncByteStream
T = typing.TypeVar("T", bound="HTTPTransport")
A = typing.TypeVar("A", bound="AsyncHTTPTransport")
-Headers = typing.List[typing.Tuple[bytes, bytes]]
-URL = typing.Tuple[bytes, bytes, typing.Optional[int], bytes]
-class HTTPTransport(httpcore.SyncHTTPTransport):
+@contextlib.contextmanager
+def map_httpcore_exceptions() -> typing.Iterator[None]:
+ try:
+ yield
+ except Exception as exc:
+ mapped_exc = None
+
+ for from_exc, to_exc in HTTPCORE_EXC_MAP.items():
+ if not isinstance(exc, from_exc):
+ continue
+ # We want to map to the most specific exception we can find.
+ # Eg if `exc` is an `httpcore.ReadTimeout`, we want to map to
+ # `httpx.ReadTimeout`, not just `httpx.TimeoutException`.
+ if mapped_exc is None or issubclass(to_exc, mapped_exc):
+ mapped_exc = to_exc
+
+ if mapped_exc is None: # pragma: nocover
+ raise
+
+ message = str(exc)
+ raise mapped_exc(message) from exc
+
+
+HTTPCORE_EXC_MAP = {
+ httpcore.TimeoutException: TimeoutException,
+ httpcore.ConnectTimeout: ConnectTimeout,
+ httpcore.ReadTimeout: ReadTimeout,
+ httpcore.WriteTimeout: WriteTimeout,
+ httpcore.PoolTimeout: PoolTimeout,
+ httpcore.NetworkError: NetworkError,
+ httpcore.ConnectError: ConnectError,
+ httpcore.ReadError: ReadError,
+ httpcore.WriteError: WriteError,
+ httpcore.CloseError: CloseError,
+ httpcore.ProxyError: ProxyError,
+ httpcore.UnsupportedProtocol: UnsupportedProtocol,
+ httpcore.ProtocolError: ProtocolError,
+ httpcore.LocalProtocolError: LocalProtocolError,
+ httpcore.RemoteProtocolError: RemoteProtocolError,
+}
+
+
+class ResponseStream(SyncByteStream):
+ def __init__(self, httpcore_stream: httpcore.SyncByteStream):
+ self._httpcore_stream = httpcore_stream
+
+ def __iter__(self) -> typing.Iterator[bytes]:
+ with map_httpcore_exceptions():
+ for part in self._httpcore_stream:
+ yield part
+
+ def close(self) -> None:
+ with map_httpcore_exceptions():
+ self._httpcore_stream.close()
+
+
+class HTTPTransport(BaseTransport):
def __init__(
self,
verify: VerifyTypes = True,
@@ -91,21 +164,48 @@ def __exit__(
) -> None:
self._pool.__exit__(exc_type, exc_value, traceback)
- def request(
+ def handle_request(
self,
method: bytes,
- url: URL,
- headers: Headers = None,
- stream: httpcore.SyncByteStream = None,
- ext: dict = None,
- ) -> typing.Tuple[int, Headers, httpcore.SyncByteStream, dict]:
- return self._pool.request(method, url, headers=headers, stream=stream, ext=ext)
+ url: typing.Tuple[bytes, bytes, typing.Optional[int], bytes],
+ headers: typing.List[typing.Tuple[bytes, bytes]],
+ stream: SyncByteStream,
+ extensions: dict,
+ ) -> typing.Tuple[
+ int, typing.List[typing.Tuple[bytes, bytes]], SyncByteStream, dict
+ ]:
+ with map_httpcore_exceptions():
+ status_code, headers, byte_stream, extensions = self._pool.handle_request(
+ method=method,
+ url=url,
+ headers=headers,
+ stream=httpcore.IteratorByteStream(iter(stream)),
+ extensions=extensions,
+ )
+
+ stream = ResponseStream(byte_stream)
+
+ return status_code, headers, stream, extensions
def close(self) -> None:
self._pool.close()
-class AsyncHTTPTransport(httpcore.AsyncHTTPTransport):
+class AsyncResponseStream(AsyncByteStream):
+ def __init__(self, httpcore_stream: httpcore.AsyncByteStream):
+ self._httpcore_stream = httpcore_stream
+
+ async def __aiter__(self) -> typing.AsyncIterator[bytes]:
+ with map_httpcore_exceptions():
+ async for part in self._httpcore_stream:
+ yield part
+
+ async def aclose(self) -> None:
+ with map_httpcore_exceptions():
+ await self._httpcore_stream.aclose()
+
+
+class AsyncHTTPTransport(AsyncBaseTransport):
def __init__(
self,
verify: VerifyTypes = True,
@@ -158,17 +258,33 @@ async def __aexit__(
) -> None:
await self._pool.__aexit__(exc_type, exc_value, traceback)
- async def arequest(
+ async def handle_async_request(
self,
method: bytes,
- url: URL,
- headers: Headers = None,
- stream: httpcore.AsyncByteStream = None,
- ext: dict = None,
- ) -> typing.Tuple[int, Headers, httpcore.AsyncByteStream, dict]:
- return await self._pool.arequest(
- method, url, headers=headers, stream=stream, ext=ext
- )
+ url: typing.Tuple[bytes, bytes, typing.Optional[int], bytes],
+ headers: typing.List[typing.Tuple[bytes, bytes]],
+ stream: AsyncByteStream,
+ extensions: dict,
+ ) -> typing.Tuple[
+ int, typing.List[typing.Tuple[bytes, bytes]], AsyncByteStream, dict
+ ]:
+ with map_httpcore_exceptions():
+ (
+ status_code,
+ headers,
+ byte_stream,
+ extensions,
+ ) = await self._pool.handle_async_request(
+ method=method,
+ url=url,
+ headers=headers,
+ stream=httpcore.AsyncIteratorByteStream(stream.__aiter__()),
+ extensions=extensions,
+ )
+
+ stream = AsyncResponseStream(byte_stream)
+
+ return status_code, headers, stream, extensions
async def aclose(self) -> None:
await self._pool.aclose()
diff --git a/httpx/_transports/mock.py b/httpx/_transports/mock.py
index a55a88b7a2..8d59b73820 100644
--- a/httpx/_transports/mock.py
+++ b/httpx/_transports/mock.py
@@ -1,23 +1,24 @@
import asyncio
-from typing import Callable, List, Optional, Tuple
-
-import httpcore
+import typing
from .._models import Request
+from .base import AsyncBaseTransport, AsyncByteStream, BaseTransport, SyncByteStream
-class MockTransport(httpcore.SyncHTTPTransport, httpcore.AsyncHTTPTransport):
- def __init__(self, handler: Callable) -> None:
+class MockTransport(AsyncBaseTransport, BaseTransport):
+ def __init__(self, handler: typing.Callable) -> None:
self.handler = handler
- def request(
+ def handle_request(
self,
method: bytes,
- url: Tuple[bytes, bytes, Optional[int], bytes],
- headers: List[Tuple[bytes, bytes]] = None,
- stream: httpcore.SyncByteStream = None,
- ext: dict = None,
- ) -> Tuple[int, List[Tuple[bytes, bytes]], httpcore.SyncByteStream, dict]:
+ url: typing.Tuple[bytes, bytes, typing.Optional[int], bytes],
+ headers: typing.List[typing.Tuple[bytes, bytes]],
+ stream: SyncByteStream,
+ extensions: dict,
+ ) -> typing.Tuple[
+ int, typing.List[typing.Tuple[bytes, bytes]], SyncByteStream, dict
+ ]:
request = Request(
method=method,
url=url,
@@ -30,17 +31,19 @@ def request(
response.status_code,
response.headers.raw,
response.stream,
- response.ext,
+ response.extensions,
)
- async def arequest(
+ async def handle_async_request(
self,
method: bytes,
- url: Tuple[bytes, bytes, Optional[int], bytes],
- headers: List[Tuple[bytes, bytes]] = None,
- stream: httpcore.AsyncByteStream = None,
- ext: dict = None,
- ) -> Tuple[int, List[Tuple[bytes, bytes]], httpcore.AsyncByteStream, dict]:
+ url: typing.Tuple[bytes, bytes, typing.Optional[int], bytes],
+ headers: typing.List[typing.Tuple[bytes, bytes]],
+ stream: AsyncByteStream,
+ extensions: dict,
+ ) -> typing.Tuple[
+ int, typing.List[typing.Tuple[bytes, bytes]], AsyncByteStream, dict
+ ]:
request = Request(
method=method,
url=url,
@@ -63,5 +66,5 @@ async def arequest(
response.status_code,
response.headers.raw,
response.stream,
- response.ext,
+ response.extensions,
)
diff --git a/httpx/_transports/wsgi.py b/httpx/_transports/wsgi.py
index 67b44bde42..c8266c7392 100644
--- a/httpx/_transports/wsgi.py
+++ b/httpx/_transports/wsgi.py
@@ -3,7 +3,7 @@
import typing
from urllib.parse import unquote
-import httpcore
+from .base import BaseTransport, SyncByteStream
def _skip_leading_empty_chunks(body: typing.Iterable) -> typing.Iterable:
@@ -14,7 +14,16 @@ def _skip_leading_empty_chunks(body: typing.Iterable) -> typing.Iterable:
return []
-class WSGITransport(httpcore.SyncHTTPTransport):
+class WSGIByteStream(SyncByteStream):
+ def __init__(self, result: typing.Iterable[bytes]) -> None:
+ self._result = _skip_leading_empty_chunks(result)
+
+ def __iter__(self) -> typing.Iterator[bytes]:
+ for part in self._result:
+ yield part
+
+
+class WSGITransport(BaseTransport):
"""
A custom transport that handles sending requests directly to an WSGI app.
The simplest way to use this functionality is to use the `app` argument.
@@ -59,18 +68,17 @@ def __init__(
self.script_name = script_name
self.remote_addr = remote_addr
- def request(
+ def handle_request(
self,
method: bytes,
url: typing.Tuple[bytes, bytes, typing.Optional[int], bytes],
- headers: typing.List[typing.Tuple[bytes, bytes]] = None,
- stream: httpcore.SyncByteStream = None,
- ext: dict = None,
+ headers: typing.List[typing.Tuple[bytes, bytes]],
+ stream: SyncByteStream,
+ extensions: dict,
) -> typing.Tuple[
- int, typing.List[typing.Tuple[bytes, bytes]], httpcore.SyncByteStream, dict
+ int, typing.List[typing.Tuple[bytes, bytes]], SyncByteStream, dict
]:
- headers = [] if headers is None else headers
- stream = httpcore.PlainByteStream(content=b"") if stream is None else stream
+ wsgi_input = io.BytesIO(b"".join(stream))
scheme, host, port, full_path = url
path, _, query = full_path.partition(b"?")
@@ -80,7 +88,7 @@ def request(
environ = {
"wsgi.version": (1, 0),
"wsgi.url_scheme": scheme.decode("ascii"),
- "wsgi.input": io.BytesIO(b"".join(stream)),
+ "wsgi.input": wsgi_input,
"wsgi.errors": io.BytesIO(),
"wsgi.multithread": True,
"wsgi.multiprocess": False,
@@ -112,9 +120,8 @@ def start_response(
seen_exc_info = exc_info
result = self.app(environ, start_response)
- # This is needed because the status returned by start_response
- # shouldn't be used until the first non-empty chunk has been served.
- result = _skip_leading_empty_chunks(result)
+
+ stream = WSGIByteStream(result)
assert seen_status is not None
assert seen_response_headers is not None
@@ -126,7 +133,6 @@ def start_response(
(key.encode("ascii"), value.encode("ascii"))
for key, value in seen_response_headers
]
- stream = httpcore.IteratorByteStream(iterator=result)
- ext = {}
+ extensions = {}
- return (status_code, headers, stream, ext)
+ return (status_code, headers, stream, extensions)
diff --git a/httpx/_types.py b/httpx/_types.py
index 385f89ddb9..75bb9006c8 100644
--- a/httpx/_types.py
+++ b/httpx/_types.py
@@ -74,9 +74,8 @@
None,
]
-ByteStream = Union[Iterable[bytes], AsyncIterable[bytes]]
-RequestContent = Union[str, bytes, ByteStream]
-ResponseContent = Union[str, bytes, ByteStream]
+RequestContent = Union[str, bytes, Iterable[bytes], AsyncIterable[bytes]]
+ResponseContent = Union[str, bytes, Iterable[bytes], AsyncIterable[bytes]]
RequestData = dict
diff --git a/httpx/_utils.py b/httpx/_utils.py
index 072db3f1e8..dcdc5c3aa5 100644
--- a/httpx/_utils.py
+++ b/httpx/_utils.py
@@ -1,5 +1,4 @@
import codecs
-import collections
import logging
import mimetypes
import netrc
@@ -8,7 +7,6 @@
import sys
import time
import typing
-import warnings
from pathlib import Path
from urllib.request import getproxies
@@ -56,9 +54,9 @@ def normalize_header_value(
return value.encode(encoding or "ascii")
-def str_query_param(value: "PrimitiveData") -> str:
+def primitive_value_to_str(value: "PrimitiveData") -> str:
"""
- Coerce a primitive data type into a string value for query params.
+ Coerce a primitive data type into a string value.
Note that we prefer JSON-style 'true'/'false' for boolean values here.
"""
@@ -370,31 +368,6 @@ def peek_filelike_length(stream: typing.IO) -> int:
return os.fstat(fd).st_size
-def flatten_queryparams(
- queryparams: typing.Mapping[
- str, typing.Union["PrimitiveData", typing.Sequence["PrimitiveData"]]
- ]
-) -> typing.List[typing.Tuple[str, "PrimitiveData"]]:
- """
- Convert a mapping of query params into a flat list of two-tuples
- representing each item.
-
- Example:
- >>> flatten_queryparams_values({"q": "httpx", "tag": ["python", "dev"]})
- [("q", "httpx), ("tag", "python"), ("tag", "dev")]
- """
- items = []
-
- for k, v in queryparams.items():
- if isinstance(v, collections.abc.Sequence) and not isinstance(v, (str, bytes)):
- for u in v:
- items.append((k, u))
- else:
- items.append((k, typing.cast("PrimitiveData", v)))
-
- return items
-
-
class Timer:
async def _get_time(self) -> float:
library = sniffio.current_async_library()
@@ -472,12 +445,11 @@ def __init__(self, pattern: str) -> None:
from ._models import URL
if pattern and ":" not in pattern:
- warn_deprecated(
+ raise ValueError(
f"Proxy keys should use proper URL forms rather "
f"than plain scheme strings. "
f'Instead of "{pattern}", use "{pattern}://"'
)
- pattern += "://"
url = URL(pattern)
self.pattern = pattern
@@ -535,7 +507,3 @@ def __lt__(self, other: "URLPattern") -> bool:
def __eq__(self, other: typing.Any) -> bool:
return isinstance(other, URLPattern) and self.pattern == other.pattern
-
-
-def warn_deprecated(message: str) -> None: # pragma: nocover
- warnings.warn(message, DeprecationWarning, stacklevel=2)
diff --git a/mkdocs.yml b/mkdocs.yml
index ccafcac1f8..4bcc4d75bd 100644
--- a/mkdocs.yml
+++ b/mkdocs.yml
@@ -20,8 +20,9 @@ nav:
- Developer Interface: 'api.md'
- Exceptions: 'exceptions.md'
- Troubleshooting: 'troubleshooting.md'
- - Third Party Packages: 'third-party-packages.md'
+ - Third Party Packages: 'third_party_packages.md'
- Contributing: 'contributing.md'
+ - Code of Conduct: 'code_of_conduct.md'
markdown_extensions:
- admonition
diff --git a/setup.py b/setup.py
index 0f1b2864ff..1ddae87a9b 100644
--- a/setup.py
+++ b/setup.py
@@ -59,7 +59,8 @@ def get_packages(package):
"certifi",
"sniffio",
"rfc3986[idna2008]>=1.3,<2",
- "httpcore>=0.12.1,<0.13",
+ "httpcore>=0.13.0,<0.14.0",
+ "async_generator; python_version < '3.7'"
],
extras_require={
"http2": "h2==3.*",
diff --git a/tests/client/test_async_client.py b/tests/client/test_async_client.py
index 1d3f4ccafa..0f83eddd7f 100644
--- a/tests/client/test_async_client.py
+++ b/tests/client/test_async_client.py
@@ -1,7 +1,6 @@
import typing
from datetime import timedelta
-import httpcore
import pytest
import httpx
@@ -95,10 +94,21 @@ async def hello_world():
yield b"world!"
async with httpx.AsyncClient() as client:
- response = await client.request("POST", server.url, content=hello_world())
+ response = await client.post(server.url, content=hello_world())
assert response.status_code == 200
+@pytest.mark.usefixtures("async_environment")
+async def test_cannot_stream_sync_request(server):
+ def hello_world(): # pragma: nocover
+ yield b"Hello, "
+ yield b"world!"
+
+ async with httpx.AsyncClient() as client:
+ with pytest.raises(RuntimeError):
+ await client.post(server.url, content=hello_world())
+
+
@pytest.mark.usefixtures("async_environment")
async def test_raise_for_status(server):
async with httpx.AsyncClient() as client:
@@ -169,12 +179,12 @@ async def test_100_continue(server):
@pytest.mark.usefixtures("async_environment")
async def test_context_managed_transport():
- class Transport(httpcore.AsyncHTTPTransport):
+ class Transport(httpx.AsyncBaseTransport):
def __init__(self):
self.events = []
async def aclose(self):
- # The base implementation of httpcore.AsyncHTTPTransport just
+ # The base implementation of httpx.AsyncBaseTransport just
# calls into `.aclose`, so simple transport cases can just override
# this method for any cleanup, where more complex cases
# might want to additionally override `__aenter__`/`__aexit__`.
@@ -201,13 +211,13 @@ async def __aexit__(self, *args):
@pytest.mark.usefixtures("async_environment")
async def test_context_managed_transport_and_mount():
- class Transport(httpcore.AsyncHTTPTransport):
+ class Transport(httpx.AsyncBaseTransport):
def __init__(self, name: str):
self.name: str = name
self.events: typing.List[str] = []
async def aclose(self):
- # The base implementation of httpcore.AsyncHTTPTransport just
+ # The base implementation of httpx.AsyncBaseTransport just
# calls into `.aclose`, so simple transport cases can just override
# this method for any cleanup, where more complex cases
# might want to additionally override `__aenter__`/`__aexit__`.
@@ -303,25 +313,6 @@ async def test_mounted_transport():
assert response.json() == {"app": "mounted"}
-@pytest.mark.usefixtures("async_environment")
-async def test_response_aclose_map_exceptions():
- class BrokenStream:
- async def __aiter__(self):
- # so we're an AsyncIterator
- pass # pragma: nocover
-
- async def aclose(self):
- raise httpcore.CloseError(OSError(104, "Connection reset by peer"))
-
- def handle(request: httpx.Request) -> httpx.Response:
- return httpx.Response(200, stream=BrokenStream())
-
- async with httpx.AsyncClient(transport=httpx.MockTransport(handle)) as client:
- async with client.stream("GET", "http://example.com") as response:
- with pytest.raises(httpx.CloseError):
- await response.aclose()
-
-
@pytest.mark.usefixtures("async_environment")
async def test_async_mock_transport():
async def hello_world(request):
@@ -333,3 +324,12 @@ async def hello_world(request):
response = await client.get("https://www.example.com")
assert response.status_code == 200
assert response.text == "Hello, world!"
+
+
+@pytest.mark.usefixtures("async_environment")
+async def test_server_extensions(server):
+ url = server.url
+ async with httpx.AsyncClient(http2=True) as client:
+ response = await client.get(url)
+ assert response.status_code == 200
+ assert response.extensions["http_version"] == b"HTTP/1.1"
diff --git a/tests/client/test_auth.py b/tests/client/test_auth.py
index c41afeff87..b6cb42d0bb 100644
--- a/tests/client/test_auth.py
+++ b/tests/client/test_auth.py
@@ -630,7 +630,7 @@ async def streaming_body():
async with httpx.AsyncClient(transport=httpx.MockTransport(app)) as client:
with pytest.raises(httpx.StreamConsumed):
- await client.post(url, data=streaming_body(), auth=auth)
+ await client.post(url, content=streaming_body(), auth=auth)
@pytest.mark.asyncio
diff --git a/tests/client/test_client.py b/tests/client/test_client.py
index 13bb7f03ad..01d0de8284 100644
--- a/tests/client/test_client.py
+++ b/tests/client/test_client.py
@@ -1,7 +1,6 @@
import typing
from datetime import timedelta
-import httpcore
import pytest
import httpx
@@ -115,6 +114,16 @@ def test_raw_iterator(server):
assert body == b"Hello, world!"
+def test_cannot_stream_async_request(server):
+ async def hello_world(): # pragma: nocover
+ yield b"Hello, "
+ yield b"world!"
+
+ with httpx.Client() as client:
+ with pytest.raises(RuntimeError):
+ client.post(server.url, content=hello_world())
+
+
def test_raise_for_status(server):
with httpx.Client() as client:
for status_code in (200, 400, 404, 500, 505):
@@ -197,6 +206,12 @@ def test_merge_relative_url_with_dotted_path():
assert request.url == "https://www.example.com/some/testing/123"
+def test_merge_relative_url_with_path_including_colon():
+ client = httpx.Client(base_url="https://www.example.com/some/path")
+ request = client.build_request("GET", "/testing:123")
+ assert request.url == "https://www.example.com/some/path/testing:123"
+
+
def test_merge_relative_url_with_encoded_slashes():
client = httpx.Client(base_url="https://www.example.com/")
request = client.build_request("GET", "/testing%2F123")
@@ -207,23 +222,13 @@ def test_merge_relative_url_with_encoded_slashes():
assert request.url == "https://www.example.com/base%2Fpath/testing"
-def test_pool_limits_deprecated():
- limits = httpx.Limits()
-
- with pytest.warns(DeprecationWarning):
- httpx.Client(pool_limits=limits)
-
- with pytest.warns(DeprecationWarning):
- httpx.AsyncClient(pool_limits=limits)
-
-
def test_context_managed_transport():
- class Transport(httpcore.SyncHTTPTransport):
+ class Transport(httpx.BaseTransport):
def __init__(self):
self.events = []
def close(self):
- # The base implementation of httpcore.SyncHTTPTransport just
+ # The base implementation of httpx.BaseTransport just
# calls into `.close`, so simple transport cases can just override
# this method for any cleanup, where more complex cases
# might want to additionally override `__enter__`/`__exit__`.
@@ -249,13 +254,13 @@ def __exit__(self, *args):
def test_context_managed_transport_and_mount():
- class Transport(httpcore.SyncHTTPTransport):
+ class Transport(httpx.BaseTransport):
def __init__(self, name: str):
self.name: str = name
self.events: typing.List[str] = []
def close(self):
- # The base implementation of httpcore.SyncHTTPTransport just
+ # The base implementation of httpx.BaseTransport just
# calls into `.close`, so simple transport cases can just override
# this method for any cleanup, where more complex cases
# might want to additionally override `__enter__`/`__exit__`.
@@ -378,3 +383,11 @@ def test_all_mounted_transport():
response = client.get("https://www.example.com")
assert response.status_code == 200
assert response.json() == {"app": "mounted"}
+
+
+def test_server_extensions(server):
+ url = server.url.copy_with(path="/http_version_2")
+ with httpx.Client(http2=True) as client:
+ response = client.get(url)
+ assert response.status_code == 200
+ assert response.extensions["http_version"] == b"HTTP/1.1"
diff --git a/tests/client/test_cookies.py b/tests/client/test_cookies.py
index fe9125fa06..f0c8352593 100644
--- a/tests/client/test_cookies.py
+++ b/tests/client/test_cookies.py
@@ -1,5 +1,7 @@
from http.cookiejar import Cookie, CookieJar
+import pytest
+
import httpx
@@ -20,8 +22,25 @@ def test_set_cookie() -> None:
url = "http://example.org/echo_cookies"
cookies = {"example-name": "example-value"}
+ client = httpx.Client(
+ cookies=cookies, transport=httpx.MockTransport(get_and_set_cookies)
+ )
+ response = client.get(url)
+
+ assert response.status_code == 200
+ assert response.json() == {"cookies": "example-name=example-value"}
+
+
+def test_set_per_request_cookie_is_deprecated() -> None:
+ """
+ Sending a request including a per-request cookie is deprecated.
+ """
+ url = "http://example.org/echo_cookies"
+ cookies = {"example-name": "example-value"}
+
client = httpx.Client(transport=httpx.MockTransport(get_and_set_cookies))
- response = client.get(url, cookies=cookies)
+ with pytest.warns(DeprecationWarning):
+ response = client.get(url, cookies=cookies)
assert response.status_code == 200
assert response.json() == {"cookies": "example-name=example-value"}
@@ -55,8 +74,10 @@ def test_set_cookie_with_cookiejar() -> None:
)
cookies.set_cookie(cookie)
- client = httpx.Client(transport=httpx.MockTransport(get_and_set_cookies))
- response = client.get(url, cookies=cookies)
+ client = httpx.Client(
+ cookies=cookies, transport=httpx.MockTransport(get_and_set_cookies)
+ )
+ response = client.get(url)
assert response.status_code == 200
assert response.json() == {"cookies": "example-name=example-value"}
@@ -90,8 +111,9 @@ def test_setting_client_cookies_to_cookiejar() -> None:
)
cookies.set_cookie(cookie)
- client = httpx.Client(transport=httpx.MockTransport(get_and_set_cookies))
- client.cookies = cookies # type: ignore
+ client = httpx.Client(
+ cookies=cookies, transport=httpx.MockTransport(get_and_set_cookies)
+ )
response = client.get(url)
assert response.status_code == 200
@@ -108,7 +130,8 @@ def test_set_cookie_with_cookies_model() -> None:
cookies["example-name"] = "example-value"
client = httpx.Client(transport=httpx.MockTransport(get_and_set_cookies))
- response = client.get(url, cookies=cookies)
+ client.cookies = cookies
+ response = client.get(url)
assert response.status_code == 200
assert response.json() == {"cookies": "example-name=example-value"}
diff --git a/tests/client/test_proxies.py b/tests/client/test_proxies.py
index b491213dae..6ea4cbe407 100644
--- a/tests/client/test_proxies.py
+++ b/tests/client/test_proxies.py
@@ -79,9 +79,8 @@ def test_proxies_parameter(proxies, expected_proxies):
("http://example.com", {"all://": PROXY_URL, "http://example.com": None}, None),
("http://example.com", {"http://": PROXY_URL}, PROXY_URL),
("http://example.com", {"all://example.com": PROXY_URL}, PROXY_URL),
- ("http://example.com", {"all://example.com:80": PROXY_URL}, None),
("http://example.com", {"http://example.com": PROXY_URL}, PROXY_URL),
- ("http://example.com", {"http://example.com:80": PROXY_URL}, None),
+ ("http://example.com", {"http://example.com:80": PROXY_URL}, PROXY_URL),
("http://example.com:8080", {"http://example.com:8080": PROXY_URL}, PROXY_URL),
("http://example.com:8080", {"http://example.com": PROXY_URL}, PROXY_URL),
(
@@ -256,17 +255,19 @@ def test_proxies_environ(monkeypatch, client_class, url, env, expected):
@pytest.mark.parametrize(
- ["proxies", "expected_scheme"],
+ ["proxies", "is_valid"],
[
- ({"http": "http://127.0.0.1"}, "http://"),
- ({"https": "http://127.0.0.1"}, "https://"),
- ({"all": "http://127.0.0.1"}, "all://"),
+ ({"http": "http://127.0.0.1"}, False),
+ ({"https": "http://127.0.0.1"}, False),
+ ({"all": "http://127.0.0.1"}, False),
+ ({"http://": "http://127.0.0.1"}, True),
+ ({"https://": "http://127.0.0.1"}, True),
+ ({"all://": "http://127.0.0.1"}, True),
],
)
-def test_for_deprecated_proxy_params(proxies, expected_scheme):
- with pytest.deprecated_call() as block:
+def test_for_deprecated_proxy_params(proxies, is_valid):
+ if not is_valid:
+ with pytest.raises(ValueError):
+ httpx.Client(proxies=proxies)
+ else:
httpx.Client(proxies=proxies)
-
- warning_message = str(block.pop(DeprecationWarning))
-
- assert expected_scheme in warning_message
diff --git a/tests/client/test_redirects.py b/tests/client/test_redirects.py
index 84d371e9fa..22c5aa0f1a 100644
--- a/tests/client/test_redirects.py
+++ b/tests/client/test_redirects.py
@@ -1,4 +1,3 @@
-import httpcore
import pytest
import httpx
@@ -6,9 +5,7 @@
def redirects(request: httpx.Request) -> httpx.Response:
if request.url.scheme not in ("http", "https"):
- raise httpcore.UnsupportedProtocol(
- f"Scheme {request.url.scheme!r} not supported."
- )
+ raise httpx.UnsupportedProtocol(f"Scheme {request.url.scheme!r} not supported.")
if request.url.path == "/redirect_301":
status_code = httpx.codes.MOVED_PERMANENTLY
@@ -396,3 +393,10 @@ def test_redirect_custom_scheme():
with pytest.raises(httpx.UnsupportedProtocol) as e:
client.post("https://example.org/redirect_custom_scheme")
assert str(e.value) == "Scheme 'market' not supported."
+
+
+@pytest.mark.usefixtures("async_environment")
+async def test_async_invalid_redirect():
+ async with httpx.AsyncClient(transport=httpx.MockTransport(redirects)) as client:
+ with pytest.raises(httpx.RemoteProtocolError):
+ await client.get("http://example.org/invalid_redirect")
diff --git a/tests/conftest.py b/tests/conftest.py
index 12db1b0bb2..62c10c9fb4 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -76,8 +76,6 @@ async def app(scope, receive, send):
assert scope["type"] == "http"
if scope["path"].startswith("/slow_response"):
await slow_response(scope, receive, send)
- elif scope["path"].startswith("/slow_stream_response"):
- await slow_stream_response(scope, receive, send)
elif scope["path"].startswith("/status"):
await status_code(scope, receive, send)
elif scope["path"].startswith("/echo_body"):
@@ -113,19 +111,6 @@ async def slow_response(scope, receive, send):
await send({"type": "http.response.body", "body": b"Hello, world!"})
-async def slow_stream_response(scope, receive, send):
- await send(
- {
- "type": "http.response.start",
- "status": 200,
- "headers": [[b"content-type", b"text/plain"]],
- }
- )
-
- await sleep(1)
- await send({"type": "http.response.body", "body": b"", "more_body": False})
-
-
async def status_code(scope, receive, send):
status_code = int(scope["path"].replace("/status/", ""))
await send(
diff --git a/tests/models/test_queryparams.py b/tests/models/test_queryparams.py
index 7031a65cb9..ba200f146d 100644
--- a/tests/models/test_queryparams.py
+++ b/tests/models/test_queryparams.py
@@ -18,17 +18,17 @@ def test_queryparams(source):
assert "a" in q
assert "A" not in q
assert "c" not in q
- assert q["a"] == "456"
- assert q.get("a") == "456"
+ assert q["a"] == "123"
+ assert q.get("a") == "123"
assert q.get("nope", default=None) is None
assert q.get_list("a") == ["123", "456"]
assert list(q.keys()) == ["a", "b"]
- assert list(q.values()) == ["456", "789"]
- assert list(q.items()) == [("a", "456"), ("b", "789")]
+ assert list(q.values()) == ["123", "789"]
+ assert list(q.items()) == [("a", "123"), ("b", "789")]
assert len(q) == 2
assert list(q) == ["a", "b"]
- assert dict(q) == {"a": "456", "b": "789"}
+ assert dict(q) == {"a": "123", "b": "789"}
assert str(q) == "a=123&a=456&b=789"
assert repr(q) == "QueryParams('a=123&a=456&b=789')"
assert httpx.QueryParams({"a": "123", "b": "456"}) == httpx.QueryParams(
@@ -76,19 +76,50 @@ def test_queryparam_types():
assert str(q) == "a=1&a=2"
-def test_queryparam_setters():
- q = httpx.QueryParams({"a": 1})
- q.update([])
+def test_queryparam_update_is_hard_deprecated():
+ q = httpx.QueryParams("a=123")
+ with pytest.raises(RuntimeError):
+ q.update({"a": "456"})
- assert str(q) == "a=1"
- q = httpx.QueryParams([("a", 1), ("a", 2)])
- q["a"] = "3"
- assert str(q) == "a=3"
+def test_queryparam_setter_is_hard_deprecated():
+ q = httpx.QueryParams("a=123")
+ with pytest.raises(RuntimeError):
+ q["a"] = "456"
- q = httpx.QueryParams([("a", 1), ("b", 1)])
- u = httpx.QueryParams([("b", 2), ("b", 3)])
- q.update(u)
- assert str(q) == "a=1&b=2&b=3"
- assert q["b"] == u["b"]
+def test_queryparam_set():
+ q = httpx.QueryParams("a=123")
+ q = q.set("a", "456")
+ assert q == httpx.QueryParams("a=456")
+
+
+def test_queryparam_add():
+ q = httpx.QueryParams("a=123")
+ q = q.add("a", "456")
+ assert q == httpx.QueryParams("a=123&a=456")
+
+
+def test_queryparam_remove():
+ q = httpx.QueryParams("a=123")
+ q = q.remove("a")
+ assert q == httpx.QueryParams("")
+
+
+def test_queryparam_merge():
+ q = httpx.QueryParams("a=123")
+ q = q.merge({"b": "456"})
+ assert q == httpx.QueryParams("a=123&b=456")
+ q = q.merge({"a": "000", "c": "789"})
+ assert q == httpx.QueryParams("a=000&b=456&c=789")
+
+
+def test_queryparams_are_hashable():
+ params = (
+ httpx.QueryParams("a=123"),
+ httpx.QueryParams({"a": 123}),
+ httpx.QueryParams("b=456"),
+ httpx.QueryParams({"b": 456}),
+ )
+
+ assert len(set(params)) == 2
diff --git a/tests/models/test_requests.py b/tests/models/test_requests.py
index 8756a45985..a93e899458 100644
--- a/tests/models/test_requests.py
+++ b/tests/models/test_requests.py
@@ -1,3 +1,4 @@
+import pickle
import typing
import pytest
@@ -97,11 +98,12 @@ async def test_aread_and_stream_data():
assert content == request.content
-@pytest.mark.asyncio
-async def test_cannot_access_content_without_read():
- # Ensure a request may still be streamed if it has been read.
- # Needed for cases such as authentication classes that read the request body.
- request = httpx.Request("POST", "http://example.org", json={"test": 123})
+def test_cannot_access_streaming_content_without_read():
+    # Ensure that streaming requests raise if content is accessed before read().
+ def streaming_body(): # pragma: nocover
+ yield ""
+
+ request = httpx.Request("POST", "http://example.org", content=streaming_body())
with pytest.raises(httpx.RequestNotRead):
request.content
@@ -112,7 +114,7 @@ async def streaming_body(data):
data = streaming_body(b"test 123")
- request = httpx.Request("POST", "http://example.org", data=data)
+ request = httpx.Request("POST", "http://example.org", content=data)
assert "Content-Length" not in request.headers
assert request.headers["Transfer-Encoding"] == "chunked"
@@ -129,7 +131,7 @@ def streaming_body(data):
data = streaming_body(b"abcd")
headers = {"Content-Length": "4"}
- request = httpx.Request("POST", "http://example.org", data=data, headers=headers)
+ request = httpx.Request("POST", "http://example.org", content=data, headers=headers)
assert "Transfer-Encoding" not in request.headers
assert request.headers["Content-Length"] == "4"
@@ -155,7 +157,7 @@ async def streaming_body(data):
data = streaming_body(b"test 123")
headers = {"Content-Length": "8"}
- request = httpx.Request("POST", "http://example.org", data=data, headers=headers)
+ request = httpx.Request("POST", "http://example.org", content=data, headers=headers)
assert request.headers["Content-Length"] == "8"
@@ -173,3 +175,54 @@ def test_url():
assert request.url.port is None
assert request.url.path == "/abc"
assert request.url.raw_path == b"/abc?foo=bar"
+
+
+def test_request_picklable():
+ request = httpx.Request("POST", "http://example.org", json={"test": 123})
+ pickle_request = pickle.loads(pickle.dumps(request))
+ assert pickle_request.method == "POST"
+ assert pickle_request.url.path == "/"
+ assert pickle_request.headers["Content-Type"] == "application/json"
+ assert pickle_request.content == b'{"test": 123}'
+ assert pickle_request.stream is not None
+ assert request.headers == {
+ "Host": "example.org",
+ "Content-Type": "application/json",
+ "content-length": "13",
+ }
+
+
+@pytest.mark.asyncio
+async def test_request_async_streaming_content_picklable():
+ async def streaming_body(data):
+ yield data
+
+ data = streaming_body(b"test 123")
+ request = httpx.Request("POST", "http://example.org", content=data)
+ pickle_request = pickle.loads(pickle.dumps(request))
+ with pytest.raises(httpx.RequestNotRead):
+ pickle_request.content
+ with pytest.raises(httpx.StreamClosed):
+ await pickle_request.aread()
+
+ request = httpx.Request("POST", "http://example.org", content=data)
+ await request.aread()
+ pickle_request = pickle.loads(pickle.dumps(request))
+ assert pickle_request.content == b"test 123"
+
+
+def test_request_generator_content_picklable():
+ def content():
+ yield b"test 123" # pragma: nocover
+
+ request = httpx.Request("POST", "http://example.org", content=content())
+ pickle_request = pickle.loads(pickle.dumps(request))
+ with pytest.raises(httpx.RequestNotRead):
+ pickle_request.content
+ with pytest.raises(httpx.StreamClosed):
+ pickle_request.read()
+
+ request = httpx.Request("POST", "http://example.org", content=content())
+ request.read()
+ pickle_request = pickle.loads(pickle.dumps(request))
+ assert pickle_request.content == b"test 123"
diff --git a/tests/models/test_responses.py b/tests/models/test_responses.py
index cb46719c17..5e2afc1bf3 100644
--- a/tests/models/test_responses.py
+++ b/tests/models/test_responses.py
@@ -1,4 +1,5 @@
import json
+import pickle
from unittest import mock
import brotli
@@ -382,6 +383,16 @@ def test_iter_raw_on_async():
[part for part in response.iter_raw()]
+def test_close_on_async():
+ response = httpx.Response(
+ 200,
+ content=async_streaming_body(),
+ )
+
+ with pytest.raises(RuntimeError):
+ response.close()
+
+
def test_iter_raw_increments_updates_counter():
response = httpx.Response(200, content=streaming_body())
@@ -430,6 +441,17 @@ async def test_aiter_raw_on_sync():
[part async for part in response.aiter_raw()]
+@pytest.mark.asyncio
+async def test_aclose_on_sync():
+ response = httpx.Response(
+ 200,
+ content=streaming_body(),
+ )
+
+ with pytest.raises(RuntimeError):
+ await response.aclose()
+
+
@pytest.mark.asyncio
async def test_aiter_raw_increments_updates_counter():
response = httpx.Response(200, content=async_streaming_body())
@@ -639,7 +661,7 @@ def test_cannot_read_after_response_closed():
)
response.close()
- with pytest.raises(httpx.ResponseClosed):
+ with pytest.raises(httpx.StreamClosed):
response.read()
@@ -651,7 +673,7 @@ async def test_cannot_aread_after_response_closed():
)
await response.aclose()
- with pytest.raises(httpx.ResponseClosed):
+ with pytest.raises(httpx.StreamClosed):
await response.aread()
@@ -733,7 +755,7 @@ def test_json_without_specified_encoding_value_error():
# force incorrect guess from `guess_json_utf` to trigger error
with mock.patch("httpx._models.guess_json_utf", return_value="utf-32"):
response = httpx.Response(200, content=content, headers=headers)
- with pytest.raises(ValueError):
+ with pytest.raises(json.decoder.JSONDecodeError):
response.json()
@@ -767,7 +789,7 @@ def test_decode_error_with_request(header_value):
headers = [(b"Content-Encoding", header_value)]
body = b"test 123"
compressed_body = brotli.compress(body)[3:]
- with pytest.raises(ValueError):
+ with pytest.raises(httpx.DecodingError):
httpx.Response(
200,
headers=headers,
@@ -788,7 +810,7 @@ def test_value_error_without_request(header_value):
headers = [(b"Content-Encoding", header_value)]
body = b"test 123"
compressed_body = brotli.compress(body)[3:]
- with pytest.raises(ValueError):
+ with pytest.raises(httpx.DecodingError):
httpx.Response(200, headers=headers, content=compressed_body)
@@ -832,3 +854,41 @@ def content():
headers = {"Content-Length": "8"}
response = httpx.Response(200, content=content(), headers=headers)
assert response.headers == {"Content-Length": "8"}
+
+
+def test_response_picklable():
+ response = httpx.Response(
+ 200,
+ content=b"Hello, world!",
+ request=httpx.Request("GET", "https://example.org"),
+ )
+ pickle_response = pickle.loads(pickle.dumps(response))
+ assert pickle_response.is_closed is True
+ assert pickle_response.is_stream_consumed is True
+ assert pickle_response.next_request is None
+ assert pickle_response.stream is not None
+ assert pickle_response.content == b"Hello, world!"
+ assert pickle_response.status_code == 200
+ assert pickle_response.request.url == response.request.url
+ assert pickle_response.extensions == {}
+ assert pickle_response.history == []
+
+
+@pytest.mark.asyncio
+async def test_response_async_streaming_picklable():
+ response = httpx.Response(200, content=async_streaming_body())
+ pickle_response = pickle.loads(pickle.dumps(response))
+ with pytest.raises(httpx.ResponseNotRead):
+ pickle_response.content
+ with pytest.raises(httpx.StreamClosed):
+ await pickle_response.aread()
+ assert pickle_response.is_stream_consumed is False
+ assert pickle_response.num_bytes_downloaded == 0
+ assert pickle_response.headers == {"Transfer-Encoding": "chunked"}
+
+ response = httpx.Response(200, content=async_streaming_body())
+ await response.aread()
+ pickle_response = pickle.loads(pickle.dumps(response))
+ assert pickle_response.is_stream_consumed is True
+ assert pickle_response.content == b"Hello, world!"
+ assert pickle_response.num_bytes_downloaded == 13
diff --git a/tests/models/test_url.py b/tests/models/test_url.py
index 9d67618b5b..cd099bd931 100644
--- a/tests/models/test_url.py
+++ b/tests/models/test_url.py
@@ -4,41 +4,53 @@
@pytest.mark.parametrize(
- "given,idna,host,scheme,port",
+ "given,idna,host,raw_host,scheme,port",
[
(
"http://中国.icom.museum:80/",
"http://xn--fiqs8s.icom.museum:80/",
- "xn--fiqs8s.icom.museum",
+ "中国.icom.museum",
+ b"xn--fiqs8s.icom.museum",
"http",
- 80,
+ None,
),
(
"http://Königsgäßchen.de",
"http://xn--knigsgchen-b4a3dun.de",
- "xn--knigsgchen-b4a3dun.de",
+ "königsgäßchen.de",
+ b"xn--knigsgchen-b4a3dun.de",
"http",
None,
),
- ("https://faß.de", "https://xn--fa-hia.de", "xn--fa-hia.de", "https", None),
+ (
+ "https://faß.de",
+ "https://xn--fa-hia.de",
+ "faß.de",
+ b"xn--fa-hia.de",
+ "https",
+ None,
+ ),
(
"https://βόλος.com:443",
"https://xn--nxasmm1c.com:443",
- "xn--nxasmm1c.com",
+ "βόλος.com",
+ b"xn--nxasmm1c.com",
"https",
- 443,
+ None,
),
(
"http://ශ්රී.com:444",
"http://xn--10cl1a0b660p.com:444",
- "xn--10cl1a0b660p.com",
+ "ශ්රී.com",
+ b"xn--10cl1a0b660p.com",
"http",
444,
),
(
"https://نامهای.com:4433",
"https://xn--mgba3gch31f060k.com:4433",
- "xn--mgba3gch31f060k.com",
+ "نامهای.com",
+ b"xn--mgba3gch31f060k.com",
"https",
4433,
),
@@ -52,10 +64,11 @@
"https_with_custom_port",
],
)
-def test_idna_url(given, idna, host, scheme, port):
+def test_idna_url(given, idna, host, raw_host, scheme, port):
url = httpx.URL(given)
assert url == httpx.URL(idna)
assert url.host == host
+ assert url.raw_host == raw_host
assert url.scheme == scheme
assert url.port == port
@@ -87,11 +100,13 @@ def test_url_eq_str():
def test_url_params():
url = httpx.URL("https://example.org:123/path/to/somewhere", params={"a": "123"})
assert str(url) == "https://example.org:123/path/to/somewhere?a=123"
+ assert url.params == httpx.QueryParams({"a": "123"})
url = httpx.URL(
"https://example.org:123/path/to/somewhere?b=456", params={"a": "123"}
)
- assert str(url) == "https://example.org:123/path/to/somewhere?b=456&a=123"
+ assert str(url) == "https://example.org:123/path/to/somewhere?a=123"
+ assert url.params == httpx.QueryParams({"a": "123"})
def test_url_join():
@@ -109,6 +124,46 @@ def test_url_join():
assert url.join("../../somewhere-else") == "https://example.org:123/somewhere-else"
+def test_url_set_param_manipulation():
+ """
+ Some basic URL query parameter manipulation.
+ """
+ url = httpx.URL("https://example.org:123/?a=123")
+ assert url.copy_set_param("a", "456") == "https://example.org:123/?a=456"
+
+
+def test_url_add_param_manipulation():
+ """
+ Some basic URL query parameter manipulation.
+ """
+ url = httpx.URL("https://example.org:123/?a=123")
+ assert url.copy_add_param("a", "456") == "https://example.org:123/?a=123&a=456"
+
+
+def test_url_remove_param_manipulation():
+ """
+ Some basic URL query parameter manipulation.
+ """
+ url = httpx.URL("https://example.org:123/?a=123")
+ assert url.copy_remove_param("a") == "https://example.org:123/"
+
+
+def test_url_merge_params_manipulation():
+ """
+ Some basic URL query parameter manipulation.
+ """
+ url = httpx.URL("https://example.org:123/?a=123")
+ assert url.copy_merge_params({"b": "456"}) == "https://example.org:123/?a=123&b=456"
+
+
+def test_relative_url_join():
+ url = httpx.URL("/path/to/somewhere")
+ assert url.join("/somewhere-else") == "/somewhere-else"
+ assert url.join("somewhere-else") == "/path/to/somewhere-else"
+ assert url.join("../somewhere-else") == "/path/somewhere-else"
+ assert url.join("../../somewhere-else") == "/somewhere-else"
+
+
def test_url_join_rfc3986():
"""
URL joining tests, as-per reference examples in RFC 3986.
@@ -189,7 +244,7 @@ def test_url_copywith_authority_subcomponents():
def test_url_copywith_netloc():
copy_with_kwargs = {
- "netloc": "example.net:444",
+ "netloc": b"example.net:444",
}
url = httpx.URL("https://example.org")
new = url.copy_with(**copy_with_kwargs)
@@ -293,7 +348,7 @@ def test_ipv6_url():
url = httpx.URL("http://[::ffff:192.168.0.1]:5678/")
assert url.host == "::ffff:192.168.0.1"
- assert url.netloc == "[::ffff:192.168.0.1]:5678"
+ assert url.netloc == b"[::ffff:192.168.0.1]:5678"
@pytest.mark.parametrize(
@@ -309,7 +364,7 @@ def test_ipv6_url_copy_with_host(url_str, new_host):
url = httpx.URL(url_str).copy_with(host=new_host)
assert url.host == "::ffff:192.168.0.1"
- assert url.netloc == "[::ffff:192.168.0.1]:1234"
+ assert url.netloc == b"[::ffff:192.168.0.1]:1234"
assert str(url) == "http://[::ffff:192.168.0.1]:1234"
@@ -319,5 +374,5 @@ def test_ipv6_url_from_raw_url(host):
url = httpx.URL(raw_url)
assert url.host == "::ffff:192.168.0.1"
- assert url.netloc == "[::ffff:192.168.0.1]:443"
- assert str(url) == "https://[::ffff:192.168.0.1]:443/"
+ assert url.netloc == b"[::ffff:192.168.0.1]"
+ assert str(url) == "https://[::ffff:192.168.0.1]/"
diff --git a/tests/test_asgi.py b/tests/test_asgi.py
index b16f68246c..d7cf9412af 100644
--- a/tests/test_asgi.py
+++ b/tests/test_asgi.py
@@ -70,6 +70,42 @@ async def raise_exc_after_response(scope, receive, send):
raise RuntimeError()
+async def empty_stream():
+ yield b""
+
+
+@pytest.mark.usefixtures("async_environment")
+async def test_asgi_transport():
+ async with httpx.ASGITransport(app=hello_world) as transport:
+ status_code, headers, stream, ext = await transport.handle_async_request(
+ method=b"GET",
+ url=(b"http", b"www.example.org", 80, b"/"),
+ headers=[(b"Host", b"www.example.org")],
+ stream=empty_stream(),
+ extensions={},
+ )
+ body = b"".join([part async for part in stream])
+
+ assert status_code == 200
+ assert body == b"Hello, World!"
+
+
+@pytest.mark.usefixtures("async_environment")
+async def test_asgi_transport_no_body():
+ async with httpx.ASGITransport(app=echo_body) as transport:
+ status_code, headers, stream, ext = await transport.handle_async_request(
+ method=b"GET",
+ url=(b"http", b"www.example.org", 80, b"/"),
+ headers=[(b"Host", b"www.example.org")],
+ stream=empty_stream(),
+ extensions={},
+ )
+ body = b"".join([part async for part in stream])
+
+ assert status_code == 200
+ assert body == b""
+
+
@pytest.mark.usefixtures("async_environment")
async def test_asgi():
async with httpx.AsyncClient(app=hello_world) as client:
diff --git a/tests/test_content.py b/tests/test_content.py
index 384f9f2287..b105966198 100644
--- a/tests/test_content.py
+++ b/tests/test_content.py
@@ -3,18 +3,18 @@
import pytest
-from httpx import StreamConsumed
+import httpx
from httpx._content import encode_request, encode_response
@pytest.mark.asyncio
async def test_empty_content():
headers, stream = encode_request()
- assert isinstance(stream, typing.Iterable)
- assert isinstance(stream, typing.AsyncIterable)
+ assert isinstance(stream, httpx.SyncByteStream)
+ assert isinstance(stream, httpx.AsyncByteStream)
- sync_content = b"".join([part for part in stream])
- async_content = b"".join([part async for part in stream])
+ sync_content = stream.read()
+ async_content = await stream.aread()
assert headers == {}
assert sync_content == b""
@@ -35,7 +35,8 @@ async def test_bytes_content():
assert async_content == b"Hello, world!"
# Support 'data' for compat with requests.
- headers, stream = encode_request(data=b"Hello, world!") # type: ignore
+ with pytest.warns(DeprecationWarning):
+ headers, stream = encode_request(data=b"Hello, world!") # type: ignore
assert isinstance(stream, typing.Iterable)
assert isinstance(stream, typing.AsyncIterable)
@@ -62,11 +63,12 @@ def hello_world():
assert headers == {"Transfer-Encoding": "chunked"}
assert content == b"Hello, world!"
- with pytest.raises(StreamConsumed):
+ with pytest.raises(httpx.StreamConsumed):
[part for part in stream]
# Support 'data' for compat with requests.
- headers, stream = encode_request(data=hello_world()) # type: ignore
+ with pytest.warns(DeprecationWarning):
+ headers, stream = encode_request(data=hello_world()) # type: ignore
assert isinstance(stream, typing.Iterable)
assert not isinstance(stream, typing.AsyncIterable)
@@ -91,11 +93,12 @@ async def hello_world():
assert headers == {"Transfer-Encoding": "chunked"}
assert content == b"Hello, world!"
- with pytest.raises(StreamConsumed):
+ with pytest.raises(httpx.StreamConsumed):
[part async for part in stream]
# Support 'data' for compat with requests.
- headers, stream = encode_request(data=hello_world()) # type: ignore
+ with pytest.warns(DeprecationWarning):
+ headers, stream = encode_request(data=hello_world()) # type: ignore
assert not isinstance(stream, typing.Iterable)
assert isinstance(stream, typing.AsyncIterable)
@@ -139,6 +142,57 @@ async def test_urlencoded_content():
assert async_content == b"Hello=world%21"
+@pytest.mark.asyncio
+async def test_urlencoded_boolean():
+ headers, stream = encode_request(data={"example": True})
+ assert isinstance(stream, typing.Iterable)
+ assert isinstance(stream, typing.AsyncIterable)
+
+ sync_content = b"".join([part for part in stream])
+ async_content = b"".join([part async for part in stream])
+
+ assert headers == {
+ "Content-Length": "12",
+ "Content-Type": "application/x-www-form-urlencoded",
+ }
+ assert sync_content == b"example=true"
+ assert async_content == b"example=true"
+
+
+@pytest.mark.asyncio
+async def test_urlencoded_none():
+ headers, stream = encode_request(data={"example": None})
+ assert isinstance(stream, typing.Iterable)
+ assert isinstance(stream, typing.AsyncIterable)
+
+ sync_content = b"".join([part for part in stream])
+ async_content = b"".join([part async for part in stream])
+
+ assert headers == {
+ "Content-Length": "8",
+ "Content-Type": "application/x-www-form-urlencoded",
+ }
+ assert sync_content == b"example="
+ assert async_content == b"example="
+
+
+@pytest.mark.asyncio
+async def test_urlencoded_list():
+ headers, stream = encode_request(data={"example": ["a", 1, True]})
+ assert isinstance(stream, typing.Iterable)
+ assert isinstance(stream, typing.AsyncIterable)
+
+ sync_content = b"".join([part for part in stream])
+ async_content = b"".join([part async for part in stream])
+
+ assert headers == {
+ "Content-Length": "32",
+ "Content-Type": "application/x-www-form-urlencoded",
+ }
+ assert sync_content == b"example=a&example=1&example=true"
+ assert async_content == b"example=a&example=1&example=true"
+
+
@pytest.mark.asyncio
async def test_multipart_files_content():
files = {"file": io.BytesIO(b"")}
@@ -331,7 +385,7 @@ def hello_world():
assert headers == {"Transfer-Encoding": "chunked"}
assert content == b"Hello, world!"
- with pytest.raises(StreamConsumed):
+ with pytest.raises(httpx.StreamConsumed):
[part for part in stream]
@@ -350,7 +404,7 @@ async def hello_world():
assert headers == {"Transfer-Encoding": "chunked"}
assert content == b"Hello, world!"
- with pytest.raises(StreamConsumed):
+ with pytest.raises(httpx.StreamConsumed):
[part async for part in stream]
diff --git a/tests/test_decoders.py b/tests/test_decoders.py
index f8c432cc89..faaf71d2fb 100644
--- a/tests/test_decoders.py
+++ b/tests/test_decoders.py
@@ -170,7 +170,7 @@ def test_decoding_errors(header_value):
request = httpx.Request("GET", "https://example.org")
httpx.Response(200, headers=headers, content=compressed_body, request=request)
- with pytest.raises(ValueError):
+ with pytest.raises(httpx.DecodingError):
httpx.Response(200, headers=headers, content=compressed_body)
diff --git a/tests/test_exceptions.py b/tests/test_exceptions.py
index f1c7005bba..8d28fda0d7 100644
--- a/tests/test_exceptions.py
+++ b/tests/test_exceptions.py
@@ -1,10 +1,10 @@
-from typing import Any
+from unittest import mock
import httpcore
import pytest
import httpx
-from httpx._exceptions import HTTPCORE_EXC_MAP
+from httpx._transports.default import HTTPCORE_EXC_MAP
def test_httpcore_all_exceptions_mapped() -> None:
@@ -29,25 +29,42 @@ def test_httpcore_exception_mapping(server) -> None:
HTTPCore exception mapping works as expected.
"""
- # Make sure we don't just map to `NetworkError`.
- with pytest.raises(httpx.ConnectError):
- httpx.get("http://doesnotexist")
+ def connect_failed(*args, **kwargs):
+ raise httpcore.ConnectError()
- # Make sure streaming methods also map exceptions.
- url = server.url.copy_with(path="/slow_stream_response")
- timeout = httpx.Timeout(None, read=0.1)
- with httpx.stream("GET", url, timeout=timeout) as stream:
- with pytest.raises(httpx.ReadTimeout):
- stream.read()
+ class TimeoutStream:
+ def __iter__(self):
+ raise httpcore.ReadTimeout()
+
+ def close(self):
+ pass
+
+ class CloseFailedStream:
+ def __iter__(self):
+ yield b""
- # Make sure it also works with custom transports.
- class MockTransport(httpcore.SyncHTTPTransport):
- def request(self, *args: Any, **kwargs: Any) -> Any:
- raise httpcore.ProtocolError()
+ def close(self):
+ raise httpcore.CloseError()
- client = httpx.Client(transport=MockTransport())
- with pytest.raises(httpx.ProtocolError):
- client.get("http://testserver")
+ with mock.patch(
+ "httpcore.SyncConnectionPool.handle_request", side_effect=connect_failed
+ ):
+ with pytest.raises(httpx.ConnectError):
+ httpx.get(server.url)
+
+ with mock.patch(
+ "httpcore.SyncConnectionPool.handle_request",
+ return_value=(200, [], TimeoutStream(), {}),
+ ):
+ with pytest.raises(httpx.ReadTimeout):
+ httpx.get(server.url)
+
+ with mock.patch(
+ "httpcore.SyncConnectionPool.handle_request",
+ return_value=(200, [], CloseFailedStream(), {}),
+ ):
+ with pytest.raises(httpx.CloseError):
+ httpx.get(server.url)
def test_httpx_exceptions_exposed() -> None:
@@ -66,3 +83,15 @@ def test_httpx_exceptions_exposed() -> None:
if not_exposed: # pragma: nocover
pytest.fail(f"Unexposed HTTPX exceptions: {not_exposed}")
+
+
+def test_request_attribute() -> None:
+ # Exception without request attribute
+ exc = httpx.ReadTimeout("Read operation timed out")
+ with pytest.raises(RuntimeError):
+ exc.request
+
+ # Exception with request attribute
+ request = httpx.Request("GET", "https://www.example.com")
+ exc = httpx.ReadTimeout("Read operation timed out", request=request)
+ assert exc.request == request
diff --git a/tests/test_multipart.py b/tests/test_multipart.py
index 94813932a8..9eb62f785b 100644
--- a/tests/test_multipart.py
+++ b/tests/test_multipart.py
@@ -57,7 +57,7 @@ def test_multipart_invalid_key(key):
assert repr(key) in str(e.value)
-@pytest.mark.parametrize(("value"), (1, 2.3, None, [None, "abc"], {None: "abc"}))
+@pytest.mark.parametrize(("value"), (object(), {"key": "value"}))
def test_multipart_invalid_value(value):
client = httpx.Client(transport=httpx.MockTransport(echo_request_content))
@@ -104,6 +104,8 @@ def test_multipart_encode(tmp_path: typing.Any) -> None:
"b": b"C",
"c": ["11", "22", "33"],
"d": "",
+ "e": True,
+ "f": "",
}
files = {"file": ("name.txt", open(path, "rb"))}
@@ -120,6 +122,8 @@ def test_multipart_encode(tmp_path: typing.Any) -> None:
'--{0}\r\nContent-Disposition: form-data; name="c"\r\n\r\n22\r\n'
'--{0}\r\nContent-Disposition: form-data; name="c"\r\n\r\n33\r\n'
'--{0}\r\nContent-Disposition: form-data; name="d"\r\n\r\n\r\n'
+ '--{0}\r\nContent-Disposition: form-data; name="e"\r\n\r\ntrue\r\n'
+ '--{0}\r\nContent-Disposition: form-data; name="f"\r\n\r\n\r\n'
'--{0}\r\nContent-Disposition: form-data; name="file";'
' filename="name.txt"\r\n'
"Content-Type: text/plain\r\n\r\n\r\n"
@@ -133,6 +137,29 @@ def test_multipart_encode(tmp_path: typing.Any) -> None:
assert content == b"".join(stream)
+def test_multipart_encode_unicode_file_contents() -> None:
+ files = {"file": ("name.txt", "<únicode string>")}
+
+ with mock.patch("os.urandom", return_value=os.urandom(16)):
+ boundary = os.urandom(16).hex()
+
+ headers, stream = encode_request(files=files)
+ assert isinstance(stream, typing.Iterable)
+
+ content = (
+ '--{0}\r\nContent-Disposition: form-data; name="file";'
+ ' filename="name.txt"\r\n'
+ "Content-Type: text/plain\r\n\r\n<únicode string>\r\n"
+ "--{0}--\r\n"
+ "".format(boundary).encode("utf-8")
+ )
+ assert headers == {
+ "Content-Type": f"multipart/form-data; boundary={boundary}",
+ "Content-Length": str(len(content)),
+ }
+ assert content == b"".join(stream)
+
+
def test_multipart_encode_files_allows_filenames_as_none() -> None:
files = {"file": (None, io.BytesIO(b""))}
with mock.patch("os.urandom", return_value=os.urandom(16)):
diff --git a/tests/test_status_codes.py b/tests/test_status_codes.py
index 722e83c527..f253cecd68 100644
--- a/tests/test_status_codes.py
+++ b/tests/test_status_codes.py
@@ -1,5 +1,3 @@
-import pytest
-
import httpx
@@ -26,14 +24,3 @@ def test_reason_phrase_for_status_code():
def test_reason_phrase_for_unknown_status_code():
assert httpx.codes.get_reason_phrase(499) == ""
-
-
-def test_deprecated_status_code_class():
- with pytest.warns(DeprecationWarning):
- assert httpx.StatusCode.NOT_FOUND == 404
-
- with pytest.warns(DeprecationWarning):
- assert httpx.StatusCode(404) == 404
-
- with pytest.warns(DeprecationWarning):
- assert httpx.StatusCode["NOT_FOUND"] == 404