From c8b8a50b8151a8f0d2723a0aafd7a9bbc9971cbd Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Sat, 15 Aug 2020 09:57:14 -0700 Subject: [PATCH 001/193] Bump version to 0.22.0.dev0 --- asyncpg/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/asyncpg/__init__.py b/asyncpg/__init__.py index 13341e85..d06718a5 100644 --- a/asyncpg/__init__.py +++ b/asyncpg/__init__.py @@ -31,4 +31,4 @@ # snapshots will automatically include the git revision # in __version__, for example: '0.16.0.dev0+ge06ad03' -__version__ = '0.21.0' +__version__ = '0.22.0.dev0' From db4f1a6caa87779b7264f285116381aa943ddf0e Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Sat, 18 Jul 2020 14:07:38 -0700 Subject: [PATCH 002/193] Allow using custom Record class Add the new `record_class` parameter to the `create_pool()` and `connect()` functions, as well as to the `cursor()`, `prepare()`, `fetch()` and `fetchrow()` connection methods. This not only allows adding custom functionality to the returned objects, but also assists with typing (see #577 for discussion). Fixes: #40. 
--- .flake8 | 2 +- asyncpg/_testbase/__init__.py | 3 + asyncpg/connect_utils.py | 27 ++- asyncpg/connection.py | 303 ++++++++++++++++++++++----- asyncpg/cursor.py | 82 ++++++-- asyncpg/pool.py | 25 ++- asyncpg/prepared_stmt.py | 12 +- asyncpg/protocol/codecs/base.pyx | 3 +- asyncpg/protocol/prepared_stmt.pxd | 2 + asyncpg/protocol/prepared_stmt.pyx | 11 +- asyncpg/protocol/protocol.pxd | 1 + asyncpg/protocol/protocol.pyx | 15 +- asyncpg/protocol/record/__init__.pxd | 2 +- asyncpg/protocol/record/recordobj.c | 47 +++-- asyncpg/protocol/record/recordobj.h | 2 +- tests/test_record.py | 174 +++++++++++++++ tests/test_timeout.py | 4 +- 17 files changed, 610 insertions(+), 105 deletions(-) diff --git a/.flake8 b/.flake8 index 7cf64d1f..9697fc96 100644 --- a/.flake8 +++ b/.flake8 @@ -1,3 +1,3 @@ [flake8] -ignore = E402,E731,W504,E252 +ignore = E402,E731,W503,W504,E252 exclude = .git,__pycache__,build,dist,.eggs,.github,.local diff --git a/asyncpg/_testbase/__init__.py b/asyncpg/_testbase/__init__.py index baf55c1b..ce7f827f 100644 --- a/asyncpg/_testbase/__init__.py +++ b/asyncpg/_testbase/__init__.py @@ -19,6 +19,7 @@ import unittest +import asyncpg from asyncpg import cluster as pg_cluster from asyncpg import connection as pg_connection from asyncpg import pool as pg_pool @@ -266,6 +267,7 @@ def create_pool(dsn=None, *, loop=None, pool_class=pg_pool.Pool, connection_class=pg_connection.Connection, + record_class=asyncpg.Record, **connect_kwargs): return pool_class( dsn, @@ -273,6 +275,7 @@ def create_pool(dsn=None, *, max_queries=max_queries, loop=loop, setup=setup, init=init, max_inactive_connection_lifetime=max_inactive_connection_lifetime, connection_class=connection_class, + record_class=record_class, **connect_kwargs) diff --git a/asyncpg/connect_utils.py b/asyncpg/connect_utils.py index 2678b358..e5feebc2 100644 --- a/asyncpg/connect_utils.py +++ b/asyncpg/connect_utils.py @@ -594,8 +594,16 @@ async def _create_ssl_connection(protocol_factory, host, port, *, raise 
-async def _connect_addr(*, addr, loop, timeout, params, config, - connection_class): +async def _connect_addr( + *, + addr, + loop, + timeout, + params, + config, + connection_class, + record_class +): assert loop is not None if timeout <= 0: @@ -613,7 +621,7 @@ async def _connect_addr(*, addr, loop, timeout, params, config, params = params._replace(password=password) proto_factory = lambda: protocol.Protocol( - addr, connected, params, loop) + addr, connected, params, record_class, loop) if isinstance(addr, str): # UNIX socket @@ -649,7 +657,7 @@ async def _connect_addr(*, addr, loop, timeout, params, config, return con -async def _connect(*, loop, timeout, connection_class, **kwargs): +async def _connect(*, loop, timeout, connection_class, record_class, **kwargs): if loop is None: loop = asyncio.get_event_loop() @@ -661,9 +669,14 @@ async def _connect(*, loop, timeout, connection_class, **kwargs): before = time.monotonic() try: con = await _connect_addr( - addr=addr, loop=loop, timeout=timeout, - params=params, config=config, - connection_class=connection_class) + addr=addr, + loop=loop, + timeout=timeout, + params=params, + config=config, + connection_class=connection_class, + record_class=record_class, + ) except (OSError, asyncio.TimeoutError, ConnectionError) as ex: last_error = ex else: diff --git a/asyncpg/connection.py b/asyncpg/connection.py index a78aafa7..76c33a9d 100644 --- a/asyncpg/connection.py +++ b/asyncpg/connection.py @@ -50,7 +50,7 @@ class Connection(metaclass=ConnectionMeta): '_source_traceback', '__weakref__') def __init__(self, protocol, transport, loop, - addr: (str, int) or str, + addr, config: connect_utils._ClientConfiguration, params: connect_utils._ConnectionParameters): self._protocol = protocol @@ -294,7 +294,13 @@ async def execute(self, query: str, *args, timeout: float=None) -> str: if not args: return await self._protocol.query(query, timeout) - _, status, _ = await self._execute(query, args, 0, timeout, True) + _, status, _ = 
await self._execute( + query, + args, + 0, + timeout, + return_status=True, + ) return status.decode() async def executemany(self, command: str, args, *, timeout: float=None): @@ -327,10 +333,20 @@ async def executemany(self, command: str, args, *, timeout: float=None): self._check_open() return await self._executemany(command, args, timeout) - async def _get_statement(self, query, timeout, *, named: bool=False, - use_cache: bool=True): + async def _get_statement( + self, + query, + timeout, + *, + named: bool=False, + use_cache: bool=True, + record_class=None + ): + if record_class is None: + record_class = self._protocol.get_record_class() + if use_cache: - statement = self._stmt_cache.get(query) + statement = self._stmt_cache.get((query, record_class)) if statement is not None: return statement @@ -348,7 +364,12 @@ async def _get_statement(self, query, timeout, *, named: bool=False, else: stmt_name = '' - statement = await self._protocol.prepare(stmt_name, query, timeout) + statement = await self._protocol.prepare( + stmt_name, + query, + timeout, + record_class=record_class, + ) need_reprepare = False types_with_missing_codecs = statement._init_types() tries = 0 @@ -384,10 +405,15 @@ async def _get_statement(self, query, timeout, *, named: bool=False, if need_reprepare: await self._protocol.prepare( - stmt_name, query, timeout, state=statement) + stmt_name, + query, + timeout, + state=statement, + record_class=record_class, + ) if use_cache: - self._stmt_cache.put(query, statement) + self._stmt_cache.put((query, record_class), statement) # If we've just created a new statement object, check if there # are any statements for GC. 
@@ -400,47 +426,124 @@ async def _introspect_types(self, typeoids, timeout): return await self.__execute( self._intro_query, (list(typeoids),), 0, timeout) - def cursor(self, query, *args, prefetch=None, timeout=None): + def cursor( + self, + query, + *args, + prefetch=None, + timeout=None, + record_class=None + ): """Return a *cursor factory* for the specified query. - :param args: Query arguments. - :param int prefetch: The number of rows the *cursor iterator* - will prefetch (defaults to ``50``.) - :param float timeout: Optional timeout in seconds. + :param args: + Query arguments. + :param int prefetch: + The number of rows the *cursor iterator* + will prefetch (defaults to ``50``.) + :param float timeout: + Optional timeout in seconds. + :param type record_class: + If specified, the class to use for records returned by this cursor. + Must be a subclass of :class:`~asyncpg.Record`. If not specified, + a per-connection *record_class* is used. + + :return: + A :class:`~cursor.CursorFactory` object. - :return: A :class:`~cursor.CursorFactory` object. + .. versionchanged:: 0.22.0 + Added the *record_class* parameter. """ self._check_open() - return cursor.CursorFactory(self, query, None, args, - prefetch, timeout) + return cursor.CursorFactory( + self, + query, + None, + args, + prefetch, + timeout, + record_class, + ) - async def prepare(self, query, *, timeout=None): + async def prepare(self, query, *, timeout=None, record_class=None): """Create a *prepared statement* for the specified query. - :param str query: Text of the query to create a prepared statement for. - :param float timeout: Optional timeout value in seconds. + :param str query: + Text of the query to create a prepared statement for. + :param float timeout: + Optional timeout value in seconds. + :param type record_class: + If specified, the class to use for records returned by the + prepared statement. Must be a subclass of + :class:`~asyncpg.Record`. 
If not specified, a per-connection + *record_class* is used. - :return: A :class:`~prepared_stmt.PreparedStatement` instance. + :return: + A :class:`~prepared_stmt.PreparedStatement` instance. + + .. versionchanged:: 0.22.0 + Added the *record_class* parameter. """ - return await self._prepare(query, timeout=timeout, use_cache=False) + return await self._prepare( + query, + timeout=timeout, + use_cache=False, + record_class=record_class, + ) - async def _prepare(self, query, *, timeout=None, use_cache: bool=False): + async def _prepare( + self, + query, + *, + timeout=None, + use_cache: bool=False, + record_class=None + ): self._check_open() - stmt = await self._get_statement(query, timeout, named=True, - use_cache=use_cache) + stmt = await self._get_statement( + query, + timeout, + named=True, + use_cache=use_cache, + record_class=record_class, + ) return prepared_stmt.PreparedStatement(self, query, stmt) - async def fetch(self, query, *args, timeout=None) -> list: + async def fetch( + self, + query, + *args, + timeout=None, + record_class=None + ) -> list: """Run a query and return the results as a list of :class:`Record`. - :param str query: Query text. - :param args: Query arguments. - :param float timeout: Optional timeout value in seconds. + :param str query: + Query text. + :param args: + Query arguments. + :param float timeout: + Optional timeout value in seconds. + :param type record_class: + If specified, the class to use for records returned by this method. + Must be a subclass of :class:`~asyncpg.Record`. If not specified, + a per-connection *record_class* is used. - :return list: A list of :class:`Record` instances. + :return list: + A list of :class:`~asyncpg.Record` instances. If specified, the + actual type of list elements would be *record_class*. + + .. versionchanged:: 0.22.0 + Added the *record_class* parameter. 
""" self._check_open() - return await self._execute(query, args, 0, timeout) + return await self._execute( + query, + args, + 0, + timeout, + record_class=record_class, + ) async def fetchval(self, query, *args, column=0, timeout=None): """Run a query and return a value in the first row. @@ -463,18 +566,42 @@ async def fetchval(self, query, *args, column=0, timeout=None): return None return data[0][column] - async def fetchrow(self, query, *args, timeout=None): + async def fetchrow( + self, + query, + *args, + timeout=None, + record_class=None + ): """Run a query and return the first row. - :param str query: Query text - :param args: Query arguments - :param float timeout: Optional timeout value in seconds. - - :return: The first row as a :class:`Record` instance, or None if - no records were returned by the query. + :param str query: + Query text + :param args: + Query arguments + :param float timeout: + Optional timeout value in seconds. + :param type record_class: + If specified, the class to use for the value returned by this + method. Must be a subclass of :class:`~asyncpg.Record`. + If not specified, a per-connection *record_class* is used. + + :return: + The first row as a :class:`~asyncpg.Record` instance, or None if + no records were returned by the query. If specified, + *record_class* is used as the type for the result value. + + .. versionchanged:: 0.22.0 + Added the *record_class* parameter. 
""" self._check_open() - data = await self._execute(query, args, 1, timeout) + data = await self._execute( + query, + args, + 1, + timeout, + record_class=record_class, + ) if not data: return None return data[0] @@ -1185,7 +1312,10 @@ def _mark_stmts_as_closed(self): self._stmts_to_close.clear() def _maybe_gc_stmt(self, stmt): - if stmt.refs == 0 and not self._stmt_cache.has(stmt.query): + if ( + stmt.refs == 0 + and not self._stmt_cache.has((stmt.query, stmt.record_class)) + ): # If low-level `stmt` isn't referenced from any high-level # `PreparedStatement` object and is not in the `_stmt_cache`: # @@ -1440,18 +1570,46 @@ async def reload_schema_state(self): self._drop_global_type_cache() self._drop_global_statement_cache() - async def _execute(self, query, args, limit, timeout, return_status=False): + async def _execute( + self, + query, + args, + limit, + timeout, + *, + return_status=False, + record_class=None + ): with self._stmt_exclusive_section: result, _ = await self.__execute( - query, args, limit, timeout, return_status=return_status) + query, + args, + limit, + timeout, + return_status=return_status, + record_class=record_class, + ) return result - async def __execute(self, query, args, limit, timeout, - return_status=False): + async def __execute( + self, + query, + args, + limit, + timeout, + *, + return_status=False, + record_class=None + ): executor = lambda stmt, timeout: self._protocol.bind_execute( stmt, args, '', limit, return_status, timeout) timeout = self._protocol._get_timeout(timeout) - return await self._do_execute(query, executor, timeout) + return await self._do_execute( + query, + executor, + timeout, + record_class=record_class, + ) async def _executemany(self, query, args, timeout): executor = lambda stmt, timeout: self._protocol.bind_execute_many( @@ -1461,12 +1619,28 @@ async def _executemany(self, query, args, timeout): result, _ = await self._do_execute(query, executor, timeout) return result - async def _do_execute(self, query, 
executor, timeout, retry=True): + async def _do_execute( + self, + query, + executor, + timeout, + retry=True, + *, + record_class=None + ): if timeout is None: - stmt = await self._get_statement(query, None) + stmt = await self._get_statement( + query, + None, + record_class=record_class, + ) else: before = time.monotonic() - stmt = await self._get_statement(query, timeout) + stmt = await self._get_statement( + query, + timeout, + record_class=record_class, + ) after = time.monotonic() timeout -= after - before before = after @@ -1535,6 +1709,7 @@ async def connect(dsn=None, *, command_timeout=None, ssl=None, connection_class=Connection, + record_class=protocol.Record, server_settings=None): r"""A coroutine to establish a connection to a PostgreSQL server. @@ -1654,10 +1829,15 @@ async def connect(dsn=None, *, PostgreSQL documentation for a `list of supported options `_. - :param Connection connection_class: + :param type connection_class: Class of the returned connection object. Must be a subclass of :class:`~asyncpg.connection.Connection`. + :param type record_class: + If specified, the class to use for records returned by queries on + this connection object. Must be a subclass of + :class:`~asyncpg.Record`. + :return: A :class:`~asyncpg.connection.Connection` instance. Example: @@ -1696,6 +1876,9 @@ async def connect(dsn=None, *, .. versionchanged:: 0.21.0 The *password* argument now accepts a callable or an async function. + .. versionchanged:: 0.22.0 + Added the *record_class* parameter. + .. _SSLContext: https://docs.python.org/3/library/ssl.html#ssl.SSLContext .. 
_create_default_context: https://docs.python.org/3/library/ssl.html#ssl.create_default_context @@ -1712,19 +1895,33 @@ async def connect(dsn=None, *, 'connection_class is expected to be a subclass of ' 'asyncpg.Connection, got {!r}'.format(connection_class)) + if not issubclass(record_class, protocol.Record): + raise TypeError( + 'record_class is expected to be a subclass of ' + 'asyncpg.Record, got {!r}'.format(record_class)) + if loop is None: loop = asyncio.get_event_loop() return await connect_utils._connect( - loop=loop, timeout=timeout, connection_class=connection_class, - dsn=dsn, host=host, port=port, user=user, - password=password, passfile=passfile, - ssl=ssl, database=database, + loop=loop, + timeout=timeout, + connection_class=connection_class, + record_class=record_class, + dsn=dsn, + host=host, + port=port, + user=user, + password=password, + passfile=passfile, + ssl=ssl, + database=database, server_settings=server_settings, command_timeout=command_timeout, statement_cache_size=statement_cache_size, max_cached_statement_lifetime=max_cached_statement_lifetime, - max_cacheable_statement_size=max_cacheable_statement_size) + max_cacheable_statement_size=max_cacheable_statement_size, + ) class _StatementCacheEntry: diff --git a/asyncpg/cursor.py b/asyncpg/cursor.py index 030def0e..978824c3 100644 --- a/asyncpg/cursor.py +++ b/asyncpg/cursor.py @@ -19,15 +19,32 @@ class CursorFactory(connresource.ConnectionResource): results of a large query. 
""" - __slots__ = ('_state', '_args', '_prefetch', '_query', '_timeout') - - def __init__(self, connection, query, state, args, prefetch, timeout): + __slots__ = ( + '_state', + '_args', + '_prefetch', + '_query', + '_timeout', + '_record_class', + ) + + def __init__( + self, + connection, + query, + state, + args, + prefetch, + timeout, + record_class + ): super().__init__(connection) self._args = args self._prefetch = prefetch self._query = query self._timeout = timeout self._state = state + self._record_class = record_class if state is not None: state.attach() @@ -35,18 +52,28 @@ def __init__(self, connection, query, state, args, prefetch, timeout): @connresource.guarded def __aiter__(self): prefetch = 50 if self._prefetch is None else self._prefetch - return CursorIterator(self._connection, - self._query, self._state, - self._args, prefetch, - self._timeout) + return CursorIterator( + self._connection, + self._query, + self._state, + self._args, + self._record_class, + prefetch, + self._timeout, + ) @connresource.guarded def __await__(self): if self._prefetch is not None: raise exceptions.InterfaceError( 'prefetch argument can only be specified for iterable cursor') - cursor = Cursor(self._connection, self._query, - self._state, self._args) + cursor = Cursor( + self._connection, + self._query, + self._state, + self._args, + self._record_class, + ) return cursor._init(self._timeout).__await__() def __del__(self): @@ -57,9 +84,16 @@ def __del__(self): class BaseCursor(connresource.ConnectionResource): - __slots__ = ('_state', '_args', '_portal_name', '_exhausted', '_query') + __slots__ = ( + '_state', + '_args', + '_portal_name', + '_exhausted', + '_query', + '_record_class', + ) - def __init__(self, connection, query, state, args): + def __init__(self, connection, query, state, args, record_class): super().__init__(connection) self._args = args self._state = state @@ -68,6 +102,7 @@ def __init__(self, connection, query, state, args): self._portal_name = None 
self._exhausted = False self._query = query + self._record_class = record_class def _check_ready(self): if self._state is None: @@ -151,8 +186,17 @@ class CursorIterator(BaseCursor): __slots__ = ('_buffer', '_prefetch', '_timeout') - def __init__(self, connection, query, state, args, prefetch, timeout): - super().__init__(connection, query, state, args) + def __init__( + self, + connection, + query, + state, + args, + record_class, + prefetch, + timeout + ): + super().__init__(connection, query, state, args, record_class) if prefetch <= 0: raise exceptions.InterfaceError( @@ -171,7 +215,11 @@ def __aiter__(self): async def __anext__(self): if self._state is None: self._state = await self._connection._get_statement( - self._query, self._timeout, named=True) + self._query, + self._timeout, + named=True, + record_class=self._record_class, + ) self._state.attach() if not self._portal_name: @@ -196,7 +244,11 @@ class Cursor(BaseCursor): async def _init(self, timeout): if self._state is None: self._state = await self._connection._get_statement( - self._query, timeout, named=True) + self._query, + timeout, + named=True, + record_class=self._record_class, + ) self._state.attach() self._check_ready() await self._bind(timeout) diff --git a/asyncpg/pool.py b/asyncpg/pool.py index ec42f816..b3947451 100644 --- a/asyncpg/pool.py +++ b/asyncpg/pool.py @@ -15,6 +15,7 @@ from . import connection from . import connect_utils from . import exceptions +from . 
import protocol logger = logging.getLogger(__name__) @@ -309,7 +310,7 @@ class Pool: '_init', '_connect_args', '_connect_kwargs', '_working_addr', '_working_config', '_working_params', '_holders', '_initialized', '_initializing', '_closing', - '_closed', '_connection_class', '_generation', + '_closed', '_connection_class', '_record_class', '_generation', '_setup', '_max_queries', '_max_inactive_connection_lifetime' ) @@ -322,6 +323,7 @@ def __init__(self, *connect_args, init, loop, connection_class, + record_class, **connect_kwargs): if len(connect_args) > 1: @@ -359,6 +361,11 @@ def __init__(self, *connect_args, 'connection_class is expected to be a subclass of ' 'asyncpg.Connection, got {!r}'.format(connection_class)) + if not issubclass(record_class, protocol.Record): + raise TypeError( + 'record_class is expected to be a subclass of ' + 'asyncpg.Record, got {!r}'.format(record_class)) + self._minsize = min_size self._maxsize = max_size @@ -372,6 +379,7 @@ def __init__(self, *connect_args, self._working_params = None self._connection_class = connection_class + self._record_class = record_class self._closing = False self._closed = False @@ -469,6 +477,7 @@ async def _get_new_connection(self): *self._connect_args, loop=self._loop, connection_class=self._connection_class, + record_class=self._record_class, **self._connect_kwargs) self._working_addr = con._addr @@ -484,7 +493,9 @@ async def _get_new_connection(self): timeout=self._working_params.connect_timeout, config=self._working_config, params=self._working_params, - connection_class=self._connection_class) + connection_class=self._connection_class, + record_class=self._record_class, + ) if self._init is not None: try: @@ -793,6 +804,7 @@ def create_pool(dsn=None, *, init=None, loop=None, connection_class=connection.Connection, + record_class=protocol.Record, **connect_kwargs): r"""Create a connection pool. @@ -851,6 +863,11 @@ def create_pool(dsn=None, *, The class to use for connections. 
Must be a subclass of :class:`~asyncpg.connection.Connection`. + :param type record_class: + If specified, the class to use for records returned by queries on + the connections in this pool. Must be a subclass of + :class:`~asyncpg.Record`. + :param int min_size: Number of connection the pool will be initialized with. @@ -901,10 +918,14 @@ def create_pool(dsn=None, *, or :meth:`Connection.add_log_listener() `) present on the connection at the moment of its release to the pool. + + .. versionchanged:: 0.22.0 + Added the *record_class* parameter. """ return Pool( dsn, connection_class=connection_class, + record_class=record_class, min_size=min_size, max_size=max_size, max_queries=max_queries, loop=loop, setup=setup, init=init, max_inactive_connection_lifetime=max_inactive_connection_lifetime, diff --git a/asyncpg/prepared_stmt.py b/asyncpg/prepared_stmt.py index 09a0a2ec..5df6b674 100644 --- a/asyncpg/prepared_stmt.py +++ b/asyncpg/prepared_stmt.py @@ -103,9 +103,15 @@ def cursor(self, *args, prefetch=None, :return: A :class:`~cursor.CursorFactory` object. 
""" - return cursor.CursorFactory(self._connection, self._query, - self._state, args, prefetch, - timeout) + return cursor.CursorFactory( + self._connection, + self._query, + self._state, + args, + prefetch, + timeout, + self._state.record_class, + ) @connresource.guarded async def explain(self, *args, analyze=False): diff --git a/asyncpg/protocol/codecs/base.pyx b/asyncpg/protocol/codecs/base.pyx index 5d3ccc4b..238fa280 100644 --- a/asyncpg/protocol/codecs/base.pyx +++ b/asyncpg/protocol/codecs/base.pyx @@ -7,6 +7,7 @@ from collections.abc import Mapping as MappingABC +import asyncpg from asyncpg import exceptions @@ -232,7 +233,7 @@ cdef class Codec: schema=self.schema, data_type=self.name, ) - result = record.ApgRecord_New(self.record_desc, elem_count) + result = record.ApgRecord_New(asyncpg.Record, self.record_desc, elem_count) for i in range(elem_count): elem_typ = self.element_type_oids[i] received_elem_typ = hton.unpack_int32(frb_read(buf, 4)) diff --git a/asyncpg/protocol/prepared_stmt.pxd b/asyncpg/protocol/prepared_stmt.pxd index 0d3f8d3b..90944c1a 100644 --- a/asyncpg/protocol/prepared_stmt.pxd +++ b/asyncpg/protocol/prepared_stmt.pxd @@ -11,6 +11,8 @@ cdef class PreparedStatementState: readonly str query readonly bint closed readonly int refs + readonly type record_class + list row_desc list parameters_desc diff --git a/asyncpg/protocol/prepared_stmt.pyx b/asyncpg/protocol/prepared_stmt.pyx index b69f76be..60094be6 100644 --- a/asyncpg/protocol/prepared_stmt.pyx +++ b/asyncpg/protocol/prepared_stmt.pyx @@ -11,7 +11,13 @@ from asyncpg import exceptions @cython.final cdef class PreparedStatementState: - def __cinit__(self, str name, str query, BaseProtocol protocol): + def __cinit__( + self, + str name, + str query, + BaseProtocol protocol, + type record_class + ): self.name = name self.query = query self.settings = protocol.settings @@ -21,6 +27,7 @@ cdef class PreparedStatementState: self.cols_desc = None self.closed = False self.refs = 0 + 
self.record_class = record_class def _get_parameters(self): cdef Codec codec @@ -264,7 +271,7 @@ cdef class PreparedStatementState: 'different from what was described ({})'.format( fnum, self.cols_num)) - dec_row = record.ApgRecord_New(self.cols_desc, fnum) + dec_row = record.ApgRecord_New(self.record_class, self.cols_desc, fnum) for i in range(fnum): flen = hton.unpack_int32(frb_read(&rbuf, 4)) diff --git a/asyncpg/protocol/protocol.pxd b/asyncpg/protocol/protocol.pxd index 14a7ecc6..772d6432 100644 --- a/asyncpg/protocol/protocol.pxd +++ b/asyncpg/protocol/protocol.pxd @@ -42,6 +42,7 @@ cdef class BaseProtocol(CoreProtocol): object timeout_callback object completed_callback object conref + type record_class bint is_reading str last_query diff --git a/asyncpg/protocol/protocol.pyx b/asyncpg/protocol/protocol.pyx index 857fb4cc..4f7ce675 100644 --- a/asyncpg/protocol/protocol.pyx +++ b/asyncpg/protocol/protocol.pyx @@ -73,7 +73,7 @@ NO_TIMEOUT = object() cdef class BaseProtocol(CoreProtocol): - def __init__(self, addr, connected_fut, con_params, loop): + def __init__(self, addr, connected_fut, con_params, record_class: type, loop): # type of `con_params` is `_ConnectionParameters` CoreProtocol.__init__(self, con_params) @@ -85,6 +85,7 @@ cdef class BaseProtocol(CoreProtocol): self.address = addr self.settings = ConnectionSettings((self.address, con_params.database)) + self.record_class = record_class self.statement = None self.return_extra = False @@ -122,6 +123,9 @@ cdef class BaseProtocol(CoreProtocol): def get_settings(self): return self.settings + def get_record_class(self): + return self.record_class + def is_in_transaction(self): # PQTRANS_INTRANS = idle, within transaction block # PQTRANS_INERROR = idle, within failed transaction @@ -139,7 +143,9 @@ cdef class BaseProtocol(CoreProtocol): @cython.iterable_coroutine async def prepare(self, stmt_name, query, timeout, - PreparedStatementState state=None): + *, + PreparedStatementState state=None, + 
record_class): if self.cancel_waiter is not None: await self.cancel_waiter if self.cancel_sent_waiter is not None: @@ -154,7 +160,8 @@ cdef class BaseProtocol(CoreProtocol): self._prepare(stmt_name, query) # network op self.last_query = query if state is None: - state = PreparedStatementState(stmt_name, query, self) + state = PreparedStatementState( + stmt_name, query, self, record_class) self.statement = state except Exception as ex: waiter.set_exception(ex) @@ -955,7 +962,7 @@ def _create_record(object mapping, tuple elems): desc = record.ApgRecordDesc_New( mapping, tuple(mapping) if mapping else ()) - rec = record.ApgRecord_New(desc, len(elems)) + rec = record.ApgRecord_New(Record, desc, len(elems)) for i in range(len(elems)): elem = elems[i] cpython.Py_INCREF(elem) diff --git a/asyncpg/protocol/record/__init__.pxd b/asyncpg/protocol/record/__init__.pxd index 3d6b5fd7..43ac5e33 100644 --- a/asyncpg/protocol/record/__init__.pxd +++ b/asyncpg/protocol/record/__init__.pxd @@ -13,7 +13,7 @@ cdef extern from "record/recordobj.h": cpython.PyTypeObject *ApgRecord_InitTypes() except NULL int ApgRecord_CheckExact(object) - object ApgRecord_New(object, int) + object ApgRecord_New(type, object, int) void ApgRecord_SET_ITEM(object, int, object) object ApgRecordDesc_New(object, object) diff --git a/asyncpg/protocol/record/recordobj.c b/asyncpg/protocol/record/recordobj.c index ec5bb9af..de443fce 100644 --- a/asyncpg/protocol/record/recordobj.c +++ b/asyncpg/protocol/record/recordobj.c @@ -15,9 +15,14 @@ static PyObject * record_new_items_iter(PyObject *); static ApgRecordObject *free_list[ApgRecord_MAXSAVESIZE]; static int numfree[ApgRecord_MAXSAVESIZE]; +static size_t MAX_RECORD_SIZE = ( + ((size_t)PY_SSIZE_T_MAX - sizeof(ApgRecordObject) - sizeof(PyObject *)) + / sizeof(PyObject *) +); + PyObject * -ApgRecord_New(PyObject *desc, Py_ssize_t size) +ApgRecord_New(PyTypeObject *type, PyObject *desc, Py_ssize_t size) { ApgRecordObject *o; Py_ssize_t i; @@ -27,19 +32,36 @@ 
ApgRecord_New(PyObject *desc, Py_ssize_t size) return NULL; } - if (size < ApgRecord_MAXSAVESIZE && (o = free_list[size]) != NULL) { - free_list[size] = (ApgRecordObject *) o->ob_item[0]; - numfree[size]--; - _Py_NewReference((PyObject *)o); - } - else { - /* Check for overflow */ - if ((size_t)size > ((size_t)PY_SSIZE_T_MAX - sizeof(ApgRecordObject) - - sizeof(PyObject *)) / sizeof(PyObject *)) { + if (type == &ApgRecord_Type) { + if (size < ApgRecord_MAXSAVESIZE && (o = free_list[size]) != NULL) { + free_list[size] = (ApgRecordObject *) o->ob_item[0]; + numfree[size]--; + _Py_NewReference((PyObject *)o); + } + else { + /* Check for overflow */ + if ((size_t)size > MAX_RECORD_SIZE) { + return PyErr_NoMemory(); + } + o = PyObject_GC_NewVar(ApgRecordObject, &ApgRecord_Type, size); + if (o == NULL) { + return NULL; + } + } + + PyObject_GC_Track(o); + } else { + assert(PyType_IsSubtype(type, &ApgRecord_Type)); + + if ((size_t)size > MAX_RECORD_SIZE) { return PyErr_NoMemory(); } - o = PyObject_GC_NewVar(ApgRecordObject, &ApgRecord_Type, size); - if (o == NULL) { + o = (ApgRecordObject *)type->tp_alloc(type, size); + if (!_PyObject_GC_IS_TRACKED(o)) { + PyErr_SetString( + PyExc_TypeError, + "record subclass is not tracked by GC" + ); return NULL; } } @@ -51,7 +73,6 @@ ApgRecord_New(PyObject *desc, Py_ssize_t size) Py_INCREF(desc); o->desc = (ApgRecordDescObject*)desc; o->self_hash = -1; - PyObject_GC_Track(o); return (PyObject *) o; } diff --git a/asyncpg/protocol/record/recordobj.h b/asyncpg/protocol/record/recordobj.h index d329f57e..2c6c1f1c 100644 --- a/asyncpg/protocol/record/recordobj.h +++ b/asyncpg/protocol/record/recordobj.h @@ -46,7 +46,7 @@ extern PyTypeObject ApgRecordDesc_Type; (((ApgRecordObject *)(op))->ob_item[i]) PyTypeObject *ApgRecord_InitTypes(void); -PyObject *ApgRecord_New(PyObject *, Py_ssize_t); +PyObject *ApgRecord_New(PyTypeObject *, PyObject *, Py_ssize_t); PyObject *ApgRecordDesc_New(PyObject *, PyObject *); #endif diff --git 
a/tests/test_record.py b/tests/test_record.py index e9abab45..8abe90ee 100644 --- a/tests/test_record.py +++ b/tests/test_record.py @@ -22,6 +22,14 @@ R_ABC = collections.OrderedDict([('a', 0), ('b', 1), ('c', 2)]) +class CustomRecord(asyncpg.Record): + pass + + +class AnotherCustomRecord(asyncpg.Record): + pass + + class TestRecord(tb.ConnectedTestCase): @contextlib.contextmanager @@ -339,3 +347,169 @@ async def test_record_no_new(self): with self.assertRaisesRegex( TypeError, "cannot create 'asyncpg.Record' instances"): asyncpg.Record() + + @tb.with_connection_options(record_class=CustomRecord) + async def test_record_subclass_01(self): + r = await self.con.fetchrow("SELECT 1 as a, '2' as b") + self.assertIsInstance(r, CustomRecord) + + r = await self.con.fetch("SELECT 1 as a, '2' as b") + self.assertIsInstance(r[0], CustomRecord) + + async with self.con.transaction(): + cur = await self.con.cursor("SELECT 1 as a, '2' as b") + r = await cur.fetchrow() + self.assertIsInstance(r, CustomRecord) + + cur = await self.con.cursor("SELECT 1 as a, '2' as b") + r = await cur.fetch(1) + self.assertIsInstance(r[0], CustomRecord) + + async with self.con.transaction(): + cur = self.con.cursor("SELECT 1 as a, '2' as b") + async for r in cur: + self.assertIsInstance(r, CustomRecord) + + ps = await self.con.prepare("SELECT 1 as a, '2' as b") + r = await ps.fetchrow() + self.assertIsInstance(r, CustomRecord) + + async def test_record_subclass_02(self): + r = await self.con.fetchrow( + "SELECT 1 as a, '2' as b", + record_class=CustomRecord, + ) + self.assertIsInstance(r, CustomRecord) + + r = await self.con.fetch( + "SELECT 1 as a, '2' as b", + record_class=CustomRecord, + ) + self.assertIsInstance(r[0], CustomRecord) + + async with self.con.transaction(): + cur = await self.con.cursor( + "SELECT 1 as a, '2' as b", + record_class=CustomRecord, + ) + r = await cur.fetchrow() + self.assertIsInstance(r, CustomRecord) + + cur = await self.con.cursor( + "SELECT 1 as a, '2' as b", + 
record_class=CustomRecord, + ) + r = await cur.fetch(1) + self.assertIsInstance(r[0], CustomRecord) + + async with self.con.transaction(): + cur = self.con.cursor( + "SELECT 1 as a, '2' as b", + record_class=CustomRecord, + ) + async for r in cur: + self.assertIsInstance(r, CustomRecord) + + ps = await self.con.prepare( + "SELECT 1 as a, '2' as b", + record_class=CustomRecord, + ) + r = await ps.fetchrow() + self.assertIsInstance(r, CustomRecord) + + r = await ps.fetch() + self.assertIsInstance(r[0], CustomRecord) + + @tb.with_connection_options(record_class=AnotherCustomRecord) + async def test_record_subclass_03(self): + r = await self.con.fetchrow( + "SELECT 1 as a, '2' as b", + record_class=CustomRecord, + ) + self.assertIsInstance(r, CustomRecord) + + r = await self.con.fetch( + "SELECT 1 as a, '2' as b", + record_class=CustomRecord, + ) + self.assertIsInstance(r[0], CustomRecord) + + async with self.con.transaction(): + cur = await self.con.cursor( + "SELECT 1 as a, '2' as b", + record_class=CustomRecord, + ) + r = await cur.fetchrow() + self.assertIsInstance(r, CustomRecord) + + cur = await self.con.cursor( + "SELECT 1 as a, '2' as b", + record_class=CustomRecord, + ) + r = await cur.fetch(1) + self.assertIsInstance(r[0], CustomRecord) + + async with self.con.transaction(): + cur = self.con.cursor( + "SELECT 1 as a, '2' as b", + record_class=CustomRecord, + ) + async for r in cur: + self.assertIsInstance(r, CustomRecord) + + ps = await self.con.prepare( + "SELECT 1 as a, '2' as b", + record_class=CustomRecord, + ) + r = await ps.fetchrow() + self.assertIsInstance(r, CustomRecord) + + r = await ps.fetch() + self.assertIsInstance(r[0], CustomRecord) + + @tb.with_connection_options(record_class=CustomRecord) + async def test_record_subclass_04(self): + r = await self.con.fetchrow( + "SELECT 1 as a, '2' as b", + record_class=asyncpg.Record, + ) + self.assertIs(type(r), asyncpg.Record) + + r = await self.con.fetch( + "SELECT 1 as a, '2' as b", + 
record_class=asyncpg.Record, + ) + self.assertIs(type(r[0]), asyncpg.Record) + + async with self.con.transaction(): + cur = await self.con.cursor( + "SELECT 1 as a, '2' as b", + record_class=asyncpg.Record, + ) + r = await cur.fetchrow() + self.assertIs(type(r), asyncpg.Record) + + cur = await self.con.cursor( + "SELECT 1 as a, '2' as b", + record_class=asyncpg.Record, + ) + r = await cur.fetch(1) + self.assertIs(type(r[0]), asyncpg.Record) + + async with self.con.transaction(): + cur = self.con.cursor( + "SELECT 1 as a, '2' as b", + record_class=asyncpg.Record, + ) + async for r in cur: + self.assertIs(type(r), asyncpg.Record) + + ps = await self.con.prepare( + "SELECT 1 as a, '2' as b", + record_class=asyncpg.Record, + ) + r = await ps.fetchrow() + self.assertIs(type(r), asyncpg.Record) + + r = await ps.fetch() + self.assertIs(type(r[0]), asyncpg.Record) diff --git a/tests/test_timeout.py b/tests/test_timeout.py index c2bca631..152a504a 100644 --- a/tests/test_timeout.py +++ b/tests/test_timeout.py @@ -138,9 +138,9 @@ async def test_command_timeout_01(self): class SlowPrepareConnection(pg_connection.Connection): """Connection class to test timeouts.""" - async def _get_statement(self, query, timeout): + async def _get_statement(self, query, timeout, **kwargs): await asyncio.sleep(0.3) - return await super()._get_statement(query, timeout) + return await super()._get_statement(query, timeout, **kwargs) class TestTimeoutCoversPrepare(tb.ConnectedTestCase): From c05d7260020f16792f098d19d7444fb07f5b9401 Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Tue, 18 Aug 2020 18:07:50 -0700 Subject: [PATCH 003/193] Fix compilation under Python 3.9+ (#610) Python 3.9 moved a bunch of GC-related symbols around, including `_PyObject_GC_IS_TRACKED` which is used in `recordobj.c`. 
Fixes: #609 --- asyncpg/protocol/record/recordobj.c | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/asyncpg/protocol/record/recordobj.c b/asyncpg/protocol/record/recordobj.c index de443fce..b734ee9b 100644 --- a/asyncpg/protocol/record/recordobj.c +++ b/asyncpg/protocol/record/recordobj.c @@ -8,6 +8,11 @@ #include "recordobj.h" +#ifdef _PyObject_GC_IS_TRACKED +# define _ApgObject_GC_IS_TRACKED _PyObject_GC_IS_TRACKED +#else +# define _ApgObject_GC_IS_TRACKED PyObject_GC_IsTracked +#endif static PyObject * record_iter(PyObject *); static PyObject * record_new_items_iter(PyObject *); @@ -57,7 +62,7 @@ ApgRecord_New(PyTypeObject *type, PyObject *desc, Py_ssize_t size) return PyErr_NoMemory(); } o = (ApgRecordObject *)type->tp_alloc(type, size); - if (!_PyObject_GC_IS_TRACKED(o)) { + if (!_ApgObject_GC_IS_TRACKED(o)) { PyErr_SetString( PyExc_TypeError, "record subclass is not tracked by GC" From 2bac166c1ba098b9ebdfca3dc5b8264ae850213c Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Sat, 15 Aug 2020 16:59:55 -0700 Subject: [PATCH 004/193] Add a workaround for bpo-37658 `asyncio.wait_for()` currently has a bug where it raises a `CancelledError` even when the wrapped awaitable has completed. The upstream fix is in python/cpython#21894. This adds a workaround until the aforementioned PR is merged, backported and released. Co-authored-by: Adam Liddell Fixes: #467 Fixes: #547 Related: #468 Supersedes: #548 --- asyncpg/compat.py | 16 ++++++++++++++++ asyncpg/connect_utils.py | 18 ++---------------- asyncpg/pool.py | 5 +++-- tests/test_pool.py | 20 ++++++++++++++++++++ 4 files changed, 41 insertions(+), 18 deletions(-) diff --git a/asyncpg/compat.py b/asyncpg/compat.py index 99a561d0..6dbce3c9 100644 --- a/asyncpg/compat.py +++ b/asyncpg/compat.py @@ -90,3 +90,19 @@ async def wait_closed(stream): # On Windows wait_closed() sometimes propagates # ConnectionResetError which is totally unnecessary. 
pass + + +# Workaround for https://bugs.python.org/issue37658 +async def wait_for(fut, timeout): + if timeout is None: + return await fut + + fut = asyncio.ensure_future(fut) + + try: + return await asyncio.wait_for(fut, timeout) + except asyncio.CancelledError: + if fut.done(): + return fut.result() + else: + raise diff --git a/asyncpg/connect_utils.py b/asyncpg/connect_utils.py index e5feebc2..65261664 100644 --- a/asyncpg/connect_utils.py +++ b/asyncpg/connect_utils.py @@ -636,18 +636,13 @@ async def _connect_addr( connector = asyncio.ensure_future(connector) before = time.monotonic() - try: - tr, pr = await asyncio.wait_for( - connector, timeout=timeout) - except asyncio.CancelledError: - connector.add_done_callback(_close_leaked_connection) - raise + tr, pr = await compat.wait_for(connector, timeout=timeout) timeout -= time.monotonic() - before try: if timeout <= 0: raise asyncio.TimeoutError - await asyncio.wait_for(connected, timeout=timeout) + await compat.wait_for(connected, timeout=timeout) except (Exception, asyncio.CancelledError): tr.close() raise @@ -745,12 +740,3 @@ def _create_future(loop): return asyncio.Future(loop=loop) else: return create_future() - - -def _close_leaked_connection(fut): - try: - tr, pr = fut.result() - if tr: - tr.close() - except asyncio.CancelledError: - pass # hide the exception diff --git a/asyncpg/pool.py b/asyncpg/pool.py index b3947451..c4321a2f 100644 --- a/asyncpg/pool.py +++ b/asyncpg/pool.py @@ -12,6 +12,7 @@ import time import warnings +from . import compat from . import connection from . import connect_utils from . 
import exceptions @@ -198,7 +199,7 @@ async def release(self, timeout): # If the connection is in cancellation state, # wait for the cancellation started = time.monotonic() - await asyncio.wait_for( + await compat.wait_for( self._con._protocol._wait_for_cancellation(), budget) if budget is not None: @@ -623,7 +624,7 @@ async def _acquire_impl(): if timeout is None: return await _acquire_impl() else: - return await asyncio.wait_for( + return await compat.wait_for( _acquire_impl(), timeout=timeout) async def release(self, connection, *, timeout=None): diff --git a/tests/test_pool.py b/tests/test_pool.py index e51923e4..9857dceb 100644 --- a/tests/test_pool.py +++ b/tests/test_pool.py @@ -379,6 +379,26 @@ async def worker(): self.cluster.trust_local_connections() self.cluster.reload() + async def test_pool_handles_task_cancel_in_acquire_with_timeout(self): + # See https://github.com/MagicStack/asyncpg/issues/547 + pool = await self.create_pool(database='postgres', + min_size=1, max_size=1) + + async def worker(): + async with pool.acquire(timeout=100): + pass + + # Schedule task + task = self.loop.create_task(worker()) + # Yield to task, but cancel almost immediately + await asyncio.sleep(0.00000000001) + # Cancel the worker. + task.cancel() + # Wait to make sure the cleanup has completed. + await asyncio.sleep(0.4) + # Check that the connection has been returned to the pool. + self.assertEqual(pool._queue.qsize(), 1) + async def test_pool_handles_task_cancel_in_release(self): # Use SlowResetConnectionPool to simulate # the Task.cancel() and __aexit__ race. 
From 4a627d551b5a9161cb8b5a01d1c3a5f71c4d0c12 Mon Sep 17 00:00:00 2001 From: Fantix King Date: Mon, 21 Sep 2020 23:44:42 -0500 Subject: [PATCH 005/193] Fix wrong default transaction isolation level (#622) * Fix wrong default transaction isolation level This fixes the issue when the default_transaction_isolation is not "read committed", `transaction(isolation='read_committed')` won't start a transaction in "read committed" isolation level. --- asyncpg/connection.py | 6 ++-- asyncpg/transaction.py | 30 ++++++++++++------ tests/test_transaction.py | 67 +++++++++++++++++++++++++++++++++++++++ 3 files changed, 92 insertions(+), 11 deletions(-) diff --git a/asyncpg/connection.py b/asyncpg/connection.py index 76c33a9d..338c0899 100644 --- a/asyncpg/connection.py +++ b/asyncpg/connection.py @@ -228,7 +228,7 @@ def get_settings(self): """ return self._protocol.get_settings() - def transaction(self, *, isolation='read_committed', readonly=False, + def transaction(self, *, isolation=None, readonly=False, deferrable=False): """Create a :class:`~transaction.Transaction` object. @@ -237,7 +237,9 @@ def transaction(self, *, isolation='read_committed', readonly=False, :param isolation: Transaction isolation mode, can be one of: `'serializable'`, `'repeatable_read'`, - `'read_committed'`. + `'read_committed'`. If not specified, the behavior + is up to the server and session, which is usually + ``read_committed``. :param readonly: Specifies whether or not this transaction is read-only. 
diff --git a/asyncpg/transaction.py b/asyncpg/transaction.py index 55768c46..4c799925 100644 --- a/asyncpg/transaction.py +++ b/asyncpg/transaction.py @@ -20,6 +20,11 @@ class TransactionState(enum.Enum): ISOLATION_LEVELS = {'read_committed', 'serializable', 'repeatable_read'} +ISOLATION_LEVELS_BY_VALUE = { + 'read committed': 'read_committed', + 'serializable': 'serializable', + 'repeatable read': 'repeatable_read', +} class Transaction(connresource.ConnectionResource): @@ -36,12 +41,12 @@ class Transaction(connresource.ConnectionResource): def __init__(self, connection, isolation, readonly, deferrable): super().__init__(connection) - if isolation not in ISOLATION_LEVELS: + if isolation and isolation not in ISOLATION_LEVELS: raise ValueError( 'isolation is expected to be either of {}, ' 'got {!r}'.format(ISOLATION_LEVELS, isolation)) - if isolation != 'serializable': + if isolation and isolation != 'serializable': if readonly: raise ValueError( '"readonly" is only supported for ' @@ -110,20 +115,27 @@ async def start(self): con._top_xact = self else: # Nested transaction block - top_xact = con._top_xact - if self._isolation != top_xact._isolation: - raise apg_errors.InterfaceError( - 'nested transaction has a different isolation level: ' - 'current {!r} != outer {!r}'.format( - self._isolation, top_xact._isolation)) + if self._isolation: + top_xact_isolation = con._top_xact._isolation + if top_xact_isolation is None: + top_xact_isolation = ISOLATION_LEVELS_BY_VALUE[ + await self._connection.fetchval( + 'SHOW transaction_isolation;')] + if self._isolation != top_xact_isolation: + raise apg_errors.InterfaceError( + 'nested transaction has a different isolation level: ' + 'current {!r} != outer {!r}'.format( + self._isolation, top_xact_isolation)) self._nested = True if self._nested: self._id = con._get_unique_id('savepoint') query = 'SAVEPOINT {};'.format(self._id) else: - if self._isolation == 'read_committed': + if self._isolation is None: query = 'BEGIN;' + elif 
self._isolation == 'read_committed': + query = 'BEGIN ISOLATION LEVEL READ COMMITTED;' elif self._isolation == 'repeatable_read': query = 'BEGIN ISOLATION LEVEL REPEATABLE READ;' else: diff --git a/tests/test_transaction.py b/tests/test_transaction.py index eb2d948e..8b7ffd95 100644 --- a/tests/test_transaction.py +++ b/tests/test_transaction.py @@ -179,3 +179,70 @@ async def test_transaction_within_manual_transaction(self): self.assertIsNone(self.con._top_xact) self.assertFalse(self.con.is_in_transaction()) + + async def test_isolation_level(self): + await self.con.reset() + default_isolation = await self.con.fetchval( + 'SHOW default_transaction_isolation' + ) + isolation_levels = { + None: default_isolation, + 'read_committed': 'read committed', + 'repeatable_read': 'repeatable read', + 'serializable': 'serializable', + } + set_sql = 'SET SESSION CHARACTERISTICS AS TRANSACTION ISOLATION LEVEL ' + get_sql = 'SHOW TRANSACTION ISOLATION LEVEL' + for tx_level in isolation_levels: + for conn_level in isolation_levels: + with self.subTest(conn=conn_level, tx=tx_level): + if conn_level: + await self.con.execute( + set_sql + isolation_levels[conn_level] + ) + level = await self.con.fetchval(get_sql) + self.assertEqual(level, isolation_levels[conn_level]) + async with self.con.transaction(isolation=tx_level): + level = await self.con.fetchval(get_sql) + self.assertEqual( + level, + isolation_levels[tx_level or conn_level], + ) + await self.con.reset() + + async def test_nested_isolation_level(self): + set_sql = 'SET SESSION CHARACTERISTICS AS TRANSACTION ISOLATION LEVEL ' + isolation_levels = { + 'read_committed': 'read committed', + 'repeatable_read': 'repeatable read', + 'serializable': 'serializable', + } + for inner in [None] + list(isolation_levels): + for outer, outer_sql_level in isolation_levels.items(): + for implicit in [False, True]: + with self.subTest( + implicit=implicit, outer=outer, inner=inner, + ): + if implicit: + await self.con.execute(set_sql + 
outer_sql_level) + outer_level = None + else: + outer_level = outer + + async with self.con.transaction(isolation=outer_level): + if inner and outer != inner: + with self.assertRaisesRegex( + asyncpg.InterfaceError, + 'current {!r} != outer {!r}'.format( + inner, outer + ) + ): + async with self.con.transaction( + isolation=inner, + ): + pass + else: + async with self.con.transaction( + isolation=inner, + ): + pass From 68b40cbf7f52df1c4e25fdbf3fc48fcf81f418c7 Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Mon, 21 Sep 2020 21:45:49 -0700 Subject: [PATCH 006/193] Fix set_type_codec() to accept standard SQL type names (#619) Currently, `Connection.set_type_codec()` only accepts type names as they appear in `pg_catalog.pg_type` and would refuse to handle a standard SQL spelling of a type like `character varying`. This is an oversight, as the internal type names aren't really supposed to be treated as public Postgres API. Additionally, for historical reasons, Postgres has a single-byte `"char"` type, which is distinct from both `varchar` and SQL `char`, which may lead to massive confusion if a user sets up a custom codec on it expecting to handle the `char(n)` type instead. 
Issue: #617 --- asyncpg/connection.py | 51 +++++++++++++++++++++--------------- asyncpg/introspection.py | 12 +++++++++ asyncpg/protocol/__init__.py | 3 ++- asyncpg/protocol/pgtypes.pxi | 18 +++++++++++++ tests/test_codecs.py | 33 +++++++++++++++++++++++ 5 files changed, 95 insertions(+), 22 deletions(-) diff --git a/asyncpg/connection.py b/asyncpg/connection.py index 338c0899..4b4d8a23 100644 --- a/asyncpg/connection.py +++ b/asyncpg/connection.py @@ -428,6 +428,32 @@ async def _introspect_types(self, typeoids, timeout): return await self.__execute( self._intro_query, (list(typeoids),), 0, timeout) + async def _introspect_type(self, typename, schema): + if ( + schema == 'pg_catalog' + and typename.lower() in protocol.BUILTIN_TYPE_NAME_MAP + ): + typeoid = protocol.BUILTIN_TYPE_NAME_MAP[typename.lower()] + rows = await self._execute( + introspection.TYPE_BY_OID, + [typeoid], + limit=0, + timeout=None, + ) + if rows: + typeinfo = rows[0] + else: + typeinfo = None + else: + typeinfo = await self.fetchrow( + introspection.TYPE_BY_NAME, typename, schema) + + if not typeinfo: + raise ValueError( + 'unknown type: {}.{}'.format(schema, typename)) + + return typeinfo + def cursor( self, query, @@ -1110,12 +1136,7 @@ async def set_type_codec(self, typename, *, ``format``. """ self._check_open() - - typeinfo = await self.fetchrow( - introspection.TYPE_BY_NAME, typename, schema) - if not typeinfo: - raise ValueError('unknown type: {}.{}'.format(schema, typename)) - + typeinfo = await self._introspect_type(typename, schema) if not introspection.is_scalar_type(typeinfo): raise ValueError( 'cannot use custom codec on non-scalar type {}.{}'.format( @@ -1142,15 +1163,9 @@ async def reset_type_codec(self, typename, *, schema='public'): .. 
versionadded:: 0.12.0 """ - typeinfo = await self.fetchrow( - introspection.TYPE_BY_NAME, typename, schema) - if not typeinfo: - raise ValueError('unknown type: {}.{}'.format(schema, typename)) - - oid = typeinfo['oid'] - + typeinfo = await self._introspect_type(typename, schema) self._protocol.get_settings().remove_python_codec( - oid, typename, schema) + typeinfo['oid'], typename, schema) # Statement cache is no longer valid due to codec changes. self._drop_local_statement_cache() @@ -1191,13 +1206,7 @@ async def set_builtin_type_codec(self, typename, *, core data type. Added the *format* keyword argument. """ self._check_open() - - typeinfo = await self.fetchrow( - introspection.TYPE_BY_NAME, typename, schema) - if not typeinfo: - raise exceptions.InterfaceError( - 'unknown type: {}.{}'.format(schema, typename)) - + typeinfo = await self._introspect_type(typename, schema) if not introspection.is_scalar_type(typeinfo): raise exceptions.InterfaceError( 'cannot alias non-scalar type {}.{}'.format( diff --git a/asyncpg/introspection.py b/asyncpg/introspection.py index 201f4341..4854e712 100644 --- a/asyncpg/introspection.py +++ b/asyncpg/introspection.py @@ -147,6 +147,18 @@ ''' +TYPE_BY_OID = '''\ +SELECT + t.oid, + t.typelem AS elemtype, + t.typtype AS kind +FROM + pg_catalog.pg_type AS t +WHERE + t.oid = $1 +''' + + # 'b' for a base type, 'd' for a domain, 'e' for enum. 
SCALAR_TYPE_KINDS = (b'b', b'd', b'e') diff --git a/asyncpg/protocol/__init__.py b/asyncpg/protocol/__init__.py index e872e2fa..8b3e06a0 100644 --- a/asyncpg/protocol/__init__.py +++ b/asyncpg/protocol/__init__.py @@ -4,5 +4,6 @@ # This module is part of asyncpg and is released under # the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 +# flake8: NOQA -from .protocol import Protocol, Record, NO_TIMEOUT # NOQA +from .protocol import Protocol, Record, NO_TIMEOUT, BUILTIN_TYPE_NAME_MAP diff --git a/asyncpg/protocol/pgtypes.pxi b/asyncpg/protocol/pgtypes.pxi index 14db69df..1be40fb2 100644 --- a/asyncpg/protocol/pgtypes.pxi +++ b/asyncpg/protocol/pgtypes.pxi @@ -216,5 +216,23 @@ BUILTIN_TYPE_NAME_MAP['double precision'] = \ BUILTIN_TYPE_NAME_MAP['timestamp with timezone'] = \ BUILTIN_TYPE_NAME_MAP['timestamptz'] +BUILTIN_TYPE_NAME_MAP['timestamp without timezone'] = \ + BUILTIN_TYPE_NAME_MAP['timestamp'] + BUILTIN_TYPE_NAME_MAP['time with timezone'] = \ BUILTIN_TYPE_NAME_MAP['timetz'] + +BUILTIN_TYPE_NAME_MAP['time without timezone'] = \ + BUILTIN_TYPE_NAME_MAP['time'] + +BUILTIN_TYPE_NAME_MAP['char'] = \ + BUILTIN_TYPE_NAME_MAP['bpchar'] + +BUILTIN_TYPE_NAME_MAP['character'] = \ + BUILTIN_TYPE_NAME_MAP['bpchar'] + +BUILTIN_TYPE_NAME_MAP['character varying'] = \ + BUILTIN_TYPE_NAME_MAP['varchar'] + +BUILTIN_TYPE_NAME_MAP['bit varying'] = \ + BUILTIN_TYPE_NAME_MAP['varbit'] diff --git a/tests/test_codecs.py b/tests/test_codecs.py index abd3c668..9b9c52b3 100644 --- a/tests/test_codecs.py +++ b/tests/test_codecs.py @@ -1255,6 +1255,39 @@ async def test_custom_codec_on_domain(self): finally: await self.con.execute('DROP DOMAIN custom_codec_t') + async def test_custom_codec_on_stdsql_types(self): + types = [ + 'smallint', + 'int', + 'integer', + 'bigint', + 'decimal', + 'real', + 'double precision', + 'timestamp with timezone', + 'time with timezone', + 'timestamp without timezone', + 'time without timezone', + 'char', + 'character', + 'character varying', 
+ 'bit varying', + 'CHARACTER VARYING' + ] + + for t in types: + with self.subTest(type=t): + try: + await self.con.set_type_codec( + t, + schema='pg_catalog', + encoder=str, + decoder=str, + format='text' + ) + finally: + await self.con.reset_type_codec(t, schema='pg_catalog') + async def test_custom_codec_on_enum(self): """Test encoding/decoding using a custom codec on an enum.""" await self.con.execute(''' From e064f59e7944bf7d85fde597ed2eceeec7d8a2b6 Mon Sep 17 00:00:00 2001 From: Fantix King Date: Thu, 24 Sep 2020 20:51:28 -0500 Subject: [PATCH 007/193] Ignore custom data codec for internal introspection (#618) Fixes: #617 --- asyncpg/connection.py | 45 ++++++++++++++++++++++-------- asyncpg/protocol/codecs/base.pxd | 3 +- asyncpg/protocol/codecs/base.pyx | 22 ++++++++------- asyncpg/protocol/prepared_stmt.pxd | 1 + asyncpg/protocol/prepared_stmt.pyx | 10 +++++-- asyncpg/protocol/protocol.pyx | 3 +- asyncpg/protocol/settings.pxd | 3 +- asyncpg/protocol/settings.pyx | 12 +++++--- tests/test_introspection.py | 15 ++++++++++ 9 files changed, 82 insertions(+), 32 deletions(-) diff --git a/asyncpg/connection.py b/asyncpg/connection.py index 4b4d8a23..b7266471 100644 --- a/asyncpg/connection.py +++ b/asyncpg/connection.py @@ -342,13 +342,16 @@ async def _get_statement( *, named: bool=False, use_cache: bool=True, + ignore_custom_codec=False, record_class=None ): if record_class is None: record_class = self._protocol.get_record_class() if use_cache: - statement = self._stmt_cache.get((query, record_class)) + statement = self._stmt_cache.get( + (query, record_class, ignore_custom_codec) + ) if statement is not None: return statement @@ -371,6 +374,7 @@ async def _get_statement( query, timeout, record_class=record_class, + ignore_custom_codec=ignore_custom_codec, ) need_reprepare = False types_with_missing_codecs = statement._init_types() @@ -415,7 +419,8 @@ async def _get_statement( ) if use_cache: - self._stmt_cache.put((query, record_class), statement) + 
self._stmt_cache.put( + (query, record_class, ignore_custom_codec), statement) # If we've just created a new statement object, check if there # are any statements for GC. @@ -426,7 +431,12 @@ async def _get_statement( async def _introspect_types(self, typeoids, timeout): return await self.__execute( - self._intro_query, (list(typeoids),), 0, timeout) + self._intro_query, + (list(typeoids),), + 0, + timeout, + ignore_custom_codec=True, + ) async def _introspect_type(self, typename, schema): if ( @@ -439,20 +449,22 @@ async def _introspect_type(self, typename, schema): [typeoid], limit=0, timeout=None, + ignore_custom_codec=True, ) - if rows: - typeinfo = rows[0] - else: - typeinfo = None else: - typeinfo = await self.fetchrow( - introspection.TYPE_BY_NAME, typename, schema) + rows = await self._execute( + introspection.TYPE_BY_NAME, + [typename, schema], + limit=1, + timeout=None, + ignore_custom_codec=True, + ) - if not typeinfo: + if not rows: raise ValueError( 'unknown type: {}.{}'.format(schema, typename)) - return typeinfo + return rows[0] def cursor( self, @@ -1325,7 +1337,9 @@ def _mark_stmts_as_closed(self): def _maybe_gc_stmt(self, stmt): if ( stmt.refs == 0 - and not self._stmt_cache.has((stmt.query, stmt.record_class)) + and not self._stmt_cache.has( + (stmt.query, stmt.record_class, stmt.ignore_custom_codec) + ) ): # If low-level `stmt` isn't referenced from any high-level # `PreparedStatement` object and is not in the `_stmt_cache`: @@ -1589,6 +1603,7 @@ async def _execute( timeout, *, return_status=False, + ignore_custom_codec=False, record_class=None ): with self._stmt_exclusive_section: @@ -1599,6 +1614,7 @@ async def _execute( timeout, return_status=return_status, record_class=record_class, + ignore_custom_codec=ignore_custom_codec, ) return result @@ -1610,6 +1626,7 @@ async def __execute( timeout, *, return_status=False, + ignore_custom_codec=False, record_class=None ): executor = lambda stmt, timeout: self._protocol.bind_execute( @@ -1620,6 
+1637,7 @@ async def __execute( executor, timeout, record_class=record_class, + ignore_custom_codec=ignore_custom_codec, ) async def _executemany(self, query, args, timeout): @@ -1637,6 +1655,7 @@ async def _do_execute( timeout, retry=True, *, + ignore_custom_codec=False, record_class=None ): if timeout is None: @@ -1644,6 +1663,7 @@ async def _do_execute( query, None, record_class=record_class, + ignore_custom_codec=ignore_custom_codec, ) else: before = time.monotonic() @@ -1651,6 +1671,7 @@ async def _do_execute( query, timeout, record_class=record_class, + ignore_custom_codec=ignore_custom_codec, ) after = time.monotonic() timeout -= after - before diff --git a/asyncpg/protocol/codecs/base.pxd b/asyncpg/protocol/codecs/base.pxd index be1f0a3f..e8136f7b 100644 --- a/asyncpg/protocol/codecs/base.pxd +++ b/asyncpg/protocol/codecs/base.pxd @@ -166,5 +166,6 @@ cdef class DataCodecConfig: dict _derived_type_codecs dict _custom_type_codecs - cdef inline Codec get_codec(self, uint32_t oid, ServerDataFormat format) + cdef inline Codec get_codec(self, uint32_t oid, ServerDataFormat format, + bint ignore_custom_codec=*) cdef inline Codec get_any_local_codec(self, uint32_t oid) diff --git a/asyncpg/protocol/codecs/base.pyx b/asyncpg/protocol/codecs/base.pyx index 238fa280..1c930cd0 100644 --- a/asyncpg/protocol/codecs/base.pyx +++ b/asyncpg/protocol/codecs/base.pyx @@ -692,18 +692,20 @@ cdef class DataCodecConfig: return codec - cdef inline Codec get_codec(self, uint32_t oid, ServerDataFormat format): + cdef inline Codec get_codec(self, uint32_t oid, ServerDataFormat format, + bint ignore_custom_codec=False): cdef Codec codec - codec = self.get_any_local_codec(oid) - if codec is not None: - if codec.format != format: - # The codec for this OID has been overridden by - # set_{builtin}_type_codec with a different format. - # We must respect that and not return a core codec. 
- return None - else: - return codec + if not ignore_custom_codec: + codec = self.get_any_local_codec(oid) + if codec is not None: + if codec.format != format: + # The codec for this OID has been overridden by + # set_{builtin}_type_codec with a different format. + # We must respect that and not return a core codec. + return None + else: + return codec codec = get_core_codec(oid, format) if codec is not None: diff --git a/asyncpg/protocol/prepared_stmt.pxd b/asyncpg/protocol/prepared_stmt.pxd index 90944c1a..4427bfdc 100644 --- a/asyncpg/protocol/prepared_stmt.pxd +++ b/asyncpg/protocol/prepared_stmt.pxd @@ -12,6 +12,7 @@ cdef class PreparedStatementState: readonly bint closed readonly int refs readonly type record_class + readonly bint ignore_custom_codec list row_desc diff --git a/asyncpg/protocol/prepared_stmt.pyx b/asyncpg/protocol/prepared_stmt.pyx index 60094be6..fd9f5a26 100644 --- a/asyncpg/protocol/prepared_stmt.pyx +++ b/asyncpg/protocol/prepared_stmt.pyx @@ -16,7 +16,8 @@ cdef class PreparedStatementState: str name, str query, BaseProtocol protocol, - type record_class + type record_class, + bint ignore_custom_codec ): self.name = name self.query = query @@ -28,6 +29,7 @@ cdef class PreparedStatementState: self.closed = False self.refs = 0 self.record_class = record_class + self.ignore_custom_codec = ignore_custom_codec def _get_parameters(self): cdef Codec codec @@ -205,7 +207,8 @@ cdef class PreparedStatementState: cols_mapping[col_name] = i cols_names.append(col_name) oid = row[3] - codec = self.settings.get_data_codec(oid) + codec = self.settings.get_data_codec( + oid, ignore_custom_codec=self.ignore_custom_codec) if codec is None or not codec.has_decoder(): raise exceptions.InternalClientError( 'no decoder for OID {}'.format(oid)) @@ -230,7 +233,8 @@ cdef class PreparedStatementState: for i from 0 <= i < self.args_num: p_oid = self.parameters_desc[i] - codec = self.settings.get_data_codec(p_oid) + codec = self.settings.get_data_codec( + p_oid, 
ignore_custom_codec=self.ignore_custom_codec) if codec is None or not codec.has_encoder(): raise exceptions.InternalClientError( 'no encoder for OID {}'.format(p_oid)) diff --git a/asyncpg/protocol/protocol.pyx b/asyncpg/protocol/protocol.pyx index 4f7ce675..a6d9ad5d 100644 --- a/asyncpg/protocol/protocol.pyx +++ b/asyncpg/protocol/protocol.pyx @@ -145,6 +145,7 @@ cdef class BaseProtocol(CoreProtocol): async def prepare(self, stmt_name, query, timeout, *, PreparedStatementState state=None, + ignore_custom_codec=False, record_class): if self.cancel_waiter is not None: await self.cancel_waiter @@ -161,7 +162,7 @@ cdef class BaseProtocol(CoreProtocol): self.last_query = query if state is None: state = PreparedStatementState( - stmt_name, query, self, record_class) + stmt_name, query, self, record_class, ignore_custom_codec) self.statement = state except Exception as ex: waiter.set_exception(ex) diff --git a/asyncpg/protocol/settings.pxd b/asyncpg/protocol/settings.pxd index 44b673c2..41131cdc 100644 --- a/asyncpg/protocol/settings.pxd +++ b/asyncpg/protocol/settings.pxd @@ -26,4 +26,5 @@ cdef class ConnectionSettings(pgproto.CodecContext): cpdef inline set_builtin_type_codec( self, typeoid, typename, typeschema, typekind, alias_to, format) cpdef inline Codec get_data_codec( - self, uint32_t oid, ServerDataFormat format=*) + self, uint32_t oid, ServerDataFormat format=*, + bint ignore_custom_codec=*) diff --git a/asyncpg/protocol/settings.pyx b/asyncpg/protocol/settings.pyx index 2ea72169..6121fce4 100644 --- a/asyncpg/protocol/settings.pyx +++ b/asyncpg/protocol/settings.pyx @@ -87,14 +87,18 @@ cdef class ConnectionSettings(pgproto.CodecContext): typekind, alias_to, _format) cpdef inline Codec get_data_codec(self, uint32_t oid, - ServerDataFormat format=PG_FORMAT_ANY): + ServerDataFormat format=PG_FORMAT_ANY, + bint ignore_custom_codec=False): if format == PG_FORMAT_ANY: - codec = self._data_codecs.get_codec(oid, PG_FORMAT_BINARY) + codec = 
self._data_codecs.get_codec( + oid, PG_FORMAT_BINARY, ignore_custom_codec) if codec is None: - codec = self._data_codecs.get_codec(oid, PG_FORMAT_TEXT) + codec = self._data_codecs.get_codec( + oid, PG_FORMAT_TEXT, ignore_custom_codec) return codec else: - return self._data_codecs.get_codec(oid, format) + return self._data_codecs.get_codec( + oid, format, ignore_custom_codec) def __getattr__(self, name): if not name.startswith('_'): diff --git a/tests/test_introspection.py b/tests/test_introspection.py index eb3258f9..7de4236f 100644 --- a/tests/test_introspection.py +++ b/tests/test_introspection.py @@ -43,6 +43,20 @@ def tearDownClass(cls): super().tearDownClass() + def setUp(self): + super().setUp() + self.loop.run_until_complete(self._add_custom_codec(self.con)) + + async def _add_custom_codec(self, conn): + # mess up with the codec - builtin introspection shouldn't be affected + await conn.set_type_codec( + "oid", + schema="pg_catalog", + encoder=lambda value: None, + decoder=lambda value: None, + format="text", + ) + @tb.with_connection_options(database='asyncpg_intro_test') async def test_introspection_on_large_db(self): await self.con.execute( @@ -142,6 +156,7 @@ async def test_introspection_retries_after_cache_bust(self): # query would cause introspection to retry. slow_intro_conn = await self.connect( connection_class=SlowIntrospectionConnection) + await self._add_custom_codec(slow_intro_conn) try: await self.con.execute(''' CREATE DOMAIN intro_1_t AS int; From 92aa80622788402d40251f88c978b776f7a0ecf6 Mon Sep 17 00:00:00 2001 From: Fantix King Date: Fri, 25 Sep 2020 14:05:13 -0500 Subject: [PATCH 008/193] Fix null/NULL quoting in array text encoder (#627) When given a textual json codec that yields 'null', the array encoder should generate b'["null"]' instead of b'["NULL"]' for a JSON[] type. 
Refs: #623 --- asyncpg/protocol/codecs/array.pyx | 4 +++- tests/test_codecs.py | 37 +++++++++++++++++++++++++++++++ 2 files changed, 40 insertions(+), 1 deletion(-) diff --git a/asyncpg/protocol/codecs/array.pyx b/asyncpg/protocol/codecs/array.pyx index e975b9f6..3c39e49c 100644 --- a/asyncpg/protocol/codecs/array.pyx +++ b/asyncpg/protocol/codecs/array.pyx @@ -209,7 +209,9 @@ cdef _write_textarray_data(ConnectionSettings settings, object obj, try: if not apg_strcasecmp_char(elem_str, b'NULL'): - array_data.write_bytes(b'"NULL"') + array_data.write_byte(b'"') + array_data.write_cstr(elem_str, 4) + array_data.write_byte(b'"') else: quoted_elem_len = elem_len need_quoting = False diff --git a/tests/test_codecs.py b/tests/test_codecs.py index 9b9c52b3..8ecbd092 100644 --- a/tests/test_codecs.py +++ b/tests/test_codecs.py @@ -1774,6 +1774,43 @@ async def test_no_result(self): st = await self.con.prepare('rollback') self.assertTupleEqual(st.get_attributes(), ()) + async def test_array_with_custom_json_text_codec(self): + import json + + await self.con.execute('CREATE TABLE tab (id serial, val json[]);') + insert_sql = 'INSERT INTO tab (val) VALUES (cast($1 AS json[]));' + query_sql = 'SELECT val FROM tab ORDER BY id DESC;' + try: + for custom_codec in [False, True]: + if custom_codec: + await self.con.set_type_codec( + 'json', + encoder=lambda v: v, + decoder=json.loads, + schema="pg_catalog", + ) + + for val in ['"null"', '22', 'null', '[2]', '{"a": null}']: + await self.con.execute(insert_sql, [val]) + result = await self.con.fetchval(query_sql) + if custom_codec: + self.assertEqual(result, [json.loads(val)]) + else: + self.assertEqual(result, [val]) + + await self.con.execute(insert_sql, [None]) + result = await self.con.fetchval(query_sql) + self.assertEqual(result, [None]) + + await self.con.execute(insert_sql, None) + result = await self.con.fetchval(query_sql) + self.assertEqual(result, None) + + finally: + await self.con.execute(''' + DROP TABLE tab; + ''') + 
@unittest.skipIf(os.environ.get('PGHOST'), 'using remote cluster for testing') class TestCodecsLargeOIDs(tb.ConnectedTestCase): From 8b313bdec943ce2d1121f0d8cbf58f9b1851ef33 Mon Sep 17 00:00:00 2001 From: Samuel Colvin Date: Wed, 25 Nov 2020 23:07:37 +0000 Subject: [PATCH 009/193] Fix link in connect docstring (#653) --- asyncpg/connection.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/asyncpg/connection.py b/asyncpg/connection.py index b7266471..69997710 100644 --- a/asyncpg/connection.py +++ b/asyncpg/connection.py @@ -1919,7 +1919,7 @@ async def connect(dsn=None, *, .. _postgres envvars: https://www.postgresql.org/docs/current/static/libpq-envars.html .. _libpq connection URI format: - https://www.postgresql.org/docs/current/static/\ + https://www.postgresql.org/docs/current/static/ libpq-connect.html#LIBPQ-CONNSTRING """ if not issubclass(connection_class, Connection): From 690048dbaaebac6172f003a9805bbb63abd28d8c Mon Sep 17 00:00:00 2001 From: Fantix King Date: Wed, 25 Nov 2020 18:02:16 -0600 Subject: [PATCH 010/193] Batch executemany (#295) Now `Bind` and `Execute` pairs are batched into 4 x 32KB buffers to take advantage of `writelines()`. A single `Sync` is sent at last, so that all args live in the same transaction. 
pgbench results of inserting 1000 rows per query with executemany() on Python 3.6 of 2.2GHz 2015 MacBook Air (best out of 5 runs): asyncpg 0.18.2: 710 queries in 30.31 seconds Latency: min 341.88ms; max 636.29ms; mean 425.022ms; std: 39.782ms (9.36%) Latency distribution: 25% under 401.67ms; 50% under 414.26ms; 75% under 435.37ms; 90% under 478.39ms; 99% under 576.638ms; 99.99% under 636.299ms Queries/sec: 23.42 Rows/sec: 23424.32 This patch: 4125 queries in 30.02 seconds Latency: min 23.14ms; max 734.91ms; mean 72.723ms; std: 49.226ms (67.69%) Latency distribution: 25% under 59.958ms; 50% under 65.414ms; 75% under 71.538ms; 90% under 80.95ms; 99% under 175.375ms; 99.99% under 734.912ms Queries/sec: 137.39 Rows/sec: 137389.64 This is a backwards incompatible change. Here `executemany()` becomes atomic, whereas previously any error in the middle of argument iteration would retain the results of the preceding set of arguments unless an explicit transaction block was used. Closes: #289 --- .travis.yml | 4 + asyncpg/connection.py | 10 ++ asyncpg/prepared_stmt.py | 24 +++- asyncpg/protocol/consts.pxi | 2 + asyncpg/protocol/coreproto.pxd | 12 +- asyncpg/protocol/coreproto.pyx | 184 ++++++++++++++++++++------- asyncpg/protocol/protocol.pyx | 34 ++++- tests/test_execute.py | 220 +++++++++++++++++++++++++++------ 8 files changed, 390 insertions(+), 100 deletions(-) diff --git a/.travis.yml b/.travis.yml index 680f91f1..830b0924 100644 --- a/.travis.yml +++ b/.travis.yml @@ -197,18 +197,22 @@ jobs: - name: "OSX py 3.5" os: osx + osx_image: xcode10.2 env: BUILD=tests,wheels PYTHON_VERSION=3.5.9 PGVERSION=12 - name: "OSX py 3.6" os: osx + osx_image: xcode10.2 env: BUILD=tests,wheels PYTHON_VERSION=3.6.10 PGVERSION=12 - name: "OSX py 3.7" os: osx + osx_image: xcode10.2 env: BUILD=tests,wheels PYTHON_VERSION=3.7.7 PGVERSION=12 - name: "OSX py 3.8" os: osx + osx_image: xcode10.2 env: BUILD=tests,wheels PYTHON_VERSION=3.8.3 PGVERSION=12 cache: diff --git a/asyncpg/connection.py 
b/asyncpg/connection.py index 69997710..c23d93b1 100644 --- a/asyncpg/connection.py +++ b/asyncpg/connection.py @@ -331,6 +331,13 @@ async def executemany(self, command: str, args, *, timeout: float=None): .. versionchanged:: 0.11.0 `timeout` became a keyword-only parameter. + + .. versionchanged:: 0.22.0 + The execution was changed to be in an implicit transaction if there + was no explicit transaction, so that it will no longer end up with + partial success. If you still need the previous behavior to + progressively execute many args, please use a loop with prepared + statement instead. """ self._check_open() return await self._executemany(command, args, timeout) @@ -1010,6 +1017,9 @@ async def _copy_in(self, copy_stmt, source, timeout): f = source elif isinstance(source, collections.abc.AsyncIterable): # assuming calling output returns an awaitable. + # copy_in() is designed to handle very large amounts of data, and + # the source async iterable is allowed to return an arbitrary + # amount of data on every iteration. reader = source else: # assuming source is an instance supporting the buffer protocol. diff --git a/asyncpg/prepared_stmt.py b/asyncpg/prepared_stmt.py index 5df6b674..eeb45367 100644 --- a/asyncpg/prepared_stmt.py +++ b/asyncpg/prepared_stmt.py @@ -202,11 +202,24 @@ async def fetchrow(self, *args, timeout=None): return None return data[0] - async def __bind_execute(self, args, limit, timeout): + @connresource.guarded + async def executemany(self, args, *, timeout: float=None): + """Execute the statement for each sequence of arguments in *args*. + + :param args: An iterable containing sequences of arguments. + :param float timeout: Optional timeout value in seconds. + :return None: This method discards the results of the operations. + + .. 
versionadded:: 0.22.0 + """ + return await self.__do_execute( + lambda protocol: protocol.bind_execute_many( + self._state, args, '', timeout)) + + async def __do_execute(self, executor): protocol = self._connection._protocol try: - data, status, _ = await protocol.bind_execute( - self._state, args, '', limit, True, timeout) + return await executor(protocol) except exceptions.OutdatedSchemaCacheError: await self._connection.reload_schema_state() # We can not find all manually created prepared statements, so just @@ -215,6 +228,11 @@ async def __bind_execute(self, args, limit, timeout): # invalidate themselves (unfortunately, clearing caches again). self._state.mark_closed() raise + + async def __bind_execute(self, args, limit, timeout): + data, status, _ = await self.__do_execute( + lambda protocol: protocol.bind_execute( + self._state, args, '', limit, True, timeout)) self._last_status = status return data diff --git a/asyncpg/protocol/consts.pxi b/asyncpg/protocol/consts.pxi index 97cbbf35..e1f8726e 100644 --- a/asyncpg/protocol/consts.pxi +++ b/asyncpg/protocol/consts.pxi @@ -8,3 +8,5 @@ DEF _MAXINT32 = 2**31 - 1 DEF _COPY_BUFFER_SIZE = 524288 DEF _COPY_SIGNATURE = b"PGCOPY\n\377\r\n\0" +DEF _EXECUTE_MANY_BUF_NUM = 4 +DEF _EXECUTE_MANY_BUF_SIZE = 32768 diff --git a/asyncpg/protocol/coreproto.pxd b/asyncpg/protocol/coreproto.pxd index c96b1fa5..f21559b4 100644 --- a/asyncpg/protocol/coreproto.pxd +++ b/asyncpg/protocol/coreproto.pxd @@ -114,6 +114,7 @@ cdef class CoreProtocol: # True - completed, False - suspended bint result_execute_completed + cpdef is_in_transaction(self) cdef _process__auth(self, char mtype) cdef _process__prepare(self, char mtype) cdef _process__bind_execute(self, char mtype) @@ -146,6 +147,7 @@ cdef class CoreProtocol: cdef _auth_password_message_sasl_continue(self, bytes server_response) cdef _write(self, buf) + cdef _writelines(self, list buffers) cdef _read_server_messages(self) @@ -155,9 +157,13 @@ cdef class CoreProtocol: cdef 
_ensure_connected(self) + cdef WriteBuffer _build_parse_message(self, str stmt_name, str query) cdef WriteBuffer _build_bind_message(self, str portal_name, str stmt_name, WriteBuffer bind_data) + cdef WriteBuffer _build_empty_bind_data(self) + cdef WriteBuffer _build_execute_message(self, str portal_name, + int32_t limit) cdef _connect(self) @@ -166,8 +172,10 @@ cdef class CoreProtocol: WriteBuffer bind_data, int32_t limit) cdef _bind_execute(self, str portal_name, str stmt_name, WriteBuffer bind_data, int32_t limit) - cdef _bind_execute_many(self, str portal_name, str stmt_name, - object bind_data) + cdef bint _bind_execute_many(self, str portal_name, str stmt_name, + object bind_data) + cdef bint _bind_execute_many_more(self, bint first=*) + cdef _bind_execute_many_fail(self, object error, bint first=*) cdef _bind(self, str portal_name, str stmt_name, WriteBuffer bind_data) cdef _execute(self, str portal_name, int32_t limit) diff --git a/asyncpg/protocol/coreproto.pyx b/asyncpg/protocol/coreproto.pyx index fdc26ec6..12ebf6c6 100644 --- a/asyncpg/protocol/coreproto.pyx +++ b/asyncpg/protocol/coreproto.pyx @@ -27,13 +27,13 @@ cdef class CoreProtocol: # type of `scram` is `SCRAMAuthentcation` self.scram = None - # executemany support data - self._execute_iter = None - self._execute_portal_name = None - self._execute_stmt_name = None - self._reset_result() + cpdef is_in_transaction(self): + # PQTRANS_INTRANS = idle, within transaction block + # PQTRANS_INERROR = idle, within failed transaction + return self.xact_status in (PQTRANS_INTRANS, PQTRANS_INERROR) + cdef _read_server_messages(self): cdef: char mtype @@ -263,27 +263,16 @@ cdef class CoreProtocol: elif mtype == b'Z': # ReadyForQuery self._parse_msg_ready_for_query() - if self.result_type == RESULT_FAILED: - self._push_result() - else: - try: - buf = next(self._execute_iter) - except StopIteration: - self._push_result() - except Exception as e: - self.result_type = RESULT_FAILED - self.result = e - 
self._push_result() - else: - # Next iteration over the executemany() arg sequence - self._send_bind_message( - self._execute_portal_name, self._execute_stmt_name, - buf, 0) + self._push_result() elif mtype == b'I': # EmptyQueryResponse self.buffer.discard_message() + elif mtype == b'1': + # ParseComplete + self.buffer.discard_message() + cdef _process__bind(self, char mtype): if mtype == b'E': # ErrorResponse @@ -730,6 +719,11 @@ cdef class CoreProtocol: self.result_execute_completed = False self._discard_data = False + # executemany support data + self._execute_iter = None + self._execute_portal_name = None + self._execute_stmt_name = None + cdef _set_state(self, ProtocolState new_state): if new_state == PROTOCOL_IDLE: if self.state == PROTOCOL_FAILED: @@ -780,6 +774,17 @@ cdef class CoreProtocol: if self.con_status != CONNECTION_OK: raise apg_exc.InternalClientError('not connected') + cdef WriteBuffer _build_parse_message(self, str stmt_name, str query): + cdef WriteBuffer buf + + buf = WriteBuffer.new_message(b'P') + buf.write_str(stmt_name, self.encoding) + buf.write_str(query, self.encoding) + buf.write_int16(0) + + buf.end_message() + return buf + cdef WriteBuffer _build_bind_message(self, str portal_name, str stmt_name, WriteBuffer bind_data): @@ -795,6 +800,25 @@ cdef class CoreProtocol: buf.end_message() return buf + cdef WriteBuffer _build_empty_bind_data(self): + cdef WriteBuffer buf + buf = WriteBuffer.new() + buf.write_int16(0) # The number of parameter format codes + buf.write_int16(0) # The number of parameter values + buf.write_int16(0) # The number of result-column format codes + return buf + + cdef WriteBuffer _build_execute_message(self, str portal_name, + int32_t limit): + cdef WriteBuffer buf + + buf = WriteBuffer.new_message(b'E') + buf.write_str(portal_name, self.encoding) # name of the portal + buf.write_int32(limit) # number of rows to return; 0 - all + + buf.end_message() + return buf + # API for subclasses cdef _connect(self): @@ -845,12 
+869,7 @@ cdef class CoreProtocol: self._ensure_connected() self._set_state(PROTOCOL_PREPARE) - buf = WriteBuffer.new_message(b'P') - buf.write_str(stmt_name, self.encoding) - buf.write_str(query, self.encoding) - buf.write_int16(0) - buf.end_message() - packet = buf + packet = self._build_parse_message(stmt_name, query) buf = WriteBuffer.new_message(b'D') buf.write_byte(b'S') @@ -872,10 +891,7 @@ cdef class CoreProtocol: buf = self._build_bind_message(portal_name, stmt_name, bind_data) packet = buf - buf = WriteBuffer.new_message(b'E') - buf.write_str(portal_name, self.encoding) # name of the portal - buf.write_int32(limit) # number of rows to return; 0 - all - buf.end_message() + buf = self._build_execute_message(portal_name, limit) packet.write_buffer(buf) packet.write_bytes(SYNC_MESSAGE) @@ -894,11 +910,8 @@ cdef class CoreProtocol: self._send_bind_message(portal_name, stmt_name, bind_data, limit) - cdef _bind_execute_many(self, str portal_name, str stmt_name, - object bind_data): - - cdef WriteBuffer buf - + cdef bint _bind_execute_many(self, str portal_name, str stmt_name, + object bind_data): self._ensure_connected() self._set_state(PROTOCOL_BIND_EXECUTE_MANY) @@ -907,17 +920,92 @@ cdef class CoreProtocol: self._execute_iter = bind_data self._execute_portal_name = portal_name self._execute_stmt_name = stmt_name + return self._bind_execute_many_more(True) - try: - buf = next(bind_data) - except StopIteration: - self._push_result() - except Exception as e: - self.result_type = RESULT_FAILED - self.result = e + cdef bint _bind_execute_many_more(self, bint first=False): + cdef: + WriteBuffer packet + WriteBuffer buf + list buffers = [] + + # as we keep sending, the server may return an error early + if self.result_type == RESULT_FAILED: + self._write(SYNC_MESSAGE) + return False + + # collect up to four 32KB buffers to send + # https://github.com/MagicStack/asyncpg/pull/289#issuecomment-391215051 + while len(buffers) < _EXECUTE_MANY_BUF_NUM: + packet = 
WriteBuffer.new() + + # fill one 32KB buffer + while packet.len() < _EXECUTE_MANY_BUF_SIZE: + try: + # grab one item from the input + buf = next(self._execute_iter) + + # reached the end of the input + except StopIteration: + if first: + # if we never send anything, simply set the result + self._push_result() + else: + # otherwise, append SYNC and send the buffers + packet.write_bytes(SYNC_MESSAGE) + buffers.append(packet) + self._writelines(buffers) + return False + + # error in input, give up the buffers and cleanup + except Exception as ex: + self._bind_execute_many_fail(ex, first) + return False + + # all good, write to the buffer + first = False + packet.write_buffer( + self._build_bind_message( + self._execute_portal_name, + self._execute_stmt_name, + buf, + ) + ) + packet.write_buffer( + self._build_execute_message(self._execute_portal_name, 0, + ) + ) + + # collected one buffer + buffers.append(packet) + + # write to the wire, and signal the caller for more to send + self._writelines(buffers) + return True + + cdef _bind_execute_many_fail(self, object error, bint first=False): + cdef WriteBuffer buf + + self.result_type = RESULT_FAILED + self.result = error + if first: self._push_result() + elif self.is_in_transaction(): + # we're in an explicit transaction, just SYNC + self._write(SYNC_MESSAGE) else: - self._send_bind_message(portal_name, stmt_name, buf, 0) + # In an implicit transaction, if `ignore_till_sync` is set, + # `ROLLBACK` will be ignored and `Sync` will restore the state; + # or the transaction will be rolled back with a warning saying + # that there was no transaction, but rollback is done anyway, + # so we could safely ignore this warning. + # GOTCHA: cannot use simple query message here, because it is + # ignored if `ignore_till_sync` is set. 
+ buf = self._build_parse_message('', 'ROLLBACK') + buf.write_buffer(self._build_bind_message( + '', '', self._build_empty_bind_data())) + buf.write_buffer(self._build_execute_message('', 0)) + buf.write_bytes(SYNC_MESSAGE) + self._write(buf) cdef _execute(self, str portal_name, int32_t limit): cdef WriteBuffer buf @@ -927,10 +1015,7 @@ cdef class CoreProtocol: self.result = [] - buf = WriteBuffer.new_message(b'E') - buf.write_str(portal_name, self.encoding) # name of the portal - buf.write_int32(limit) # number of rows to return; 0 - all - buf.end_message() + buf = self._build_execute_message(portal_name, limit) buf.write_bytes(SYNC_MESSAGE) @@ -1013,6 +1098,9 @@ cdef class CoreProtocol: cdef _write(self, buf): raise NotImplementedError + cdef _writelines(self, list buffers): + raise NotImplementedError + cdef _decode_row(self, const char* buf, ssize_t buf_len): pass diff --git a/asyncpg/protocol/protocol.pyx b/asyncpg/protocol/protocol.pyx index a6d9ad5d..4df256e6 100644 --- a/asyncpg/protocol/protocol.pyx +++ b/asyncpg/protocol/protocol.pyx @@ -126,11 +126,6 @@ cdef class BaseProtocol(CoreProtocol): def get_record_class(self): return self.record_class - def is_in_transaction(self): - # PQTRANS_INTRANS = idle, within transaction block - # PQTRANS_INERROR = idle, within failed transaction - return self.xact_status in (PQTRANS_INTRANS, PQTRANS_INERROR) - cdef inline resume_reading(self): if not self.is_reading: self.is_reading = True @@ -215,6 +210,7 @@ cdef class BaseProtocol(CoreProtocol): self._check_state() timeout = self._get_timeout_impl(timeout) + timer = Timer(timeout) # Make sure the argument sequence is encoded lazily with # this generator expression to keep the memory pressure under @@ -224,7 +220,7 @@ cdef class BaseProtocol(CoreProtocol): waiter = self._new_waiter(timeout) try: - self._bind_execute_many( + more = self._bind_execute_many( portal_name, state.name, arg_bufs) # network op @@ -233,6 +229,22 @@ cdef class BaseProtocol(CoreProtocol): 
self.statement = state self.return_extra = False self.queries_count += 1 + + while more: + with timer: + await asyncio.wait_for( + self.writing_allowed.wait(), + timeout=timer.get_remaining_budget()) + # On Windows the above event somehow won't allow context + # switch, so forcing one with sleep(0) here + await asyncio.sleep(0) + if not timer.has_budget_greater_than(0): + raise asyncio.TimeoutError + more = self._bind_execute_many_more() # network op + + except asyncio.TimeoutError as e: + self._bind_execute_many_fail(e) # network op + except Exception as ex: waiter.set_exception(ex) self._coreproto_error() @@ -893,6 +905,9 @@ cdef class BaseProtocol(CoreProtocol): cdef _write(self, buf): self.transport.write(memoryview(buf)) + cdef _writelines(self, list buffers): + self.transport.writelines(buffers) + # asyncio callbacks: def data_received(self, data): @@ -945,6 +960,13 @@ class Timer: def get_remaining_budget(self): return self._budget + def has_budget_greater_than(self, amount): + if self._budget is None: + # Unlimited budget. 
+ return True + else: + return self._budget > amount + class Protocol(BaseProtocol, asyncio.Protocol): pass diff --git a/tests/test_execute.py b/tests/test_execute.py index 5ecc100f..8cf0d2f2 100644 --- a/tests/test_execute.py +++ b/tests/test_execute.py @@ -9,6 +9,7 @@ import asyncpg from asyncpg import _testbase as tb +from asyncpg.exceptions import UniqueViolationError class TestExecuteScript(tb.ConnectedTestCase): @@ -97,57 +98,194 @@ async def test_execute_script_interrupted_terminate(self): self.con.terminate() - async def test_execute_many_1(self): - await self.con.execute('CREATE TEMP TABLE exmany (a text, b int)') - try: - result = await self.con.executemany(''' - INSERT INTO exmany VALUES($1, $2) - ''', [ - ('a', 1), ('b', 2), ('c', 3), ('d', 4) - ]) +class TestExecuteMany(tb.ConnectedTestCase): + def setUp(self): + super().setUp() + self.loop.run_until_complete(self.con.execute( + 'CREATE TABLE exmany (a text, b int PRIMARY KEY)')) - self.assertIsNone(result) + def tearDown(self): + self.loop.run_until_complete(self.con.execute('DROP TABLE exmany')) + super().tearDown() - result = await self.con.fetch(''' - SELECT * FROM exmany - ''') + async def test_executemany_basic(self): + result = await self.con.executemany(''' + INSERT INTO exmany VALUES($1, $2) + ''', [ + ('a', 1), ('b', 2), ('c', 3), ('d', 4) + ]) - self.assertEqual(result, [ - ('a', 1), ('b', 2), ('c', 3), ('d', 4) - ]) + self.assertIsNone(result) - # Empty set - result = await self.con.executemany(''' - INSERT INTO exmany VALUES($1, $2) - ''', ()) + result = await self.con.fetch(''' + SELECT * FROM exmany + ''') - result = await self.con.fetch(''' - SELECT * FROM exmany - ''') + self.assertEqual(result, [ + ('a', 1), ('b', 2), ('c', 3), ('d', 4) + ]) - self.assertEqual(result, [ - ('a', 1), ('b', 2), ('c', 3), ('d', 4) - ]) - finally: - await self.con.execute('DROP TABLE exmany') + # Empty set + await self.con.executemany(''' + INSERT INTO exmany VALUES($1, $2) + ''', ()) - async def 
test_execute_many_2(self): - await self.con.execute('CREATE TEMP TABLE exmany (b int)') + result = await self.con.fetch(''' + SELECT * FROM exmany + ''') - try: - bad_data = ([1 / 0] for v in range(10)) + self.assertEqual(result, [ + ('a', 1), ('b', 2), ('c', 3), ('d', 4) + ]) - with self.assertRaises(ZeroDivisionError): - async with self.con.transaction(): - await self.con.executemany(''' - INSERT INTO exmany VALUES($1) - ''', bad_data) + async def test_executemany_bad_input(self): + bad_data = ([1 / 0] for v in range(10)) - good_data = ([v] for v in range(10)) + with self.assertRaises(ZeroDivisionError): async with self.con.transaction(): await self.con.executemany(''' - INSERT INTO exmany VALUES($1) - ''', good_data) - finally: - await self.con.execute('DROP TABLE exmany') + INSERT INTO exmany (b)VALUES($1) + ''', bad_data) + + good_data = ([v] for v in range(10)) + async with self.con.transaction(): + await self.con.executemany(''' + INSERT INTO exmany (b)VALUES($1) + ''', good_data) + + async def test_executemany_server_failure(self): + with self.assertRaises(UniqueViolationError): + await self.con.executemany(''' + INSERT INTO exmany VALUES($1, $2) + ''', [ + ('a', 1), ('b', 2), ('c', 2), ('d', 4) + ]) + result = await self.con.fetch('SELECT * FROM exmany') + self.assertEqual(result, []) + + async def test_executemany_server_failure_after_writes(self): + with self.assertRaises(UniqueViolationError): + await self.con.executemany(''' + INSERT INTO exmany VALUES($1, $2) + ''', [('a' * 32768, x) for x in range(10)] + [ + ('b', 12), ('c', 12), ('d', 14) + ]) + result = await self.con.fetch('SELECT b FROM exmany') + self.assertEqual(result, []) + + async def test_executemany_server_failure_during_writes(self): + # failure at the beginning, server error detected in the middle + pos = 0 + + def gen(): + nonlocal pos + while pos < 128: + pos += 1 + if pos < 3: + yield ('a', 0) + else: + yield 'a' * 32768, pos + + with self.assertRaises(UniqueViolationError): + await 
self.con.executemany(''' + INSERT INTO exmany VALUES($1, $2) + ''', gen()) + result = await self.con.fetch('SELECT b FROM exmany') + self.assertEqual(result, []) + self.assertLess(pos, 128, 'should stop early') + + async def test_executemany_client_failure_after_writes(self): + with self.assertRaises(ZeroDivisionError): + await self.con.executemany(''' + INSERT INTO exmany VALUES($1, $2) + ''', (('a' * 32768, y + y / y) for y in range(10, -1, -1))) + result = await self.con.fetch('SELECT b FROM exmany') + self.assertEqual(result, []) + + async def test_executemany_timeout(self): + with self.assertRaises(asyncio.TimeoutError): + await self.con.executemany(''' + INSERT INTO exmany VALUES(pg_sleep(0.1) || $1, $2) + ''', [('a' * 32768, x) for x in range(128)], timeout=0.5) + result = await self.con.fetch('SELECT * FROM exmany') + self.assertEqual(result, []) + + async def test_executemany_timeout_flow_control(self): + event = asyncio.Event() + + async def locker(): + test_func = getattr(self, self._testMethodName).__func__ + opts = getattr(test_func, '__connect_options__', {}) + conn = await self.connect(**opts) + try: + tx = conn.transaction() + await tx.start() + await conn.execute("UPDATE exmany SET a = '1' WHERE b = 10") + event.set() + await asyncio.sleep(1) + await tx.rollback() + finally: + event.set() + await conn.close() + + await self.con.executemany(''' + INSERT INTO exmany VALUES(NULL, $1) + ''', [(x,) for x in range(128)]) + fut = asyncio.ensure_future(locker()) + await event.wait() + with self.assertRaises(asyncio.TimeoutError): + await self.con.executemany(''' + UPDATE exmany SET a = $1 WHERE b = $2 + ''', [('a' * 32768, x) for x in range(128)], timeout=0.5) + await fut + result = await self.con.fetch( + 'SELECT * FROM exmany WHERE a IS NOT NULL') + self.assertEqual(result, []) + + async def test_executemany_client_failure_in_transaction(self): + tx = self.con.transaction() + await tx.start() + with self.assertRaises(ZeroDivisionError): + await 
self.con.executemany(''' + INSERT INTO exmany VALUES($1, $2) + ''', (('a' * 32768, y + y / y) for y in range(10, -1, -1))) + result = await self.con.fetch('SELECT b FROM exmany') + # only 2 batches executed (2 x 4) + self.assertEqual( + [x[0] for x in result], [y + 1 for y in range(10, 2, -1)]) + await tx.rollback() + result = await self.con.fetch('SELECT b FROM exmany') + self.assertEqual(result, []) + + async def test_executemany_client_server_failure_conflict(self): + self.con._transport.set_write_buffer_limits(65536 * 64, 16384 * 64) + with self.assertRaises(UniqueViolationError): + await self.con.executemany(''' + INSERT INTO exmany VALUES($1, 0) + ''', (('a' * 32768,) for y in range(4, -1, -1) if y / y)) + result = await self.con.fetch('SELECT b FROM exmany') + self.assertEqual(result, []) + + async def test_executemany_prepare(self): + stmt = await self.con.prepare(''' + INSERT INTO exmany VALUES($1, $2) + ''') + result = await stmt.executemany([ + ('a', 1), ('b', 2), ('c', 3), ('d', 4) + ]) + self.assertIsNone(result) + result = await self.con.fetch(''' + SELECT * FROM exmany + ''') + self.assertEqual(result, [ + ('a', 1), ('b', 2), ('c', 3), ('d', 4) + ]) + # Empty set + await stmt.executemany(()) + result = await self.con.fetch(''' + SELECT * FROM exmany + ''') + self.assertEqual(result, [ + ('a', 1), ('b', 2), ('c', 3), ('d', 4) + ]) From 5ddabb191ffe72f9c1f733451610d1a363ebcb8a Mon Sep 17 00:00:00 2001 From: Kevin Messer Date: Thu, 26 Nov 2020 01:10:01 +0100 Subject: [PATCH 011/193] Bump pgproto to fix pyinstaller (#651) Fixes #649 --- asyncpg/pgproto | 2 +- setup.py | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/asyncpg/pgproto b/asyncpg/pgproto index 76091445..2dc7cc46 160000 --- a/asyncpg/pgproto +++ b/asyncpg/pgproto @@ -1 +1 @@ -Subproject commit 76091445db8b49a7d78504b47eb34fcbfbb89567 +Subproject commit 2dc7cc4635b08418dfbc6105b45947244102e86d diff --git a/setup.py b/setup.py index 5bb9e693..7ed4064c 100644 --- a/setup.py +++ 
b/setup.py @@ -291,6 +291,7 @@ def finalize_options(self): extra_compile_args=CFLAGS, extra_link_args=LDFLAGS), ], + install_requires=['typing-extensions>=3.7.4.3;python_version<"3.8"'], cmdclass={'build_ext': build_ext, 'build_py': build_py, 'sdist': sdist}, test_suite='tests.suite', extras_require=EXTRA_DEPENDENCIES, From 0d23182015d87ebab2fefe85d31e981a3ff09412 Mon Sep 17 00:00:00 2001 From: Peter Cooner Date: Wed, 25 Nov 2020 16:12:09 -0800 Subject: [PATCH 012/193] Fix AttributeError exception (#632) Accessing settings._settings raises an AttributeError exception because object.__getattr__ does not exist. --- asyncpg/protocol/settings.pyx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/asyncpg/protocol/settings.pyx b/asyncpg/protocol/settings.pyx index 6121fce4..9ab32f39 100644 --- a/asyncpg/protocol/settings.pyx +++ b/asyncpg/protocol/settings.pyx @@ -107,7 +107,7 @@ cdef class ConnectionSettings(pgproto.CodecContext): except KeyError: raise AttributeError(name) from None - return object.__getattr__(self, name) + return object.__getattribute__(self, name) def __repr__(self): return ''.format(self._settings) From 7c77c33355a9c228d23f67f9e9b3383a7a801a96 Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Wed, 25 Nov 2020 18:47:56 -0800 Subject: [PATCH 013/193] Add a note that DSN components must be URL-quoted Closes: #598. --- asyncpg/connection.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/asyncpg/connection.py b/asyncpg/connection.py index c23d93b1..5942d920 100644 --- a/asyncpg/connection.py +++ b/asyncpg/connection.py @@ -1773,6 +1773,11 @@ async def connect(dsn=None, *, Unlike libpq, asyncpg will treat unrecognized options as `server settings`_ to be used for the connection. + .. note:: + + The URI must be *valid*, which means that all components must + be properly quoted with :py:func:`urllib.parse.quote`. 
+ :param host: Database host address as one of the following: From e5856612ed1fa2761c815cc1bd649c13a5b1ce83 Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Thu, 26 Nov 2020 21:28:52 -0800 Subject: [PATCH 014/193] Switch to Github Actions --- .ci/appveyor.yml | 60 ---- .ci/build-manylinux-wheels.sh | 30 -- .ci/package-version.py | 25 -- .ci/push_key.enc | Bin 3248 -> 0 bytes .ci/pypi-check.py | 30 -- .ci/requirements-publish.txt | 2 - .ci/s3-download-release.py | 74 ----- .ci/s3-upload.py | 62 ---- .ci/travis-before-install.sh | 51 ---- .ci/travis-build-docs.sh | 11 - .ci/travis-build-wheels.sh | 85 ------ .ci/travis-install.sh | 13 - .ci/travis-publish-docs.sh | 70 ----- .ci/travis-release.sh | 60 ---- .ci/travis-tests.sh | 28 -- .github/workflows/build-manylinux-wheels.sh | 25 ++ .github/workflows/install-postgres.sh | 16 + .github/workflows/release.yml | 309 ++++++++++++++++++++ .github/workflows/test-wheels.sh | 7 + .github/workflows/tests.yml | 137 +++++++++ .travis.yml | 243 --------------- Makefile | 2 +- asyncpg/__init__.py | 21 +- asyncpg/_version.py | 13 + asyncpg/cluster.py | 63 ++-- docs/conf.py | 4 +- setup.py | 6 +- tests/test_listeners.py | 9 +- 28 files changed, 552 insertions(+), 904 deletions(-) delete mode 100644 .ci/appveyor.yml delete mode 100755 .ci/build-manylinux-wheels.sh delete mode 100755 .ci/package-version.py delete mode 100644 .ci/push_key.enc delete mode 100755 .ci/pypi-check.py delete mode 100644 .ci/requirements-publish.txt delete mode 100755 .ci/s3-download-release.py delete mode 100755 .ci/s3-upload.py delete mode 100755 .ci/travis-before-install.sh delete mode 100755 .ci/travis-build-docs.sh delete mode 100755 .ci/travis-build-wheels.sh delete mode 100755 .ci/travis-install.sh delete mode 100755 .ci/travis-publish-docs.sh delete mode 100755 .ci/travis-release.sh delete mode 100755 .ci/travis-tests.sh create mode 100755 .github/workflows/build-manylinux-wheels.sh create mode 100755 .github/workflows/install-postgres.sh 
create mode 100644 .github/workflows/release.yml create mode 100755 .github/workflows/test-wheels.sh create mode 100644 .github/workflows/tests.yml delete mode 100644 .travis.yml create mode 100644 asyncpg/_version.py diff --git a/.ci/appveyor.yml b/.ci/appveyor.yml deleted file mode 100644 index 6723c440..00000000 --- a/.ci/appveyor.yml +++ /dev/null @@ -1,60 +0,0 @@ -services: - - postgresql95 - -environment: - global: - PGINSTALLATION: C:\\Program Files\\PostgreSQL\\9.5\\bin - S3_UPLOAD_USERNAME: oss-ci-bot - S3_UPLOAD_BUCKET: magicstack-oss-releases - S3_UPLOAD_ACCESSKEY: - secure: 1vmOqSXq5zDN8UdezZ3H4l0A9LUJiTr7Wuy9whCdffE= - S3_UPLOAD_SECRET: - secure: XudOvV6WtY9yRoqKahXMswFth8SF1UTnSXws4UBjeqzQUjOx2V2VRvIdpPfiqUKt - - matrix: - - PYTHON: "C:\\Python35\\python.exe" - - PYTHON: "C:\\Python35-x64\\python.exe" - - PYTHON: "C:\\Python36\\python.exe" - - PYTHON: "C:\\Python36-x64\\python.exe" - - PYTHON: "C:\\Python37\\python.exe" - - PYTHON: "C:\\Python37-x64\\python.exe" - - PYTHON: "C:\\Python38\\python.exe" - - PYTHON: "C:\\Python38-x64\\python.exe" - -branches: - # Avoid building PR branches. 
- only: - - master - - ci - - releases - -install: - - git submodule update --init --recursive - - "%PYTHON% -m pip install --upgrade pip wheel setuptools" - -build_script: - - "%PYTHON% setup.py build_ext --inplace --cython-always" - -test_script: - - "%PYTHON% setup.py --verbose test" - -after_test: - - "%PYTHON% setup.py bdist_wheel" - -artifacts: - - path: dist\* - -deploy_script: - - ps: | - if ($env:appveyor_repo_branch -eq 'releases') { - & "$env:PYTHON" -m pip install -U -r ".ci/requirements-publish.txt" - $PACKAGE_VERSION = & "$env:PYTHON" ".ci/package-version.py" - $PYPI_VERSION = & "$env:PYTHON" ".ci/pypi-check.py" "asyncpg" - - if ($PACKAGE_VERSION -eq $PYPI_VERSION) { - Write-Error "asyncpg-$PACKAGE_VERSION is already published on PyPI" - exit 1 - } - - & "$env:PYTHON" ".ci/s3-upload.py" dist\*.whl - } diff --git a/.ci/build-manylinux-wheels.sh b/.ci/build-manylinux-wheels.sh deleted file mode 100755 index 40030785..00000000 --- a/.ci/build-manylinux-wheels.sh +++ /dev/null @@ -1,30 +0,0 @@ -#!/bin/bash - -set -e -x - -# iproute isn't included in CentOS 7 -yum install -y iproute - -# Compile wheels -PYTHON="/opt/python/${PYTHON_VERSION}/bin/python" -PIP="/opt/python/${PYTHON_VERSION}/bin/pip" -${PIP} install --upgrade setuptools pip wheel -cd /io -make clean -${PYTHON} setup.py bdist_wheel - -# Bundle external shared libraries into the wheels. -for whl in /io/dist/*.whl; do - auditwheel repair $whl -w /tmp/ - ${PIP} install /tmp/*.whl - mv /tmp/*.whl /io/dist/ - rm /io/dist/*-linux_*.whl -done - -# Grab docker host, where Postgres should be running. 
-export PGHOST=$(ip route | awk '/default/ { print $3 }' | uniq) -export PGUSER="postgres" - -rm -rf /io/tests/__pycache__ -make -C /io PYTHON="${PYTHON}" testinstalled -rm -rf /io/tests/__pycache__ diff --git a/.ci/package-version.py b/.ci/package-version.py deleted file mode 100755 index 59d864fe..00000000 --- a/.ci/package-version.py +++ /dev/null @@ -1,25 +0,0 @@ -#!/usr/bin/env python3 - - -import os.path -import sys - - -def main(): - version_file = os.path.join( - os.path.dirname(os.path.dirname(__file__)), 'asyncpg', '__init__.py') - - with open(version_file, 'r') as f: - for line in f: - if line.startswith('__version__ ='): - _, _, version = line.partition('=') - print(version.strip(" \n'\"")) - return 0 - - print('could not find package version in asyncpg/__init__.py', - file=sys.stderr) - return 1 - - -if __name__ == '__main__': - sys.exit(main()) diff --git a/.ci/push_key.enc b/.ci/push_key.enc deleted file mode 100644 index ae261920449f86f34458dd8dff6da57b6f86b6ab..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 3248 zcmV;h3{UfZ1W05KN2j?hWWEOtl4QD)5}{jg@AfUGr`6qvI*qIcjaJPw5C}f(rd%Sn z(rVxVaAy7)U`dvF`Wp~YwJ7#s8zv(<&rrkO1i2Sz3XAyKpT6)p7{O%G%G9w_6?r(Y zt6``oi{)#ozc}-Gylh(&6{J0>8*hyiQsDiS0Dd7$Y ztOKEwS6={n-Pe11DPMPq#6Fl&@AaQK&uRxq&>~AXb}b#t#vwy5x{~^t=4l)L>&k1AMZO16maSDx`?Rg z-Do#|dub*W)$-=#lZkNEUNINna<5i~t{^s2;Jgul?y^}o6n;BYV$q9Oc{aqRY-GJx zg^jHEf>Z&%@bn-i`oLVt+fOH$QA{*k9#z$!H<1z7sS-13QMk!VF^fL-A$57 zju>K!i;pO={PtP|O3nmKsX%Rn?(DI4BO)(u$nuF$)|cF%DM&OZ zQ;O9G^27}`4vcis^<2$V?nU}AR&A{l=7x^4Xn|&)0aw`yG&PT1!xy2NjY(Q!YGg!) 
zDtvp4sH3FMLbSk@nHT(l0Dn(mAkUJDfo8wS<(twot}--EB8Zc=JRF?pX}1pqqUiBkbFt3Mul zN#rw>-6?$hMf4F^N+%BrC$bTDF<84~$|?|yRAv;zAh9BadA`w3d}1SGkk|XQ?!Vh>dWp+YsR4NNkU!dWTRr;>iHdUd;DgQ}Qqq z?jZ8FUb(2Tc%%m^g%{n8HYm`hqlTopu1jBe&D`4rN7eF)Yp>z=@AN~~zb-@vp)+qo zPD1|S1H-BT8g71K%QQE{73z6Bj?18q?P_zfuDtM`}6u&IiTmy>N*g6Jfxka+};?EO2y2 zg?%-Bx(V3olPL~*|2Y`Nl4IRAP$=p@nz8Y0Y%}dGMvO+tu7nASu$QO!pUG$tl9Qmc z93GP09;kl&I&FdD0fzyWC2Wr~xmausip;$acEsVmhWI;ZA`b4>dV}vR_Zpuy!IBWA z15k!)mNiVa;bXSN}f2K8m=@%I3|=`eob|S ztu!1Fo1(%rDU9k;myJ*|80hYGq+Y}eR^0N~F4M#w#99L74GGk2wjzETOp=hK5eb)4 z%wB%cScYPy^!x5Bs)vTC6o#_MI<{#w^q`l(LJ2Uq3!qBYji@QnY1kLDc>d-tqE1^c zpF+zEClC1}hu3Yas9|JL$ADVHVtn{}jQP#>CcQ@P;HCMz(fFSe`c}bKc4&|^H@*+` zH(Y#*OxY9^ac}iYi;_-WD8OdD0TnND4x*m)5Wp;I%50eEy;uK^D&*nf$rp!6X5yWO zOW%<@FlfkwH|*HYSov|*epC-973}auAjTx|brgG8?mk9$mT0_i#-g)M<*5)4eQ9qIi+aH9VYaD?M{x@3o{Al@pvXrQy4ij>jSe2iVC37mGXE@qMp}B~ z8{S_@VVVYb&X;Ua=(co|d{iUY9&CM#uB^VwL!*wC4aYNWT$$V_AU*n4Jy$lI)tzBS zbcv0sLRCn_BzC0&EF#W|gJm{$qDK_7cbDa;901pSKjONE5j^IatP0Jqi6B!b6#eG6wh32hjO3@ zenr>K`|N{JfKQ1L&w^ZV!EI?ByR!mT8T@avsNDk$%91j&SR8 znFHgjvj;Cv#ue7#?L!MJ#v0tJm&uwJAjs{rhi~W+TXA5eLiJ(R%~3sYBNi4QyUB z7VA{`|1>)^X5EMp9WN5Q{X+S?Q|z01h@^|U86)-lII}PSm`nwPi)@}EbCVFf`!yY} z!6C#+$-G!iN4F@9tyEQz3Uu7Z_(((2cP}K1?4IbVd2MY@Ei?(>_uX3~oM=UjB-`cL z=0Bb0==~X)wcjMvQiLbx-$0HZ^rB&wT4}x>ef7?tlyvqn9)zy>1xv;Ba55vl-T8xA zFDXb;p()b0B?~322t-=ZLd#pGD^3fnJR^--K8H5ETW|NeT3vI%v>GAY71%_w#a{Bu zaD2t_5vH*8^1H!R$|Oefw8p~V*jQa2S5BZkgqu^MKbZux&L{%I=pJ25IFCO8h`k^UbIN9ERHvh?Nh4st&S*~L&<5D;Kb6+J`B7&I2ep2(j9m7V@y@>F#&jwc@>*OqG?Q+tA=0}H zcx}rx5a_uWpA-9wZo)@3&*6|$CdQ_ZnuT0Vj`L@ukYngr%s>B4S{N6O;9iO>u-b45z#cp-1jDv|5fmrSo{8^$8ZW>u!X%Q4J$D`0({iXr_ywH3O%u%5{ z(iFPpHlTB`(LYid<8%?Z0<#kFoMuTFCN%%uP{CwL{wAlnwyB ztX?GvHPZzIeVmVOoX4I3jwj{Gh(jYDh27fzf2 diff --git a/.ci/pypi-check.py b/.ci/pypi-check.py deleted file mode 100755 index 1b9c11c4..00000000 --- a/.ci/pypi-check.py +++ /dev/null @@ -1,30 +0,0 @@ -#!/usr/bin/env python3 - - -import argparse -import sys -import xmlrpc.client - - -def main(): - parser = 
argparse.ArgumentParser(description='PyPI package checker') - parser.add_argument('package_name', metavar='PACKAGE-NAME') - - parser.add_argument( - '--pypi-index-url', - help=('PyPI index URL.'), - default='https://pypi.python.org/pypi') - - args = parser.parse_args() - - pypi = xmlrpc.client.ServerProxy(args.pypi_index_url) - releases = pypi.package_releases(args.package_name) - - if releases: - print(next(iter(sorted(releases, reverse=True)))) - - return 0 - - -if __name__ == '__main__': - sys.exit(main()) diff --git a/.ci/requirements-publish.txt b/.ci/requirements-publish.txt deleted file mode 100644 index 403ef596..00000000 --- a/.ci/requirements-publish.txt +++ /dev/null @@ -1,2 +0,0 @@ -tinys3 -twine diff --git a/.ci/s3-download-release.py b/.ci/s3-download-release.py deleted file mode 100755 index 223f7f17..00000000 --- a/.ci/s3-download-release.py +++ /dev/null @@ -1,74 +0,0 @@ -#!/usr/bin/env python3 - - -import argparse -import os -import os.path -import sys -import urllib.request - -import tinys3 - - -def main(): - parser = argparse.ArgumentParser(description='S3 File Uploader') - parser.add_argument( - '--s3-bucket', - help=('S3 bucket name (defaults to $S3_UPLOAD_BUCKET)'), - default=os.environ.get('S3_UPLOAD_BUCKET')) - parser.add_argument( - '--s3-region', - help=('S3 region (defaults to $S3_UPLOAD_REGION)'), - default=os.environ.get('S3_UPLOAD_REGION')) - parser.add_argument( - '--s3-username', - help=('S3 username (defaults to $S3_UPLOAD_USERNAME)'), - default=os.environ.get('S3_UPLOAD_USERNAME')) - parser.add_argument( - '--s3-key', - help=('S3 access key (defaults to $S3_UPLOAD_ACCESSKEY)'), - default=os.environ.get('S3_UPLOAD_ACCESSKEY')) - parser.add_argument( - '--s3-secret', - help=('S3 secret (defaults to $S3_UPLOAD_SECRET)'), - default=os.environ.get('S3_UPLOAD_SECRET')) - parser.add_argument( - '--destdir', - help='Destination directory.') - parser.add_argument( - 'package', metavar='PACKAGE', - help='Package name and version to 
download.') - - args = parser.parse_args() - - if args.s3_region: - endpoint = 's3-{}.amazonaws.com'.format(args.s3_region.lower()) - else: - endpoint = 's3.amazonaws.com' - - conn = tinys3.Connection( - access_key=args.s3_key, - secret_key=args.s3_secret, - default_bucket=args.s3_bucket, - tls=True, - endpoint=endpoint, - ) - - files = [] - - for entry in conn.list(args.package): - files.append(entry['key']) - - destdir = args.destdir or os.getpwd() - - for file in files: - print('Downloading {}...'.format(file)) - url = 'https://{}/{}/{}'.format(endpoint, args.s3_bucket, file) - target = os.path.join(destdir, file) - urllib.request.urlretrieve(url, target) - - return 0 - - -if __name__ == '__main__': - sys.exit(main()) diff --git a/.ci/s3-upload.py b/.ci/s3-upload.py deleted file mode 100755 index 92479afe..00000000 --- a/.ci/s3-upload.py +++ /dev/null @@ -1,62 +0,0 @@ -#!/usr/bin/env python3 - - -import argparse -import glob -import os -import os.path -import sys - -import tinys3 - - -def main(): - parser = argparse.ArgumentParser(description='S3 File Uploader') - parser.add_argument( - '--s3-bucket', - help=('S3 bucket name (defaults to $S3_UPLOAD_BUCKET)'), - default=os.environ.get('S3_UPLOAD_BUCKET')) - parser.add_argument( - '--s3-region', - help=('S3 region (defaults to $S3_UPLOAD_REGION)'), - default=os.environ.get('S3_UPLOAD_REGION')) - parser.add_argument( - '--s3-username', - help=('S3 username (defaults to $S3_UPLOAD_USERNAME)'), - default=os.environ.get('S3_UPLOAD_USERNAME')) - parser.add_argument( - '--s3-key', - help=('S3 access key (defaults to $S3_UPLOAD_ACCESSKEY)'), - default=os.environ.get('S3_UPLOAD_ACCESSKEY')) - parser.add_argument( - '--s3-secret', - help=('S3 secret (defaults to $S3_UPLOAD_SECRET)'), - default=os.environ.get('S3_UPLOAD_SECRET')) - parser.add_argument( - 'files', nargs='+', metavar='FILE', help='Files to upload') - - args = parser.parse_args() - - if args.s3_region: - endpoint = 
's3-{}.amazonaws.com'.format(args.s3_region.lower()) - else: - endpoint = 's3.amazonaws.com' - - conn = tinys3.Connection( - access_key=args.s3_key, - secret_key=args.s3_secret, - default_bucket=args.s3_bucket, - tls=True, - endpoint=endpoint, - ) - - for pattern in args.files: - for fn in glob.iglob(pattern): - with open(fn, 'rb') as f: - conn.upload(os.path.basename(fn), f) - - return 0 - - -if __name__ == '__main__': - sys.exit(main()) diff --git a/.ci/travis-before-install.sh b/.ci/travis-before-install.sh deleted file mode 100755 index dd624ffa..00000000 --- a/.ci/travis-before-install.sh +++ /dev/null @@ -1,51 +0,0 @@ -#!/bin/bash - -set -e -x - -if [ -z "${PGVERSION}" ]; then - echo "Missing PGVERSION environment variable." - exit 1 -fi - -if [[ "${TRAVIS_OS_NAME}" == "linux" && "${BUILD}" == *wheels* ]]; then - sudo service postgresql stop ${PGVERSION} - - echo "port = 5432" | \ - sudo tee --append /etc/postgresql/${PGVERSION}/main/postgresql.conf - - if [[ "${BUILD}" == *wheels* ]]; then - # Allow docker guests to connect to the database - echo "listen_addresses = '*'" | \ - sudo tee --append /etc/postgresql/${PGVERSION}/main/postgresql.conf - echo "host all all 172.17.0.0/16 trust" | \ - sudo tee --append /etc/postgresql/${PGVERSION}/main/pg_hba.conf - - if [ "${PGVERSION}" -ge "11" ]; then - # Disable JIT to avoid unpredictable timings in tests. - echo "jit = off" | \ - sudo tee --append /etc/postgresql/${PGVERSION}/main/postgresql.conf - fi - fi - - sudo pg_ctlcluster ${PGVERSION} main restart -fi - -if [ "${TRAVIS_OS_NAME}" == "osx" ]; then - brew update >/dev/null - brew upgrade pyenv - eval "$(pyenv init -)" - - if ! 
(pyenv versions | grep "${PYTHON_VERSION}$"); then - pyenv install ${PYTHON_VERSION} - fi - pyenv global ${PYTHON_VERSION} - pyenv rehash - - # Install PostgreSQL - if brew ls --versions postgresql > /dev/null; then - brew remove --force --ignore-dependencies postgresql - fi - - brew install postgresql@${PGVERSION} - brew services start postgresql@${PGVERSION} -fi diff --git a/.ci/travis-build-docs.sh b/.ci/travis-build-docs.sh deleted file mode 100755 index 1716e330..00000000 --- a/.ci/travis-build-docs.sh +++ /dev/null @@ -1,11 +0,0 @@ -#!/bin/bash - -set -e -x - -if [[ "${BUILD}" != *docs* ]]; then - echo "Skipping documentation build." - exit 0 -fi - -pip install -U -e .[docs] -make htmldocs SPHINXOPTS="-q -W -j4" diff --git a/.ci/travis-build-wheels.sh b/.ci/travis-build-wheels.sh deleted file mode 100755 index ae8c52af..00000000 --- a/.ci/travis-build-wheels.sh +++ /dev/null @@ -1,85 +0,0 @@ -#!/bin/bash - -set -e -x - - -if [[ "${TRAVIS_BRANCH}" != "releases" || "${BUILD}" != *wheels* ]]; then - # Not a release - exit 0 -fi - - -if [ "${TRAVIS_OS_NAME}" == "osx" ]; then - PYENV_ROOT="$HOME/.pyenv" - PATH="$PYENV_ROOT/bin:$PATH" - eval "$(pyenv init -)" -fi - -PACKAGE_VERSION=$(python ".ci/package-version.py") -PYPI_VERSION=$(python ".ci/pypi-check.py" "${PYMODULE}") - -if [ "${PACKAGE_VERSION}" == "${PYPI_VERSION}" ]; then - echo "${PYMODULE}-${PACKAGE_VERSION} is already published on PyPI" - exit 1 -fi - - -_root="${TRAVIS_BUILD_DIR}" - - -_upload_wheels() { - python "${_root}/.ci/s3-upload.py" "${_root}/dist"/*.whl - sudo rm -rf "${_root}/dist"/*.whl -} - - -pip install -U -r ".ci/requirements-publish.txt" - - -if [ "${TRAVIS_OS_NAME}" == "linux" ]; then - for pyver in ${RELEASE_PYTHON_VERSIONS}; do - ML_PYTHON_VERSION=$(python3 -c \ - "print('cp{maj}{min}-cp{maj}{min}{s}'.format( \ - maj='${pyver}'.split('.')[0], \ - min='${pyver}'.split('.')[1], - s='m' if tuple('${pyver}'.split('.')) < ('3', '8') \ - else ''))") - - if [[ "$(uname -m)" = "x86_64" ]]; 
then - ARCHES="x86_64 i686" - MANYLINUX_VERSION="1" - elif [[ "$(uname -m)" = "aarch64" ]]; then - ARCHES="aarch64" - MANYLINUX_VERSION="2014" - fi - - for arch in $ARCHES; do - ML_IMAGE="quay.io/pypa/manylinux${MANYLINUX_VERSION}_${arch}" - docker pull "${ML_IMAGE}" - docker run --rm \ - -v "${_root}":/io \ - -e "PYMODULE=${PYMODULE}" \ - -e "PYTHON_VERSION=${ML_PYTHON_VERSION}" \ - -e "ASYNCPG_VERSION=${PACKAGE_VERSION}" \ - "${ML_IMAGE}" /io/.ci/build-manylinux-wheels.sh - - _upload_wheels - done - done - -elif [ "${TRAVIS_OS_NAME}" == "osx" ]; then - export PGINSTALLATION="/usr/local/opt/postgresql@${PGVERSION}/bin" - - make clean - python setup.py bdist_wheel --dist-dir /tmp/ - - pip install /tmp/*.whl - mkdir -p "${_root}/dist" - mv /tmp/*.whl "${_root}/dist/" - make -C "${_root}" ASYNCPG_VERSION="${PACKAGE_VERSION}" testinstalled - - _upload_wheels - -else - echo "Cannot build on ${TRAVIS_OS_NAME}." -fi diff --git a/.ci/travis-install.sh b/.ci/travis-install.sh deleted file mode 100755 index b5124eb8..00000000 --- a/.ci/travis-install.sh +++ /dev/null @@ -1,13 +0,0 @@ -#!/bin/bash - -set -e -x - -if [ "${TRAVIS_OS_NAME}" == "osx" ]; then - PYENV_ROOT="$HOME/.pyenv" - PATH="$PYENV_ROOT/bin:$PATH" - eval "$(pyenv init -)" -fi - -pip install --upgrade setuptools pip wheel -pip download --dest=/tmp/deps .[test] -pip install -U --no-index --find-links=/tmp/deps /tmp/deps/* diff --git a/.ci/travis-publish-docs.sh b/.ci/travis-publish-docs.sh deleted file mode 100755 index 95e55c79..00000000 --- a/.ci/travis-publish-docs.sh +++ /dev/null @@ -1,70 +0,0 @@ -#!/bin/bash - -# Based on https://gist.github.com/domenic/ec8b0fc8ab45f39403dd - -set -e -x - -SOURCE_BRANCH="master" -TARGET_BRANCH="gh-pages" -DOC_BUILD_DIR="_build/html/" - -if [ "${TRAVIS_PULL_REQUEST}" != "false" ]; then - echo "Skipping documentation deploy." 
- exit 0 -fi - -pip install -U .[dev] -make htmldocs - -git config --global user.email "infra@magic.io" -git config --global user.name "Travis CI" - -PACKAGE_VERSION=$(python ".ci/package-version.py") -REPO=$(git config remote.origin.url) -SSH_REPO=${REPO/https:\/\/github.com\//git@github.com:} -COMMITISH=$(git rev-parse --verify HEAD) -AUTHOR=$(git show --quiet --format="%aN <%aE>" "${COMMITISH}") - -git clone "${REPO}" docs/gh-pages -cd docs/gh-pages -git checkout "${TARGET_BRANCH}" || git checkout --orphan "${TARGET_BRANCH}" -cd .. - -if [[ ${PACKAGE_VERSION} = *"dev"* ]]; then - VERSION="devel" -else - VERSION="current" -fi - -rm -r "gh-pages/${VERSION}/" -rsync -a "${DOC_BUILD_DIR}/" "gh-pages/${VERSION}/" - -cd gh-pages - -if git diff --quiet --exit-code; then - echo "No changes to documentation." - exit 0 -fi - -git add --all . -git commit -m "Automatic documentation update" --author="${AUTHOR}" - -set +x -echo "Decrypting push key..." -ENCRYPTED_KEY_VAR="encrypted_${DOCS_PUSH_KEY_LABEL}_key" -ENCRYPTED_IV_VAR="encrypted_${DOCS_PUSH_KEY_LABEL}_iv" -ENCRYPTED_KEY=${!ENCRYPTED_KEY_VAR} -ENCRYPTED_IV=${!ENCRYPTED_IV_VAR} -openssl aes-256-cbc -K "${ENCRYPTED_KEY}" -iv "${ENCRYPTED_IV}" \ - -in "${TRAVIS_BUILD_DIR}/.ci/push_key.enc" \ - -out "${TRAVIS_BUILD_DIR}/.ci/push_key" -d -set -x -chmod 600 "${TRAVIS_BUILD_DIR}/.ci/push_key" -eval `ssh-agent -s` -ssh-add "${TRAVIS_BUILD_DIR}/.ci/push_key" - -git push "${SSH_REPO}" "${TARGET_BRANCH}" -rm "${TRAVIS_BUILD_DIR}/.ci/push_key" - -cd "${TRAVIS_BUILD_DIR}" -rm -rf docs/gh-pages diff --git a/.ci/travis-release.sh b/.ci/travis-release.sh deleted file mode 100755 index 3840c620..00000000 --- a/.ci/travis-release.sh +++ /dev/null @@ -1,60 +0,0 @@ -#!/bin/bash - -set -e -x - -if [ -z "${TRAVIS_TAG}" ]; then - # Not a tagged commit. 
- exit 0 -fi - -pip install -U -r ".ci/requirements-publish.txt" - -PACKAGE_VERSION=$(python ".ci/package-version.py") -PYPI_VERSION=$(python ".ci/pypi-check.py" "${PYMODULE}") - -if [ "${PACKAGE_VERSION}" == "${PYPI_VERSION}" ]; then - echo "${PYMODULE}-${PACKAGE_VERSION} is already published on PyPI" - exit 0 -fi - -# Check if all expected wheels have been built and uploaded. -if [[ "$(uname -m)" = "x86_64" ]]; then - release_platforms=( - "macosx_10_??_x86_64" - "manylinux1_i686" - "manylinux1_x86_64" - "win32" - "win_amd64" - ) -elif [[ "$(uname -m)" = "aarch64" ]]; then - release_platforms="manylinux2014_aarch64" -fi - -P="${PYMODULE}-${PACKAGE_VERSION}" -expected_wheels=() - -for pyver in ${RELEASE_PYTHON_VERSIONS}; do - abitag=$(python -c \ - "print('cp{maj}{min}-cp{maj}{min}{s}'.format( \ - maj='${pyver}'.split('.')[0], \ - min='${pyver}'.split('.')[1], - s='m' if tuple('${pyver}'.split('.')) < ('3', '8') else ''))") - for plat in "${release_platforms[@]}"; do - expected_wheels+=("${P}-${abitag}-${plat}.whl") - done -done - -rm -rf dist/*.whl dist/*.tar.* -python setup.py sdist -python ".ci/s3-download-release.py" --destdir=dist/ "${P}" - -_file_exists() { [[ -f $1 ]]; } - -for distfile in "${expected_wheels[@]}"; do - if ! _file_exists dist/${distfile}; then - echo "Expected wheel ${distfile} not found." - exit 1 - fi -done - -python -m twine upload dist/*.whl dist/*.tar.* diff --git a/.ci/travis-tests.sh b/.ci/travis-tests.sh deleted file mode 100755 index 397616c5..00000000 --- a/.ci/travis-tests.sh +++ /dev/null @@ -1,28 +0,0 @@ -#!/bin/bash - -set -e -x - -if [[ "${BUILD}" != *tests* ]]; then - echo "Skipping tests." - exit 0 -fi - -if [ "${TRAVIS_OS_NAME}" == "osx" ]; then - PYENV_ROOT="$HOME/.pyenv" - PATH="$PYENV_ROOT/bin:$PATH" - eval "$(pyenv init -)" -fi - -# Make sure we test with the correct PostgreSQL version. 
-if [ "${TRAVIS_OS_NAME}" == "osx" ]; then - export PGINSTALLATION="/usr/local/opt/postgresql@${PGVERSION}/bin" -else - export PGINSTALLATION="/usr/lib/postgresql/${PGVERSION}/bin" -fi - -if [[ "${BUILD}" == *quicktests* ]]; then - make && make quicktest -else - make && make test - make clean && make debug && make test -fi diff --git a/.github/workflows/build-manylinux-wheels.sh b/.github/workflows/build-manylinux-wheels.sh new file mode 100755 index 00000000..53ed6d33 --- /dev/null +++ b/.github/workflows/build-manylinux-wheels.sh @@ -0,0 +1,25 @@ +#!/bin/bash + +set -e -x + +PY_MAJOR=${PYTHON_VERSION%%.*} +PY_MINOR=${PYTHON_VERSION#*.} + +ML_PYTHON_VERSION="cp${PY_MAJOR}${PY_MINOR}-cp${PY_MAJOR}${PY_MINOR}" +if [ "${PY_MAJOR}" -lt "4" -a "${PY_MINOR}" -lt "8" ]; then + ML_PYTHON_VERSION+="m" +fi + +# Compile wheels +PYTHON="/opt/python/${ML_PYTHON_VERSION}/bin/python" +PIP="/opt/python/${ML_PYTHON_VERSION}/bin/pip" +"${PIP}" install --upgrade setuptools pip wheel +cd "${GITHUB_WORKSPACE}" +make clean +"${PYTHON}" setup.py bdist_wheel + +# Bundle external shared libraries into the wheels. 
+for whl in "${GITHUB_WORKSPACE}"/dist/*.whl; do + auditwheel repair $whl -w "${GITHUB_WORKSPACE}"/dist/ + rm "${GITHUB_WORKSPACE}"/dist/*-linux_*.whl +done diff --git a/.github/workflows/install-postgres.sh b/.github/workflows/install-postgres.sh new file mode 100755 index 00000000..5c6f38bb --- /dev/null +++ b/.github/workflows/install-postgres.sh @@ -0,0 +1,16 @@ +#!/bin/bash + +set -Eexuo pipefail +shopt -s nullglob + +export DEBIAN_FRONTEND=noninteractive + +apt-get install -y --no-install-recommends curl +curl https://www.postgresql.org/media/keys/ACCC4CF8.asc | apt-key add - +mkdir -p /etc/apt/sources.list.d/ +echo "deb https://apt.postgresql.org/pub/repos/apt/ ${DISTRO_NAME}-pgdg main" \ + >> /etc/apt/sources.list.d/pgdg.list +apt-get update +apt-get install -y --no-install-recommends \ + postgresql-${PGVERSION} \ + postgresql-contrib-${PGVERSION} diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 00000000..870d3551 --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,309 @@ +name: Release + +on: + pull_request: + branches: + - "master" + - "ci" + - "[0-9]+.[0-9x]+*" + paths: + - "asyncpg/_version.py" + +jobs: + validate-release-request: + runs-on: ubuntu-latest + steps: + - name: Validate release PR + uses: edgedb/action-release/validate-pr@master + id: checkver + with: + require_team: Release Managers + require_approval: no + github_token: ${{ secrets.RELEASE_BOT_GITHUB_TOKEN }} + version_file: asyncpg/_version.py + version_line_pattern: | + __version__\s*=\s*(?:['"])([[:PEP440:]])(?:['"]) + + - name: Stop if not approved + if: steps.checkver.outputs.approved != 'true' + run: | + echo ::error::PR is not approved yet. 
+ exit 1 + + - name: Store release version for later use + env: + VERSION: ${{ steps.checkver.outputs.version }} + run: | + mkdir -p dist/ + echo "${VERSION}" > dist/VERSION + + - uses: actions/upload-artifact@v1 + with: + name: dist + path: dist/ + + build-sdist: + needs: validate-release-request + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v1 + with: + fetch-depth: 50 + submodules: true + + - name: Set up Python 3.7 + uses: actions/setup-python@v1 + with: + python-version: 3.7 + + - name: Build source distribution + run: | + pip install -U setuptools wheel pip + python setup.py sdist + + - uses: actions/upload-artifact@v1 + with: + name: dist + path: dist/ + + build-wheels: + needs: validate-release-request + runs-on: ${{ matrix.os }} + strategy: + matrix: + python-version: [3.5, 3.6, 3.7, 3.8, 3.9] + os: [ubuntu-20.04, macos-latest, windows-latest] + arch: [x86_64, aarch64] + exclude: + # Python 3.5 is unable to properly + # find the recent VS tooling + # https://bugs.python.org/issue30389 + - os: windows-latest + python-version: 3.5 + - os: windows-latest + arch: aarch64 + - os: macos-latest + arch: aarch64 + + defaults: + run: + shell: bash + + steps: + - uses: actions/checkout@v1 + with: + fetch-depth: 50 + submodules: true + + - name: Set up QEMU + if: matrix.arch == 'aarch64' + uses: docker/setup-qemu-action@v1 + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v1 + with: + python-version: ${{ matrix.python-version }} + + - name: Install Python Deps + run: | + python -m pip install --upgrade setuptools pip wheel + + - name: Build Wheels (linux) + if: startsWith(matrix.os, 'ubuntu') + env: + PYTHON_VERSION: ${{ matrix.python-version }} + ARCH: ${{ matrix.arch }} + run: | + case "${ARCH}" in + x86_64) + mlimg=manylinux1_x86_64 + ;; + aarch64) + mlimg=manylinux2014_aarch64 + ;; + *) + echo "Unsupported wheel arch: ${ARCH}" >&2 + exit 1 + ;; + esac + + docker run --rm \ + -v 
"${GITHUB_WORKSPACE}":/github/workspace:rw \ + --workdir=/github/workspace \ + -e GITHUB_WORKSPACE=/github/workspace \ + -e PYTHON_VERSION="${PYTHON_VERSION}" \ + --entrypoint=/github/workspace/.github/workflows/build-manylinux-wheels.sh \ + quay.io/pypa/${mlimg} + + - name: Build Wheels (non-linux) + if: "!startsWith(matrix.os, 'ubuntu')" + run: | + make clean + python setup.py bdist_wheel + + - name: Test Wheels (native) + if: | + !contains(github.event.pull_request.labels.*.name, 'skip wheel tests') + && matrix.arch == 'x86_64' + env: + OS: ${{ matrix.os }} + run: | + if [ "${OS}" = "windows-latest" ]; then + export PGINSTALLATION="${PGBIN}" + fi + "${GITHUB_WORKSPACE}/.github/workflows/test-wheels.sh" + + - name: Test Wheels (emulated) + if: | + !contains(github.event.pull_request.labels.*.name, 'skip wheel tests') + && matrix.arch != 'x86_64' + env: + PYTHON_VERSION: ${{ matrix.python-version }} + PGVERSION: 13 + DISTRO_NAME: focal + ARCH: ${{ matrix.arch }} + run: | + sudo env DISTRO_NAME="${DISTRO_NAME}" PGVERSION="${PGVERSION}" \ + .github/workflows/install-postgres.sh + # Allow docker guest to connect to the database + echo "port = 5433" | \ + sudo tee --append /etc/postgresql/${PGVERSION}/main/postgresql.conf + echo "listen_addresses = '*'" | \ + sudo tee --append /etc/postgresql/${PGVERSION}/main/postgresql.conf + echo "host all all 172.17.0.0/16 trust" | \ + sudo tee --append /etc/postgresql/${PGVERSION}/main/pg_hba.conf + if [ "${PGVERSION}" -ge "11" ]; then + # Disable JIT to avoid unpredictable timings in tests. 
+ echo "jit = off" | \ + sudo tee --append /etc/postgresql/${PGVERSION}/main/postgresql.conf + fi + sudo pg_ctlcluster ${PGVERSION} main restart + + case "${ARCH}" in + aarch64) + img="docker.io/arm64v8/python:${PYTHON_VERSION}-buster" + ;; + *) + echo "Unsupported wheel arch: ${ARCH}" >&2 + exit 1 + ;; + esac + + docker run --rm \ + -v "${GITHUB_WORKSPACE}":/github/workspace:rw \ + -e GITHUB_WORKSPACE=/github/workspace \ + -e PYTHON_VERSION="${PYTHON_VERSION}" \ + --workdir=/github/workspace/ \ + ${img} \ + /bin/bash -ex -c ' \ + echo GITHUB_WORKSPACE=${GITHUB_WORKSPACE} >> /etc/environment \ + && echo PGHOST=$(ip route | grep default | cut -f3 -d" " | uniq) \ + >> /etc/environment \ + && echo PGPORT=5433 >> /etc/environment \ + && echo PGUSER=postgres >> /etc/environment \ + && echo ENVIRON_FILE /etc/environment >> /etc/login.defs \ + && useradd -m -s /bin/bash test \ + && su -l test /github/workspace/.github/workflows/test-wheels.sh \ + ' + + - uses: actions/upload-artifact@v1 + with: + name: dist + path: dist/ + + publish-docs: + needs: validate-release-request + runs-on: ubuntu-latest + + steps: + - name: Checkout source + uses: actions/checkout@v2 + with: + fetch-depth: 5 + submodules: true + + - name: Set up Python + uses: actions/setup-python@v2 + with: + python-version: 3.8 + + - name: Build docs + run: | + pip install -e .[dev] + make htmldocs + + - name: Checkout gh-pages + uses: actions/checkout@v2 + with: + fetch-depth: 5 + ref: gh-pages + path: docs/gh-pages + + - name: Sync docs + run: | + rsync -a docs/_build/html/ docs/gh-pages/current/ + + - name: Commit and push docs + uses: magicstack/gha-commit-and-push@master + with: + target_branch: gh-pages + workdir: docs/gh-pages + commit_message: Automatic documentation update + github_token: ${{ secrets.RELEASE_BOT_GITHUB_TOKEN }} + ssh_key: ${{ secrets.RELEASE_BOT_SSH_KEY }} + gpg_key: ${{ secrets.RELEASE_BOT_GPG_KEY }} + gpg_key_id: "5C468778062D87BF!" 
+ + publish: + needs: [build-sdist, build-wheels, publish-docs] + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v1 + with: + fetch-depth: 5 + submodules: false + + - uses: actions/download-artifact@v1 + with: + name: dist + path: dist/ + + - name: Extract Release Version + id: relver + run: | + set -e + echo ::set-output name=version::$(cat dist/VERSION) + rm dist/VERSION + + - name: Merge and tag the PR + uses: edgedb/action-release/merge@master + with: + github_token: ${{ secrets.RELEASE_BOT_GITHUB_TOKEN }} + ssh_key: ${{ secrets.RELEASE_BOT_SSH_KEY }} + gpg_key: ${{ secrets.RELEASE_BOT_GPG_KEY }} + gpg_key_id: "5C468778062D87BF!" + tag_name: v${{ steps.relver.outputs.version }} + + - name: Publish Github Release + uses: elprans/gh-action-create-release@master + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + tag_name: v${{ steps.relver.outputs.version }} + release_name: v${{ steps.relver.outputs.version }} + target: ${{ github.event.pull_request.base.ref }} + body: ${{ github.event.pull_request.body }} + draft: true + + - run: | + ls -al dist/ + + - name: Upload to PyPI + uses: pypa/gh-action-pypi-publish@master + with: + user: __token__ + # password: ${{ secrets.PYPI_TOKEN }} + password: ${{ secrets.TEST_PYPI_TOKEN }} + repository_url: https://test.pypi.org/legacy/ diff --git a/.github/workflows/test-wheels.sh b/.github/workflows/test-wheels.sh new file mode 100755 index 00000000..a65828ce --- /dev/null +++ b/.github/workflows/test-wheels.sh @@ -0,0 +1,7 @@ +#!/bin/bash + +set -Eexuo pipefail +shopt -s nullglob + +pip install --pre -f "file:///${GITHUB_WORKSPACE}/dist" asyncpg +make -C "${GITHUB_WORKSPACE}" testinstalled diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml new file mode 100644 index 00000000..d92571a9 --- /dev/null +++ b/.github/workflows/tests.yml @@ -0,0 +1,137 @@ +name: Tests + +on: + push: + branches: + - master + - ci + pull_request: + branches: + - master + +jobs: + test-platforms: + # NOTE: 
this matrix is for testing various combinations of Python and OS + # versions on the system-installed PostgreSQL version (which is usually + # fairly recent). For a PostgreSQL version matrix see the test-postgres + # job. + strategy: + matrix: + python-version: [3.5, 3.6, 3.7, 3.8, 3.9] + os: [ubuntu-latest, macos-latest, windows-latest] + exclude: + # Python 3.5 is unable to properly + # find the recent VS tooling + # https://bugs.python.org/issue30389 + - os: windows-latest + python-version: 3.5 + + runs-on: ${{ matrix.os }} + + defaults: + run: + shell: bash + + steps: + - uses: actions/checkout@v1 + with: + fetch-depth: 50 + submodules: true + + - name: Check if release PR. + uses: edgedb/action-release/validate-pr@master + continue-on-error: true + id: release + with: + github_token: ${{ secrets.RELEASE_BOT_GITHUB_TOKEN }} + version_file: asyncpg/_version.py + version_line_pattern: | + __version__\s*=\s*(?:['"])([[:PEP440:]])(?:['"]) + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v1 + if: steps.release.outputs.version == 0 + with: + python-version: ${{ matrix.python-version }} + + - name: Configure PostgreSQL Environment + if: matrix.os == 'windows-latest' + run: | + echo PGINSTALLATION="${PGBIN}" >> "${GITHUB_ENV}" + + - name: Install Python Deps + if: steps.release.outputs.version == 0 + run: | + pip install -e .[test] + + - name: Test + if: steps.release.outputs.version == 0 + run: | + python setup.py test + + - name: Test under uvloop + if: steps.release.outputs.version == 0 && matrix.os != 'windows-latest' && matrix.python-version != '3.9' + run: | + env USE_UVLOOP=1 python setup.py test + + test-postgres: + strategy: + matrix: + postgres-version: [9.5, 9.6, 10, 11, 12, 13] + + runs-on: ubuntu-20.04 + + steps: + - uses: actions/checkout@v1 + with: + fetch-depth: 50 + submodules: true + + - name: Check if release PR. 
+ uses: edgedb/action-release/validate-pr@master + continue-on-error: true + id: release + with: + github_token: ${{ secrets.RELEASE_BOT_GITHUB_TOKEN }} + version_file: asyncpg/_version.py + version_line_pattern: | + __version__\s*=\s*(?:['"])([[:PEP440:]])(?:['"]) + + - name: Set up PostgreSQL + if: steps.release.outputs.version == 0 + env: + PGVERSION: ${{ matrix.postgres-version }} + DISTRO_NAME: focal + run: | + sudo env DISTRO_NAME="${DISTRO_NAME}" PGVERSION="${PGVERSION}" \ + .github/workflows/install-postgres.sh + echo PGINSTALLATION="/usr/lib/postgresql/${PGVERSION}/bin" \ + >> "${GITHUB_ENV}" + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v1 + if: steps.release.outputs.version == 0 + with: + python-version: ${{ matrix.python-version }} + + - name: Install Python Deps + if: steps.release.outputs.version == 0 + run: | + pip install -e .[test] + + - name: Test + if: steps.release.outputs.version == 0 + env: + PGVERSION: ${{ matrix.postgres-version }} + run: | + python setup.py test + + # This job exists solely to act as the test job aggregate to be + # targeted by branch policies. 
+ regression-tests: + name: "Regression Tests" + needs: [test-platforms, test-postgres] + runs-on: ubuntu-20.04 + + steps: + - run: echo OK diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index 830b0924..00000000 --- a/.travis.yml +++ /dev/null @@ -1,243 +0,0 @@ -language: generic - -env: - global: - - PYMODULE=asyncpg - - RELEASE_PYTHON_VERSIONS="3.5 3.6 3.7 3.8" - - - S3_UPLOAD_USERNAME=oss-ci-bot - - S3_UPLOAD_BUCKET=magicstack-oss-releases - # S3_UPLOAD_ACCESSKEY: - - secure: "iU37gukuyeaYM69StkR/aUTNgolblBdw2is034evvrm/SG0bKyzVVSrcK/dts9jolkCxJi+01VfpzxIBu2PF11QnCN1exUILb+XfmR+dVxUnNY2M1qqjILHvQ92rFJ9f2TlbYa2AlwgKynZlY4+edVSACSWwD/+TbWGAQEp0WInalA8ohljir+EPueXaYyC8mmH55cNQIa5WdDA2Vpg5ahRDdhVyD2J+/fLg78syLV7FGlnpXtASo9XiQKmRpPyHIT23yQB444kVh9xcjvuiB3aUBP5bGC2H4unElGYhCvfQvb1GoWvDqyvfzZvTOjlHqnG4AvIPoSCgEu/9cu8Cm/9OxWtqtWy7dECM8ZUIlOi3oPcvwUYDpNYAdATbTr1T6FRCBEp2eOi3sKoeE+nUDgQaN4r+ple4BKYnjrsSllXhI5W8ZqDNoUSsoGu+z6GFn6Dszrj6jbq8JHV4mZT9RCfR1y6inXWYGmaNRlwzm8wPHTav2RbW2O6bbwkkATWwYpyRB2FRlwMX6BB06druZWNOzx09RS8pTHnqcKOXW2mENNMgrA03OJUEV30UG/ncLZELYTpBARSJwymxjmmTK7vEI/HfxHkPrKcLLPPn2GoWym7mF2Lkh+jp81FkCGYrLTquyKPaoeUsofYukWMbGwE99ePL5dLocVDqTzatAoU=" - # S3_UPLOAD_SECRET: - - secure: "uCcM67fmT3ODhaHrUKhuid/DJZzy9aMtaVdRCkAkjaF/X0Dgj1Dq9bV/PPLn+HjVIRiloHKK4fk6eMLmnI/PWPnTD7hVYse2ECLEeTxZGtoBmTdBPzdf1+aHpl18o8xbN/XfO02SVk6pTHDSjZIwR8ZeiZ2Q75aY462BW9WBgV0JOL9xK5yJd3TjODsDJpwcK0P4TMwi1j2qKIpXMUJaZkyUPafZIykil2CbcePd2Y5AUfDN2BAqaJZqM9vVCeRRs7ECzCamBPsj2WUmXqs621IH3Ml/sSONCzeQoUlgUsG2a7b+Jic92sVsFHyLVqG56G5urIAsXm+Jc/8Ly/dTk1M3ER/OdvsB0z21mhQfaVHwROixPk6HPCbvTl3PITEauaU+wLwCIduiEbb6fcpoB11n3oRzgiLY5e4+QDA86LBNySDhBE8WIq1VKphgTp7ojgM/mHJg4VBZX3m+89JruUOLi49VPx1cK/CiWEBj3gWHZMNDL9agS5N/fwl6UnD5DAklTZtqlA5M2FZ8/aPN8/FgW4jTEgBBU87Ko2rTvVRmKZeCVEkIBS2lYsRDTG3ZmlyJuh2AGGReUzCh524pNAsonIF2ydCOzLv4DlTZSthOwbdnX0EMBRYuPEa436dgkVUUVP6ds5859IPZeXcN6JKJWPWQkzFWFwzoK9ttQLc=" - - - TWINE_USERNAME: magicstack-ci - # TWINE_PASSWORD: - - secure: 
"jyc9xHK3VjGPxvBZKx8Mcf5nfVvfIyGn6b4atcrmwVdJsV1bBLdKoAjUX3RGjNGyAHpNYOEKOdNfeZs+Wziwg5NK7ucC5qybaBK3MOTEOInCzaO0QJpcxThaHBQkkDxVtn8Qu1Gk3S/hXcXWjT2UEYJvQ84diaXn/XYRxfzOYTZX8eUroAWOMnUCYxlPxGzXTAtmuQSiJkL7P7veZTsWsGCOHtCpdAx7dgGb113CD8QheeUoZlH9Ml6jd3fGFteYmuFp7cR6fa3VYVzxp5BFsdEJqSI4VqDvBOpUoLkbpRRKMjosHKtphfi0PAzbkJw6UdKcrqQ/Ca4nGmWk0PIf3LTsJrv44p4ZTPVI8b3lihXMm72QUE28e11yu9SIZRe0hMgmvWlivXEJw3C3YT1N5w+JM3Y5dIWp/YLoiRXVkIzNJQMN3YeWvKEFf/xO1AD2BO3jjU9oBZfKQpxCJ58gPsQrRt6qM3Y6zYuF8s4B+llpwM/ex2xnNwrTbNkp4ARyXyCujX+ixhjiBLtElfGoHPP1jOaIkJhGje9DxaptddfFBDLAdq0/3Q+LHOmwdQcH5+libUy3HnyP7jf51kjjWE3XEJGSchHI2ewEAn9UZRH8h0UNRXutBzUVvKgC6K1lUvqzEreKVxvrYe6zgbZc/DiUvLgIzJBiJgP9rdZYpDQ=" - - - DOCS_PUSH_KEY_LABEL=0760b951e99c - -branches: - # Avoid building PR branches. - only: - - master - - ci - - releases - - /^v\d+(\.\d+)*$/ - -jobs: - fast_finish: - true - - include: - # Do quick test runs for each supported version of PostgreSQL - # minus the latest. - - name: "Quicktest psql 9.5" - os: linux - dist: focal - language: python - python: "3.8" - env: BUILD=quicktests PGVERSION=9.5 - addons: - apt: - sources: - - sourceline: 'deb https://apt.postgresql.org/pub/repos/apt/ focal-pgdg main' - key_url: 'https://www.postgresql.org/media/keys/ACCC4CF8.asc' - packages: - - postgresql-9.5 - - postgresql-contrib-9.5 - - - name: "Quicktest psql 9.6" - os: linux - dist: focal - language: python - python: "3.8" - env: BUILD=quicktests PGVERSION=9.6 - addons: - apt: - sources: - - sourceline: 'deb https://apt.postgresql.org/pub/repos/apt/ focal-pgdg main' - key_url: 'https://www.postgresql.org/media/keys/ACCC4CF8.asc' - packages: - - postgresql-9.6 - - postgresql-contrib-9.6 - - - name: "Quicktest psql 10" - os: linux - dist: focal - language: python - python: "3.8" - env: BUILD=quicktests PGVERSION=10 - addons: - apt: - sources: - - sourceline: 'deb https://apt.postgresql.org/pub/repos/apt/ focal-pgdg main' - key_url: 'https://www.postgresql.org/media/keys/ACCC4CF8.asc' - packages: - 
- postgresql-10 - - - name: "Quicktest psql 11" - os: linux - dist: focal - language: python - python: "3.8" - env: BUILD=quicktests PGVERSION=11 - addons: - apt: - sources: - - sourceline: 'deb https://apt.postgresql.org/pub/repos/apt/ focal-pgdg main' - key_url: 'https://www.postgresql.org/media/keys/ACCC4CF8.asc' - packages: - - postgresql-11 - - # Do a full test run on the latest supported version of PostgreSQL - # on each supported version of Python. - - name: "Test py 3.5" - os: linux - dist: focal - language: python - python: "3.5" - env: BUILD=tests PGVERSION=12 - addons: - apt: - sources: - - sourceline: 'deb https://apt.postgresql.org/pub/repos/apt/ focal-pgdg main' - key_url: 'https://www.postgresql.org/media/keys/ACCC4CF8.asc' - packages: - - postgresql-12 - - - name: "Test py 3.6" - os: linux - dist: focal - language: python - python: "3.6" - env: BUILD=tests PGVERSION=12 - addons: - apt: - sources: - - sourceline: 'deb https://apt.postgresql.org/pub/repos/apt/ focal-pgdg main' - key_url: 'https://www.postgresql.org/media/keys/ACCC4CF8.asc' - packages: - - postgresql-12 - - - name: "Test py 3.7" - os: linux - dist: focal - language: python - python: "3.7" - env: BUILD=tests PGVERSION=12 - addons: - apt: - sources: - - sourceline: 'deb https://apt.postgresql.org/pub/repos/apt/ focal-pgdg main' - key_url: 'https://www.postgresql.org/media/keys/ACCC4CF8.asc' - packages: - - postgresql-12 - - - name: "Test py 3.8" - os: linux - dist: focal - language: python - python: "3.8" - env: BUILD=tests PGVERSION=12 - addons: - apt: - sources: - - sourceline: 'deb https://apt.postgresql.org/pub/repos/apt/ focal-pgdg main' - key_url: 'https://www.postgresql.org/media/keys/ACCC4CF8.asc' - packages: - - postgresql-12 - - # Only test on recent aarch64 distribution - # 3.7 is the latest supported by Travis - # https://docs.travis-ci.com/user/languages/python/#python-versions - # The shipped Postgres 9.X collides with the 12 on aarch64 - # until fixed, use official ubuntu 
repos - - name: "Test aarch64 py 3.8-dev" - os: linux - arch: arm64 - dist: focal - language: python - python: "3.8-dev" - env: BUILD=tests PGVERSION=12 - addons: - postgresql: "12" - - # Build manylinux wheels. Each wheel will be tested, - # so there is no need for BUILD=tests here. - # Also use this job to publish the releases and build - # the documentation. - - name: "x86 wheels and docs" - os: linux - dist: focal - language: python - python: "3.8" - env: BUILD=wheels,docs,release PGVERSION=12 - services: [docker] - addons: - apt: - sources: - - sourceline: 'deb https://apt.postgresql.org/pub/repos/apt/ focal-pgdg main' - key_url: 'https://www.postgresql.org/media/keys/ACCC4CF8.asc' - packages: - - postgresql-12 - - # Same for the aarch64 manylinux wheel - - name: "aarch64 wheels" - os: linux - arch: arm64 - dist: focal - language: python - python: "3.8-dev" - env: BUILD=wheels,release PGVERSION=12 - services: [docker] - addons: - postgresql: "12" - - - name: "OSX py 3.5" - os: osx - osx_image: xcode10.2 - env: BUILD=tests,wheels PYTHON_VERSION=3.5.9 PGVERSION=12 - - - name: "OSX py 3.6" - os: osx - osx_image: xcode10.2 - env: BUILD=tests,wheels PYTHON_VERSION=3.6.10 PGVERSION=12 - - - name: "OSX py 3.7" - os: osx - osx_image: xcode10.2 - env: BUILD=tests,wheels PYTHON_VERSION=3.7.7 PGVERSION=12 - - - name: "OSX py 3.8" - os: osx - osx_image: xcode10.2 - env: BUILD=tests,wheels PYTHON_VERSION=3.8.3 PGVERSION=12 - -cache: - pip - -before_install: - - .ci/travis-before-install.sh - -install: - - .ci/travis-install.sh - -script: - - .ci/travis-tests.sh - - .ci/travis-build-docs.sh - - .ci/travis-build-wheels.sh - -deploy: - - provider: script - script: .ci/travis-release.sh - on: - tags: true - condition: '"${BUILD}" == *release*' - - - provider: script - script: .ci/travis-publish-docs.sh - on: - branch: master - condition: '"${BUILD}" == *docs*' diff --git a/Makefile b/Makefile index d1033b66..9ad5d2e7 100644 --- a/Makefile +++ b/Makefile @@ -33,7 +33,7 @@ test: 
testinstalled: - cd /tmp && $(PYTHON) $(ROOT)/tests/__init__.py + cd "$${HOME}" && $(PYTHON) $(ROOT)/tests/__init__.py quicktest: diff --git a/asyncpg/__init__.py b/asyncpg/__init__.py index d06718a5..49d1e8a4 100644 --- a/asyncpg/__init__.py +++ b/asyncpg/__init__.py @@ -12,23 +12,8 @@ from .types import * # NOQA -__all__ = ('connect', 'create_pool', 'Record', 'Connection') + \ - exceptions.__all__ # NOQA +from ._version import __version__ # NOQA -# The rules of changing __version__: -# -# In a release revision, __version__ must be set to 'x.y.z', -# and the release revision tagged with the 'vx.y.z' tag. -# For example, asyncpg release 0.15.0 should have -# __version__ set to '0.15.0', and tagged with 'v0.15.0'. -# -# In between releases, __version__ must be set to -# 'x.y+1.0.dev0', so asyncpg revisions between 0.15.0 and -# 0.16.0 should have __version__ set to '0.16.0.dev0' in -# the source. -# -# Source and wheel distributions built from development -# snapshots will automatically include the git revision -# in __version__, for example: '0.16.0.dev0+ge06ad03' -__version__ = '0.22.0.dev0' +__all__ = ('connect', 'create_pool', 'Record', 'Connection') + \ + exceptions.__all__ # NOQA diff --git a/asyncpg/_version.py b/asyncpg/_version.py new file mode 100644 index 00000000..1d2d172d --- /dev/null +++ b/asyncpg/_version.py @@ -0,0 +1,13 @@ +# This file MUST NOT contain anything but the __version__ assignment. +# +# When making a release, change the value of __version__ +# to an appropriate value, and open a pull request against +# the correct branch (master if making a new feature release). +# The commit message MUST contain a properly formatted release +# log, and the commit must be signed. +# +# The release automation will: build and test the packages for the +# supported platforms, publish the packages on PyPI, merge the PR +# to the target branch, create a Git tag pointing to the commit. 
+ +__version__ = '0.22.0.dev0' diff --git a/asyncpg/cluster.py b/asyncpg/cluster.py index 47699351..74d303ce 100644 --- a/asyncpg/cluster.py +++ b/asyncpg/cluster.py @@ -6,11 +6,9 @@ import asyncio -import errno import os import os.path import platform -import random import re import shutil import socket @@ -36,29 +34,15 @@ def platform_exe(name): return name -def find_available_port(port_range=(49152, 65535), max_tries=1000): - low, high = port_range - - port = low - try_no = 0 - - while try_no < max_tries: - try_no += 1 - port = random.randint(low, high) - sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - try: - sock.bind(('127.0.0.1', port)) - except socket.error as e: - if e.errno == errno.EADDRINUSE: - continue - finally: - sock.close() - - break - else: - port = None - - return port +def find_available_port(): + sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + try: + sock.bind(('127.0.0.1', 0)) + return sock.getsockname()[1] + except Exception: + return None + finally: + sock.close() class ClusterError(Exception): @@ -124,6 +108,10 @@ def init(self, **settings): 'cluster in {!r} has already been initialized'.format( self._data_dir)) + settings = dict(settings) + if 'encoding' not in settings: + settings['encoding'] = 'UTF-8' + if settings: settings_args = ['--{}={}'.format(k, v) for k, v in settings.items()] @@ -164,8 +152,8 @@ def start(self, wait=60, *, server_settings={}, **opts): sockdir = server_settings.get('unix_socket_directories') if sockdir is None: sockdir = server_settings.get('unix_socket_directory') - if sockdir is None: - sockdir = '/tmp' + if sockdir is None and _system != 'Windows': + sockdir = tempfile.gettempdir() ssl_key = server_settings.get('ssl_key_file') if ssl_key: @@ -176,12 +164,13 @@ def start(self, wait=60, *, server_settings={}, **opts): server_settings = server_settings.copy() server_settings['ssl_key_file'] = keyfile - if self._pg_version < (9, 3): - sockdir_opt = 'unix_socket_directory' - else: - sockdir_opt 
= 'unix_socket_directories' + if sockdir is not None: + if self._pg_version < (9, 3): + sockdir_opt = 'unix_socket_directory' + else: + sockdir_opt = 'unix_socket_directories' - server_settings[sockdir_opt] = sockdir + server_settings[sockdir_opt] = sockdir for k, v in server_settings.items(): extra_args.extend(['-c', '{}={}'.format(k, v)]) @@ -193,6 +182,14 @@ def start(self, wait=60, *, server_settings={}, **opts): # privileges. if os.getenv('ASYNCPG_DEBUG_SERVER'): stdout = sys.stdout + print( + 'asyncpg.cluster: Running', + ' '.join([ + self._pg_ctl, 'start', '-D', self._data_dir, + '-o', ' '.join(extra_args) + ]), + file=sys.stderr, + ) else: stdout = subprocess.DEVNULL @@ -642,7 +639,7 @@ def start(self, wait=60, *, server_settings={}, **opts): if self._pg_version >= (12, 0): server_settings = server_settings.copy() server_settings['primary_conninfo'] = ( - 'host={host} port={port} user={user}'.format( + '"host={host} port={port} user={user}"'.format( host=self._master['host'], port=self._master['port'], user=self._repl_user, diff --git a/docs/conf.py b/docs/conf.py index d638d31f..0ea293b8 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -7,7 +7,7 @@ sys.path.insert(0, os.path.abspath('..')) version_file = os.path.join(os.path.dirname(os.path.dirname(__file__)), - 'asyncpg', '__init__.py') + 'asyncpg', '_version.py') with open(version_file, 'r') as f: for line in f: @@ -17,7 +17,7 @@ break else: raise RuntimeError( - 'unable to read the version from asyncpg/__init__.py') + 'unable to read the version from asyncpg/_version.py') # -- General configuration ------------------------------------------------ diff --git a/setup.py b/setup.py index 7ed4064c..03fb9159 100644 --- a/setup.py +++ b/setup.py @@ -69,7 +69,7 @@ readme = f.read() -with open(str(_ROOT / 'asyncpg' / '__init__.py')) as f: +with open(str(_ROOT / 'asyncpg' / '_version.py')) as f: for line in f: if line.startswith('__version__ ='): _, _, version = line.partition('=') @@ -77,7 +77,7 @@ break else: 
raise RuntimeError( - 'unable to read the version from asyncpg/__init__.py') + 'unable to read the version from asyncpg/_version.py') if (_ROOT / '.git').is_dir() and 'dev' in VERSION: @@ -122,7 +122,7 @@ class sdist(setuptools_sdist.sdist, VersionMixin): def make_release_tree(self, base_dir, files): super().make_release_tree(base_dir, files) - self._fix_version(pathlib.Path(base_dir) / 'asyncpg' / '__init__.py') + self._fix_version(pathlib.Path(base_dir) / 'asyncpg' / '_version.py') class build_py(setuptools_build_py.build_py, VersionMixin): diff --git a/tests/test_listeners.py b/tests/test_listeners.py index a4726e2d..1af9627c 100644 --- a/tests/test_listeners.py +++ b/tests/test_listeners.py @@ -144,7 +144,6 @@ async def raise_warning(): expected_msg = { 'context': 'PL/pgSQL function inline_code_block line 2 at RAISE', 'message': 'catch me!', - 'server_source_filename': 'pl_exec.c', 'server_source_function': 'exec_stmt_raise', } @@ -171,12 +170,16 @@ async def raise_warning(): await raise_notice() await raise_warning() + msg = await q1.get() + msg[2].pop('server_source_filename', None) self.assertEqual( - await q1.get(), + msg, (con, exceptions.PostgresLogMessage, expected_msg_notice)) + msg = await q1.get() + msg[2].pop('server_source_filename', None) self.assertEqual( - await q1.get(), + msg, (con, exceptions.PostgresWarning, expected_msg_warn)) con.remove_log_listener(notice_callb) From 0e0eb8d35e4804f681ebac4bc0e77a538d0f4fea Mon Sep 17 00:00:00 2001 From: Karpushkin Gleb Date: Sun, 29 Nov 2020 21:45:48 +0300 Subject: [PATCH 015/193] Expose Pool as asyncpg.Pool (#669) This makes `Pool` accessible as `asyncpg.Pool` for ease of typing annotations and subclassing. 
Fixes: #643 --- asyncpg/__init__.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/asyncpg/__init__.py b/asyncpg/__init__.py index 49d1e8a4..01af7904 100644 --- a/asyncpg/__init__.py +++ b/asyncpg/__init__.py @@ -7,7 +7,7 @@ from .connection import connect, Connection # NOQA from .exceptions import * # NOQA -from .pool import create_pool # NOQA +from .pool import create_pool, Pool # NOQA from .protocol import Record # NOQA from .types import * # NOQA @@ -15,5 +15,7 @@ from ._version import __version__ # NOQA -__all__ = ('connect', 'create_pool', 'Record', 'Connection') + \ - exceptions.__all__ # NOQA +__all__ = ( + ('connect', 'create_pool', 'Pool', 'Record', 'Connection') + + exceptions.__all__ # NOQA +) From ddadce91315473b52a653e1edde6080e14377469 Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Sun, 29 Nov 2020 10:57:55 -0800 Subject: [PATCH 016/193] Remove remnants of the old CI --- README.rst | 11 ++++------- tests/test_pool.py | 6 +----- 2 files changed, 5 insertions(+), 12 deletions(-) diff --git a/README.rst b/README.rst index fac5744a..86c52524 100644 --- a/README.rst +++ b/README.rst @@ -1,14 +1,11 @@ asyncpg -- A fast PostgreSQL Database Client Library for Python/asyncio ======================================================================= -.. image:: https://travis-ci.org/MagicStack/asyncpg.svg?branch=master - :target: https://travis-ci.org/MagicStack/asyncpg - -.. image:: https://ci.appveyor.com/api/projects/status/9rwppnxphgc8bqoj/branch/master?svg=true - :target: https://ci.appveyor.com/project/magicstack/asyncpg - +.. image:: https://github.com/MagicStack/asyncpg/workflows/Tests/badge.svg + :target: https://github.com/MagicStack/asyncpg/actions?query=workflow%3ATests+branch%3Amaster + :alt: GitHub Actions status .. 
image:: https://img.shields.io/pypi/v/asyncpg.svg - :target: https://pypi.python.org/pypi/asyncpg + :target: https://pypi.python.org/pypi/asyncpg **asyncpg** is a database interface library designed specifically for PostgreSQL and Python/asyncio. asyncpg is an efficient, clean implementation diff --git a/tests/test_pool.py b/tests/test_pool.py index 9857dceb..8a237323 100644 --- a/tests/test_pool.py +++ b/tests/test_pool.py @@ -24,11 +24,7 @@ _system = platform.uname().system -if os.environ.get('TRAVIS_OS_NAME') == 'osx': - # Travis' macOS is _slow_. - POOL_NOMINAL_TIMEOUT = 0.5 -else: - POOL_NOMINAL_TIMEOUT = 0.1 +POOL_NOMINAL_TIMEOUT = 0.1 class SlowResetConnection(pg_connection.Connection): From 16183aa06c6a7bea1e9b03bc65dfaf61d637fe7e Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Sun, 29 Nov 2020 14:51:39 -0800 Subject: [PATCH 017/193] Prefer SSL connections by default (#660) Switch the default SSL mode from 'disabled' to 'prefer'. This matches libpq's behavior and is a sensible thing to do. 
Fixes: #654 --- asyncpg/connect_utils.py | 19 ++++++---------- asyncpg/connection.py | 26 +++++++++++++++++++++- tests/test_connect.py | 48 ++++++++++++++++++++-------------------- 3 files changed, 56 insertions(+), 37 deletions(-) diff --git a/asyncpg/connect_utils.py b/asyncpg/connect_utils.py index 65261664..acfe87e4 100644 --- a/asyncpg/connect_utils.py +++ b/asyncpg/connect_utils.py @@ -380,6 +380,7 @@ def _parse_connect_dsn_and_args(*, dsn, host, port, user, passfile=passfile) addrs = [] + have_tcp_addrs = False for h, p in zip(host, port): if h.startswith('/'): # UNIX socket name @@ -389,6 +390,7 @@ def _parse_connect_dsn_and_args(*, dsn, host, port, user, else: # TCP host/port addrs.append((h, p)) + have_tcp_addrs = True if not addrs: raise ValueError( @@ -397,6 +399,9 @@ def _parse_connect_dsn_and_args(*, dsn, host, port, user, if ssl is None: ssl = os.getenv('PGSSLMODE') + if ssl is None and have_tcp_addrs: + ssl = 'prefer' + # ssl_is_advisory is only allowed to come from the sslmode parameter. 
ssl_is_advisory = None if isinstance(ssl, str): @@ -435,14 +440,8 @@ def _parse_connect_dsn_and_args(*, dsn, host, port, user, if sslmode <= SSLMODES['require']: ssl.verify_mode = ssl_module.CERT_NONE ssl_is_advisory = sslmode <= SSLMODES['prefer'] - - if ssl: - for addr in addrs: - if isinstance(addr, str): - # UNIX socket - raise exceptions.InterfaceError( - '`ssl` parameter can only be enabled for TCP addresses, ' - 'got a UNIX socket path: {!r}'.format(addr)) + elif ssl is True: + ssl = ssl_module.create_default_context() if server_settings is not None and ( not isinstance(server_settings, dict) or @@ -542,9 +541,6 @@ def connection_lost(self, exc): async def _create_ssl_connection(protocol_factory, host, port, *, loop, ssl_context, ssl_is_advisory=False): - if ssl_context is True: - ssl_context = ssl_module.create_default_context() - tr, pr = await loop.create_connection( lambda: TLSUpgradeProto(loop, host, port, ssl_context, ssl_is_advisory), @@ -625,7 +621,6 @@ async def _connect_addr( if isinstance(addr, str): # UNIX socket - assert not params.ssl connector = loop.create_unix_connection(proto_factory, addr) elif params.ssl: connector = _create_ssl_connection( diff --git a/asyncpg/connection.py b/asyncpg/connection.py index 5942d920..563234dd 100644 --- a/asyncpg/connection.py +++ b/asyncpg/connection.py @@ -1869,7 +1869,28 @@ async def connect(dsn=None, *, Pass ``True`` or an `ssl.SSLContext `_ instance to require an SSL connection. If ``True``, a default SSL context returned by `ssl.create_default_context() `_ - will be used. + will be used. The value can also be one of the following strings: + + - ``'disable'`` - SSL is disabled (equivalent to ``False``) + - ``'prefer'`` - try SSL first, fallback to non-SSL connection + if SSL connection fails + - ``'allow'`` - currently equivalent to ``'prefer'`` + - ``'require'`` - only try an SSL connection. 
Certificate + verifiction errors are ignored + - ``'verify-ca'`` - only try an SSL connection, and verify + that the server certificate is issued by a trusted certificate + authority (CA) + - ``'verify-full'`` - only try an SSL connection, verify + that the server certificate is issued by a trusted CA and + that the requested server host name matches that in the + certificate. + + The default is ``'prefer'``: try an SSL connection and fallback to + non-SSL connection if that fails. + + .. note:: + + *ssl* is ignored for Unix domain socket communication. :param dict server_settings: An optional dict of server runtime parameters. Refer to @@ -1926,6 +1947,9 @@ async def connect(dsn=None, *, .. versionchanged:: 0.22.0 Added the *record_class* parameter. + .. versionchanged:: 0.22.0 + The *ssl* argument now defaults to ``'prefer'``. + .. _SSLContext: https://docs.python.org/3/library/ssl.html#ssl.SSLContext .. _create_default_context: https://docs.python.org/3/library/ssl.html#ssl.create_default_context diff --git a/tests/test_connect.py b/tests/test_connect.py index 116b8ad9..af927426 100644 --- a/tests/test_connect.py +++ b/tests/test_connect.py @@ -318,7 +318,9 @@ class TestConnectParams(tb.TestCase): 'result': ([('host', 123)], { 'user': 'user', 'password': 'passw', - 'database': 'testdb'}) + 'database': 'testdb', + 'ssl': True, + 'ssl_is_advisory': True}) }, { @@ -384,7 +386,7 @@ class TestConnectParams(tb.TestCase): 'user': 'user3', 'password': '123123', 'database': 'abcdef', - 'ssl': ssl.SSLContext, + 'ssl': True, 'ssl_is_advisory': True}) }, @@ -461,7 +463,7 @@ class TestConnectParams(tb.TestCase): 'user': 'me', 'password': 'ask', 'database': 'db', - 'ssl': ssl.SSLContext, + 'ssl': True, 'ssl_is_advisory': False}) }, @@ -545,6 +547,7 @@ class TestConnectParams(tb.TestCase): { 'user': 'user', 'database': 'user', + 'ssl': None } ) }, @@ -574,7 +577,9 @@ class TestConnectParams(tb.TestCase): ('localhost', 5433) ], { 'user': 'spam', - 'database': 'db' + 'database': 
'db', + 'ssl': True, + 'ssl_is_advisory': True } ) }, @@ -617,7 +622,7 @@ def run_testcase(self, testcase): password = testcase.get('password') passfile = testcase.get('passfile') database = testcase.get('database') - ssl = testcase.get('ssl') + sslmode = testcase.get('ssl') server_settings = testcase.get('server_settings') expected = testcase.get('result') @@ -640,21 +645,26 @@ def run_testcase(self, testcase): addrs, params = connect_utils._parse_connect_dsn_and_args( dsn=dsn, host=host, port=port, user=user, password=password, - passfile=passfile, database=database, ssl=ssl, + passfile=passfile, database=database, ssl=sslmode, connect_timeout=None, server_settings=server_settings) - params = {k: v for k, v in params._asdict().items() - if v is not None} + params = { + k: v for k, v in params._asdict().items() + if v is not None or (expected is not None and k in expected[1]) + } + + if isinstance(params.get('ssl'), ssl.SSLContext): + params['ssl'] = True result = (addrs, params) if expected is not None: - for k, v in expected[1].items(): - # If `expected` contains a type, allow that to "match" any - # instance of that type tyat `result` may contain. We need - # this because different SSLContexts don't compare equal. - if isinstance(v, type) and isinstance(result[1].get(k), v): - result[1][k] = v + if 'ssl' not in expected[1]: + # Avoid the hassle of specifying the default SSL mode + # unless explicitly tested for. 
+ params.pop('ssl', None) + params.pop('ssl_is_advisory', None) + self.assertEqual(expected, result, 'Testcase: {}'.format(testcase)) def test_test_connect_params_environ(self): @@ -1063,16 +1073,6 @@ async def verify_fails(sslmode): await verify_fails('verify-ca') await verify_fails('verify-full') - async def test_connection_ssl_unix(self): - ssl_context = ssl.SSLContext(ssl.PROTOCOL_SSLv23) - ssl_context.load_verify_locations(SSL_CA_CERT_FILE) - - with self.assertRaisesRegex(asyncpg.InterfaceError, - 'can only be enabled for TCP addresses'): - await self.connect( - host='/tmp', - ssl=ssl_context) - async def test_connection_implicit_host(self): conn_spec = self.get_connection_spec() con = await asyncpg.connect( From b53f03847313898bacaaea42f46ad61853acd565 Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Tue, 1 Dec 2020 17:37:16 -0800 Subject: [PATCH 018/193] Add codecs for a bunch of new builtin types (#665) The last round of support for esoteric builtin types was quite a while ago, so catch up. Out of non-internal types this adds support for the new `jsonpath` type. Fixes: #635. 
--- asyncpg/pgproto | 2 +- asyncpg/protocol/codecs/pgproto.pyx | 39 +++++++++++++++++++++++++---- asyncpg/protocol/pgtypes.pxi | 22 +++++++++++++++- tests/test_codecs.py | 16 ++++++++++++ tools/generate_type_map.py | 6 +++++ 5 files changed, 78 insertions(+), 7 deletions(-) diff --git a/asyncpg/pgproto b/asyncpg/pgproto index 2dc7cc46..126bcd24 160000 --- a/asyncpg/pgproto +++ b/asyncpg/pgproto @@ -1 +1 @@ -Subproject commit 2dc7cc4635b08418dfbc6105b45947244102e86d +Subproject commit 126bcd24bd3c59d581613dae026e2721efbedf16 diff --git a/asyncpg/protocol/codecs/pgproto.pyx b/asyncpg/protocol/codecs/pgproto.pyx index accebcd2..11417d45 100644 --- a/asyncpg/protocol/codecs/pgproto.pyx +++ b/asyncpg/protocol/codecs/pgproto.pyx @@ -180,6 +180,10 @@ cdef init_json_codecs(): pgproto.jsonb_encode, pgproto.jsonb_decode, PG_FORMAT_BINARY) + register_core_codec(JSONPATHOID, + pgproto.jsonpath_encode, + pgproto.jsonpath_decode, + PG_FORMAT_BINARY) cdef init_int_codecs(): @@ -229,6 +233,17 @@ cdef init_pseudo_codecs(): pgproto.uint4_decode, PG_FORMAT_BINARY) + # 64-bit OID types + oid8_types = [ + XID8OID, + ] + + for oid_type in oid8_types: + register_core_codec(oid_type, + pgproto.uint8_encode, + pgproto.uint8_decode, + PG_FORMAT_BINARY) + # reg* types -- these are really system catalog OIDs, but # allow the catalog object name as an input. 
We could just # decode these as OIDs, but handling them as text seems more @@ -237,7 +252,7 @@ cdef init_pseudo_codecs(): reg_types = [ REGPROCOID, REGPROCEDUREOID, REGOPEROID, REGOPERATOROID, REGCLASSOID, REGTYPEOID, REGCONFIGOID, REGDICTIONARYOID, - REGNAMESPACEOID, REGROLEOID, REFCURSOROID + REGNAMESPACEOID, REGROLEOID, REFCURSOROID, REGCOLLATIONOID, ] for reg_type in reg_types: @@ -256,8 +271,10 @@ cdef init_pseudo_codecs(): no_io_types = [ ANYOID, TRIGGEROID, EVENT_TRIGGEROID, LANGUAGE_HANDLEROID, FDW_HANDLEROID, TSM_HANDLEROID, INTERNALOID, OPAQUEOID, - ANYELEMENTOID, ANYNONARRAYOID, PG_DDL_COMMANDOID, - INDEX_AM_HANDLEROID, + ANYELEMENTOID, ANYNONARRAYOID, ANYCOMPATIBLEOID, + ANYCOMPATIBLEARRAYOID, ANYCOMPATIBLENONARRAYOID, + ANYCOMPATIBLERANGEOID, PG_DDL_COMMANDOID, INDEX_AM_HANDLEROID, + TABLE_AM_HANDLEROID, ] register_core_codec(ANYENUMOID, @@ -306,6 +323,13 @@ cdef init_pseudo_codecs(): pgproto.text_decode, PG_FORMAT_TEXT) + # pg_mcv_list is a special type used in pg_statistic_ext_data + # system catalog + register_core_codec(PG_MCV_LISTOID, + pgproto.bytea_encode, + pgproto.bytea_decode, + PG_FORMAT_BINARY) + cdef init_text_codecs(): textoids = [ @@ -337,8 +361,13 @@ cdef init_tid_codecs(): cdef init_txid_codecs(): register_core_codec(TXID_SNAPSHOTOID, - pgproto.txid_snapshot_encode, - pgproto.txid_snapshot_decode, + pgproto.pg_snapshot_encode, + pgproto.pg_snapshot_decode, + PG_FORMAT_BINARY) + + register_core_codec(PG_SNAPSHOTOID, + pgproto.pg_snapshot_encode, + pgproto.pg_snapshot_decode, PG_FORMAT_BINARY) diff --git a/asyncpg/protocol/pgtypes.pxi b/asyncpg/protocol/pgtypes.pxi index 1be40fb2..d0cc22a6 100644 --- a/asyncpg/protocol/pgtypes.pxi +++ b/asyncpg/protocol/pgtypes.pxi @@ -10,7 +10,7 @@ DEF INVALIDOID = 0 DEF MAXBUILTINOID = 9999 -DEF MAXSUPPORTEDOID = 4096 +DEF MAXSUPPORTEDOID = 5080 DEF BOOLOID = 16 DEF BYTEAOID = 17 @@ -30,6 +30,7 @@ DEF JSONOID = 114 DEF XMLOID = 142 DEF PG_NODE_TREEOID = 194 DEF SMGROID = 210 +DEF TABLE_AM_HANDLEROID = 
269 DEF INDEX_AM_HANDLEROID = 325 DEF POINTOID = 600 DEF LSEGOID = 601 @@ -96,8 +97,17 @@ DEF REGDICTIONARYOID = 3769 DEF JSONBOID = 3802 DEF ANYRANGEOID = 3831 DEF EVENT_TRIGGEROID = 3838 +DEF JSONPATHOID = 4072 DEF REGNAMESPACEOID = 4089 DEF REGROLEOID = 4096 +DEF REGCOLLATIONOID = 4191 +DEF PG_MCV_LISTOID = 5017 +DEF PG_SNAPSHOTOID = 5038 +DEF XID8OID = 5069 +DEF ANYCOMPATIBLEOID = 5077 +DEF ANYCOMPATIBLEARRAYOID = 5078 +DEF ANYCOMPATIBLENONARRAYOID = 5079 +DEF ANYCOMPATIBLERANGEOID = 5080 cdef ARRAY_TYPES = (_TEXTOID, _OIDOID,) @@ -105,6 +115,10 @@ BUILTIN_TYPE_OID_MAP = { ABSTIMEOID: 'abstime', ACLITEMOID: 'aclitem', ANYARRAYOID: 'anyarray', + ANYCOMPATIBLEARRAYOID: 'anycompatiblearray', + ANYCOMPATIBLENONARRAYOID: 'anycompatiblenonarray', + ANYCOMPATIBLEOID: 'anycompatible', + ANYCOMPATIBLERANGEOID: 'anycompatiblerange', ANYELEMENTOID: 'anyelement', ANYENUMOID: 'anyenum', ANYNONARRAYOID: 'anynonarray', @@ -135,6 +149,7 @@ BUILTIN_TYPE_OID_MAP = { INTERVALOID: 'interval', JSONBOID: 'jsonb', JSONOID: 'json', + JSONPATHOID: 'jsonpath', LANGUAGE_HANDLEROID: 'language_handler', LINEOID: 'line', LSEGOID: 'lseg', @@ -149,13 +164,16 @@ BUILTIN_TYPE_OID_MAP = { PG_DDL_COMMANDOID: 'pg_ddl_command', PG_DEPENDENCIESOID: 'pg_dependencies', PG_LSNOID: 'pg_lsn', + PG_MCV_LISTOID: 'pg_mcv_list', PG_NDISTINCTOID: 'pg_ndistinct', PG_NODE_TREEOID: 'pg_node_tree', + PG_SNAPSHOTOID: 'pg_snapshot', POINTOID: 'point', POLYGONOID: 'polygon', RECORDOID: 'record', REFCURSOROID: 'refcursor', REGCLASSOID: 'regclass', + REGCOLLATIONOID: 'regcollation', REGCONFIGOID: 'regconfig', REGDICTIONARYOID: 'regdictionary', REGNAMESPACEOID: 'regnamespace', @@ -167,6 +185,7 @@ BUILTIN_TYPE_OID_MAP = { REGTYPEOID: 'regtype', RELTIMEOID: 'reltime', SMGROID: 'smgr', + TABLE_AM_HANDLEROID: 'table_am_handler', TEXTOID: 'text', TIDOID: 'tid', TIMEOID: 'time', @@ -184,6 +203,7 @@ BUILTIN_TYPE_OID_MAP = { VARBITOID: 'varbit', VARCHAROID: 'varchar', VOIDOID: 'void', + XID8OID: 'xid8', XIDOID: 'xid', XMLOID: 
'xml', _OIDOID: 'oid[]', diff --git a/tests/test_codecs.py b/tests/test_codecs.py index 8ecbd092..33409911 100644 --- a/tests/test_codecs.py +++ b/tests/test_codecs.py @@ -271,6 +271,9 @@ def _system_timezone(): '[1, 2, 3, 4]', '{"a": [1, 2], "b": 0}' ], (9, 4)), + ('jsonpath', 'jsonpath', [ + '$."track"."segments"[*]."HR"?(@ > 130)', + ], (12, 0)), ('oid[]', 'oid[]', [ [1, 2, 3, 4], [] @@ -389,6 +392,19 @@ def _system_timezone(): ('txid_snapshot', 'txid_snapshot', [ (100, 1000, (100, 200, 300, 400)) ]), + ('pg_snapshot', 'pg_snapshot', [ + (100, 1000, (100, 200, 300, 400)) + ], (13, 0)), + ('xid', 'xid', ( + 2 ** 32 - 1, + 0, + 1, + )), + ('xid8', 'xid8', ( + 2 ** 64 - 1, + 0, + 1, + ), (13, 0)), ('varbit', 'varbit', [ asyncpg.BitString('0000 0001'), asyncpg.BitString('00010001'), diff --git a/tools/generate_type_map.py b/tools/generate_type_map.py index e8f14504..8fa816a4 100755 --- a/tools/generate_type_map.py +++ b/tools/generate_type_map.py @@ -33,7 +33,13 @@ 'real': 'float4', 'double precision': 'float8', 'timestamp with timezone': 'timestamptz', + 'timestamp without timezone': 'timestamp', 'time with timezone': 'timetz', + 'time without timezone': 'time', + 'char': 'bpchar', + 'character': 'bpchar', + 'character varying': 'varchar', + 'bit varying': 'varbit' } From 50f964fc3e814f035c198a0e6aec1706681d2718 Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Thu, 26 Nov 2020 15:28:13 -0800 Subject: [PATCH 019/193] Prohibit custom codecs on domains Postgres always includes the base type OID in the RowDescription message even if the query is technically returning domain values. This makes custom codecs on domains ineffective, and so prohibit them to avoid confusion and bug reports. See postgres/postgres@d9b679c and https://postgr.es/m/27307.1047485980%40sss.pgh.pa.us for context. Fixes: #457. 
--- asyncpg/connection.py | 11 +++++++++- asyncpg/exceptions/_base.py | 7 ++++++- asyncpg/introspection.py | 4 ++++ asyncpg/protocol/codecs/base.pyx | 9 ++++---- tests/test_codecs.py | 36 +++++++++++++------------------- 5 files changed, 38 insertions(+), 29 deletions(-) diff --git a/asyncpg/connection.py b/asyncpg/connection.py index 563234dd..e2355aa8 100644 --- a/asyncpg/connection.py +++ b/asyncpg/connection.py @@ -1160,9 +1160,18 @@ async def set_type_codec(self, typename, *, self._check_open() typeinfo = await self._introspect_type(typename, schema) if not introspection.is_scalar_type(typeinfo): - raise ValueError( + raise exceptions.InterfaceError( 'cannot use custom codec on non-scalar type {}.{}'.format( schema, typename)) + if introspection.is_domain_type(typeinfo): + raise exceptions.UnsupportedClientFeatureError( + 'custom codecs on domain types are not supported', + hint='Set the codec on the base type.', + detail=( + 'PostgreSQL does not distinguish domains from ' + 'their base types in query results at the protocol level.' 
+ ) + ) oid = typeinfo['oid'] self._protocol.get_settings().add_python_codec( diff --git a/asyncpg/exceptions/_base.py b/asyncpg/exceptions/_base.py index 6b068f2f..fa96a595 100644 --- a/asyncpg/exceptions/_base.py +++ b/asyncpg/exceptions/_base.py @@ -12,7 +12,8 @@ __all__ = ('PostgresError', 'FatalPostgresError', 'UnknownPostgresError', 'InterfaceError', 'InterfaceWarning', 'PostgresLogMessage', - 'InternalClientError', 'OutdatedSchemaCacheError', 'ProtocolError') + 'InternalClientError', 'OutdatedSchemaCacheError', 'ProtocolError', + 'UnsupportedClientFeatureError') def _is_asyncpg_class(cls): @@ -214,6 +215,10 @@ class DataError(InterfaceError, ValueError): """An error caused by invalid query input.""" +class UnsupportedClientFeatureError(InterfaceError): + """Requested feature is unsupported by asyncpg.""" + + class InterfaceWarning(InterfaceMessage, UserWarning): """A warning caused by an improper use of asyncpg API.""" diff --git a/asyncpg/introspection.py b/asyncpg/introspection.py index 4854e712..cca07cef 100644 --- a/asyncpg/introspection.py +++ b/asyncpg/introspection.py @@ -168,3 +168,7 @@ def is_scalar_type(typeinfo) -> bool: typeinfo['kind'] in SCALAR_TYPE_KINDS and not typeinfo['elemtype'] ) + + +def is_domain_type(typeinfo) -> bool: + return typeinfo['kind'] == b'd' diff --git a/asyncpg/protocol/codecs/base.pyx b/asyncpg/protocol/codecs/base.pyx index 1c930cd0..d24cb66d 100644 --- a/asyncpg/protocol/codecs/base.pyx +++ b/asyncpg/protocol/codecs/base.pyx @@ -66,14 +66,14 @@ cdef class Codec: self.decoder = &self.decode_array_text elif type == CODEC_RANGE: if format != PG_FORMAT_BINARY: - raise NotImplementedError( + raise exceptions.UnsupportedClientFeatureError( 'cannot decode type "{}"."{}": text encoding of ' 'range types is not supported'.format(schema, name)) self.encoder = &self.encode_range self.decoder = &self.decode_range elif type == CODEC_COMPOSITE: if format != PG_FORMAT_BINARY: - raise NotImplementedError( + raise 
exceptions.UnsupportedClientFeatureError( 'cannot decode type "{}"."{}": text encoding of ' 'composite types is not supported'.format(schema, name)) self.encoder = &self.encode_composite @@ -675,9 +675,8 @@ cdef class DataCodecConfig: # added builtin types, for which this version of # asyncpg is lacking support. # - raise NotImplementedError( - 'unhandled standard data type {!r} (OID {})'.format( - name, oid)) + raise exceptions.UnsupportedClientFeatureError( + f'unhandled standard data type {name!r} (OID {oid})') else: # This is a non-BKI type, and as such, has no # stable OID, so no possibility of a builtin codec. diff --git a/tests/test_codecs.py b/tests/test_codecs.py index 33409911..70685aeb 100644 --- a/tests/test_codecs.py +++ b/tests/test_codecs.py @@ -1091,7 +1091,7 @@ async def test_extra_codec_alias(self): # This should fail, as there is no binary codec for # my_dec_t and text decoding of composites is not # implemented. - with self.assertRaises(NotImplementedError): + with self.assertRaises(asyncpg.UnsupportedClientFeatureError): res = await self.con.fetchval(''' SELECT ($1::my_dec_t, 'a=>1'::hstore)::rec_t AS result ''', 44) @@ -1148,7 +1148,7 @@ def hstore_encoder(obj): self.assertEqual(at[0].type, pt[0]) err = 'cannot use custom codec on non-scalar type public._hstore' - with self.assertRaisesRegex(ValueError, err): + with self.assertRaisesRegex(asyncpg.InterfaceError, err): await self.con.set_type_codec('_hstore', encoder=hstore_encoder, decoder=hstore_decoder) @@ -1160,7 +1160,7 @@ def hstore_encoder(obj): try: err = 'cannot use custom codec on non-scalar type ' + \ 'public.mytype' - with self.assertRaisesRegex(ValueError, err): + with self.assertRaisesRegex(asyncpg.InterfaceError, err): await self.con.set_type_codec( 'mytype', encoder=hstore_encoder, decoder=hstore_decoder) @@ -1261,13 +1261,14 @@ async def test_custom_codec_on_domain(self): ''') try: - await self.con.set_type_codec( - 'custom_codec_t', - encoder=lambda v: str(v), - decoder=lambda 
v: int(v)) - - v = await self.con.fetchval('SELECT $1::custom_codec_t', 10) - self.assertEqual(v, 10) + with self.assertRaisesRegex( + asyncpg.UnsupportedClientFeatureError, + 'custom codecs on domain types are not supported' + ): + await self.con.set_type_codec( + 'custom_codec_t', + encoder=lambda v: str(v), + decoder=lambda v: int(v)) finally: await self.con.execute('DROP DOMAIN custom_codec_t') @@ -1666,7 +1667,7 @@ async def test_unknown_type_text_fallback(self): # Text encoding of ranges and composite types # is not supported yet. with self.assertRaisesRegex( - RuntimeError, + asyncpg.UnsupportedClientFeatureError, 'text encoding of range types is not supported'): await self.con.fetchval(''' @@ -1675,7 +1676,7 @@ async def test_unknown_type_text_fallback(self): ''', ['a', 'z']) with self.assertRaisesRegex( - RuntimeError, + asyncpg.UnsupportedClientFeatureError, 'text encoding of composite types is not supported'): await self.con.fetchval(''' @@ -1847,7 +1848,7 @@ async def test_custom_codec_large_oid(self): expected_oid = self.LARGE_OID if self.server_version >= (11, 0): - # PostgreSQL 11 automatically create a domain array type + # PostgreSQL 11 automatically creates a domain array type # _before_ the domain type, so the expected OID is # off by one. 
expected_oid += 1 @@ -1858,14 +1859,5 @@ async def test_custom_codec_large_oid(self): v = await self.con.fetchval('SELECT $1::test_domain_t', 10) self.assertEqual(v, 10) - # Test that custom codec logic handles large OIDs - await self.con.set_type_codec( - 'test_domain_t', - encoder=lambda v: str(v), - decoder=lambda v: int(v)) - - v = await self.con.fetchval('SELECT $1::test_domain_t', 10) - self.assertEqual(v, 10) - finally: await self.con.execute('DROP DOMAIN test_domain_t') From 7252dbebb9a808e52704256bf2307a8d779315a1 Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Sun, 29 Nov 2020 10:18:47 -0800 Subject: [PATCH 020/193] Raise proper error on anonymous composite input (tuple arguments) (#664) Currently asyncpg would crash with an arcane "could not resolve query result and/or argument types in 6 attempts", which isn't helpful. Do the right thing by raising an `UnsupportedClientFeatureError` explicitly instead. Fixes #476. --- asyncpg/exceptions/_base.py | 9 +++++++++ asyncpg/protocol/codecs/record.pyx | 13 ++++++++++++- asyncpg/protocol/prepared_stmt.pyx | 8 +++++--- tests/test_codecs.py | 7 +++++++ 4 files changed, 33 insertions(+), 4 deletions(-) diff --git a/asyncpg/exceptions/_base.py b/asyncpg/exceptions/_base.py index fa96a595..783b5eb5 100644 --- a/asyncpg/exceptions/_base.py +++ b/asyncpg/exceptions/_base.py @@ -210,6 +210,15 @@ def __init__(self, msg, *, detail=None, hint=None): InterfaceMessage.__init__(self, detail=detail, hint=hint) Exception.__init__(self, msg) + def with_msg(self, msg): + return type(self)( + msg, + detail=self.detail, + hint=self.hint, + ).with_traceback( + self.__traceback__ + ) + class DataError(InterfaceError, ValueError): """An error caused by invalid query input.""" diff --git a/asyncpg/protocol/codecs/record.pyx b/asyncpg/protocol/codecs/record.pyx index 5326a8c6..6446f2da 100644 --- a/asyncpg/protocol/codecs/record.pyx +++ b/asyncpg/protocol/codecs/record.pyx @@ -51,9 +51,20 @@ cdef 
anonymous_record_decode(ConnectionSettings settings, FRBuffer *buf): return result +cdef anonymous_record_encode(ConnectionSettings settings, WriteBuffer buf, obj): + raise exceptions.UnsupportedClientFeatureError( + 'input of anonymous composite types is not supported', + hint=( + 'Consider declaring an explicit composite type and ' + 'using it to cast the argument.' + ), + detail='PostgreSQL does not implement anonymous composite type input.' + ) + + cdef init_record_codecs(): register_core_codec(RECORDOID, - NULL, + anonymous_record_encode, anonymous_record_decode, PG_FORMAT_BINARY) diff --git a/asyncpg/protocol/prepared_stmt.pyx b/asyncpg/protocol/prepared_stmt.pyx index fd9f5a26..5f1820de 100644 --- a/asyncpg/protocol/prepared_stmt.pyx +++ b/asyncpg/protocol/prepared_stmt.pyx @@ -156,9 +156,11 @@ cdef class PreparedStatementState: except (AssertionError, exceptions.InternalClientError): # These are internal errors and should raise as-is. raise - except exceptions.InterfaceError: - # This is already a descriptive error. - raise + except exceptions.InterfaceError as e: + # This is already a descriptive error, but annotate + # with argument name for clarity. + raise e.with_msg( + f'query argument ${idx + 1}: {e.args[0]}') from None except Exception as e: # Everything else is assumed to be an encoding error # due to invalid input. 
diff --git a/tests/test_codecs.py b/tests/test_codecs.py index 70685aeb..ae713dc5 100644 --- a/tests/test_codecs.py +++ b/tests/test_codecs.py @@ -892,6 +892,13 @@ async def test_composites(self): self.assertEqual(res, (None, 1234, '5678', (42, '42'))) + with self.assertRaisesRegex( + asyncpg.UnsupportedClientFeatureError, + 'query argument \\$1: input of anonymous ' + 'composite types is not supported', + ): + await self.con.fetchval("SELECT (1, 'foo') = $1", (1, 'foo')) + try: st = await self.con.prepare(''' SELECT ROW( From 50f65fbb62ab2df9fd8eddde54217b89c66f6cba Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Tue, 1 Dec 2020 17:38:46 -0800 Subject: [PATCH 021/193] Untangle custom codec confusion (#662) Asyncpg currently erroneously prefers binary I/O for underlying type of arrays effectively ignoring a possible custom text codec that might have been configured on a type. Fix this by removing the explicit preference for binary I/O, so that the codec selection preference is now in the following order: - custom binary codec - custom text codec - builtin binary codec - builtin text codec Fixes: #590 Reported-by: @neumond --- asyncpg/connection.py | 9 ++ asyncpg/introspection.py | 31 +++---- asyncpg/protocol/codecs/base.pxd | 3 +- asyncpg/protocol/codecs/base.pyx | 137 +++++++++++++++---------------- asyncpg/protocol/settings.pyx | 11 +-- tests/test_codecs.py | 37 +++++++++ 6 files changed, 123 insertions(+), 105 deletions(-) diff --git a/asyncpg/connection.py b/asyncpg/connection.py index e2355aa8..d33db090 100644 --- a/asyncpg/connection.py +++ b/asyncpg/connection.py @@ -1156,6 +1156,15 @@ async def set_type_codec(self, typename, *, .. versionchanged:: 0.13.0 The ``binary`` keyword argument was removed in favor of ``format``. + + .. note:: + + It is recommended to use the ``'binary'`` or ``'tuple'`` *format* + whenever possible and if the underlying type supports it. 
Asyncpg + currently does not support text I/O for composite and range types, + and some other functionality, such as + :meth:`Connection.copy_to_table`, does not support types with text + codecs. """ self._check_open() typeinfo = await self._introspect_type(typename, schema) diff --git a/asyncpg/introspection.py b/asyncpg/introspection.py index cca07cef..64508692 100644 --- a/asyncpg/introspection.py +++ b/asyncpg/introspection.py @@ -37,23 +37,9 @@ ELSE NULL END) AS basetype, - t.typreceive::oid != 0 AND t.typsend::oid != 0 - AS has_bin_io, t.typelem AS elemtype, elem_t.typdelim AS elemdelim, range_t.rngsubtype AS range_subtype, - (CASE WHEN t.typtype = 'r' THEN - (SELECT - range_elem_t.typreceive::oid != 0 AND - range_elem_t.typsend::oid != 0 - FROM - pg_catalog.pg_type AS range_elem_t - WHERE - range_elem_t.oid = range_t.rngsubtype) - ELSE - elem_t.typreceive::oid != 0 AND - elem_t.typsend::oid != 0 - END) AS elem_has_bin_io, (CASE WHEN t.typtype = 'c' THEN (SELECT array_agg(ia.atttypid ORDER BY ia.attnum) @@ -98,12 +84,12 @@ INTRO_LOOKUP_TYPES = '''\ WITH RECURSIVE typeinfo_tree( - oid, ns, name, kind, basetype, has_bin_io, elemtype, elemdelim, - range_subtype, elem_has_bin_io, attrtypoids, attrnames, depth) + oid, ns, name, kind, basetype, elemtype, elemdelim, + range_subtype, attrtypoids, attrnames, depth) AS ( SELECT - ti.oid, ti.ns, ti.name, ti.kind, ti.basetype, ti.has_bin_io, - ti.elemtype, ti.elemdelim, ti.range_subtype, ti.elem_has_bin_io, + ti.oid, ti.ns, ti.name, ti.kind, ti.basetype, + ti.elemtype, ti.elemdelim, ti.range_subtype, ti.attrtypoids, ti.attrnames, 0 FROM {typeinfo} AS ti @@ -113,8 +99,8 @@ UNION ALL SELECT - ti.oid, ti.ns, ti.name, ti.kind, ti.basetype, ti.has_bin_io, - ti.elemtype, ti.elemdelim, ti.range_subtype, ti.elem_has_bin_io, + ti.oid, ti.ns, ti.name, ti.kind, ti.basetype, + ti.elemtype, ti.elemdelim, ti.range_subtype, ti.attrtypoids, ti.attrnames, tt.depth + 1 FROM {typeinfo} ti, @@ -126,7 +112,10 @@ ) SELECT DISTINCT - * + *, + 
basetype::regtype::text AS basetype_name, + elemtype::regtype::text AS elemtype_name, + range_subtype::regtype::text AS range_subtype_name FROM typeinfo_tree ORDER BY diff --git a/asyncpg/protocol/codecs/base.pxd b/asyncpg/protocol/codecs/base.pxd index e8136f7b..79d7a695 100644 --- a/asyncpg/protocol/codecs/base.pxd +++ b/asyncpg/protocol/codecs/base.pxd @@ -168,4 +168,5 @@ cdef class DataCodecConfig: cdef inline Codec get_codec(self, uint32_t oid, ServerDataFormat format, bint ignore_custom_codec=*) - cdef inline Codec get_any_local_codec(self, uint32_t oid) + cdef inline Codec get_custom_codec(self, uint32_t oid, + ServerDataFormat format) diff --git a/asyncpg/protocol/codecs/base.pyx b/asyncpg/protocol/codecs/base.pyx index d24cb66d..e4a767a9 100644 --- a/asyncpg/protocol/codecs/base.pyx +++ b/asyncpg/protocol/codecs/base.pyx @@ -440,14 +440,7 @@ cdef class DataCodecConfig: for ti in types: oid = ti['oid'] - if not ti['has_bin_io']: - format = PG_FORMAT_TEXT - else: - format = PG_FORMAT_BINARY - - has_text_elements = False - - if self.get_codec(oid, format) is not None: + if self.get_codec(oid, PG_FORMAT_ANY) is not None: continue name = ti['name'] @@ -468,54 +461,50 @@ cdef class DataCodecConfig: name = name[1:] name = '{}[]'.format(name) - if ti['elem_has_bin_io']: - elem_format = PG_FORMAT_BINARY - else: - elem_format = PG_FORMAT_TEXT - - elem_codec = self.get_codec(array_element_oid, elem_format) + elem_codec = self.get_codec(array_element_oid, PG_FORMAT_ANY) if elem_codec is None: - elem_format = PG_FORMAT_TEXT elem_codec = self.declare_fallback_codec( - array_element_oid, name, schema) + array_element_oid, ti['elemtype_name'], schema) elem_delim = ti['elemdelim'][0] - self._derived_type_codecs[oid, elem_format] = \ + self._derived_type_codecs[oid, elem_codec.format] = \ Codec.new_array_codec( oid, name, schema, elem_codec, elem_delim) elif ti['kind'] == b'c': + # Composite type + if not comp_type_attrs: raise exceptions.InternalClientError( - 'type record 
missing field types for ' - 'composite {}'.format(oid)) - - # Composite type + f'type record missing field types for composite {oid}') comp_elem_codecs = [] + has_text_elements = False for typoid in comp_type_attrs: - elem_codec = self.get_codec(typoid, PG_FORMAT_BINARY) - if elem_codec is None: - elem_codec = self.get_codec(typoid, PG_FORMAT_TEXT) - has_text_elements = True + elem_codec = self.get_codec(typoid, PG_FORMAT_ANY) if elem_codec is None: raise exceptions.InternalClientError( - 'no codec for composite attribute type {}'.format( - typoid)) + f'no codec for composite attribute type {typoid}') + if elem_codec.format is PG_FORMAT_TEXT: + has_text_elements = True comp_elem_codecs.append(elem_codec) element_names = collections.OrderedDict() for i, attrname in enumerate(ti['attrnames']): element_names[attrname] = i + # If at least one element is text-encoded, we must + # encode the whole composite as text. if has_text_elements: - format = PG_FORMAT_TEXT + elem_format = PG_FORMAT_TEXT + else: + elem_format = PG_FORMAT_BINARY - self._derived_type_codecs[oid, format] = \ + self._derived_type_codecs[oid, elem_format] = \ Codec.new_composite_codec( - oid, name, schema, format, comp_elem_codecs, + oid, name, schema, elem_format, comp_elem_codecs, comp_type_attrs, element_names) elif ti['kind'] == b'd': @@ -523,37 +512,28 @@ cdef class DataCodecConfig: if not base_type: raise exceptions.InternalClientError( - 'type record missing base type for domain {}'.format( - oid)) + f'type record missing base type for domain {oid}') - elem_codec = self.get_codec(base_type, format) + elem_codec = self.get_codec(base_type, PG_FORMAT_ANY) if elem_codec is None: - format = PG_FORMAT_TEXT elem_codec = self.declare_fallback_codec( - base_type, name, schema) + base_type, ti['basetype_name'], schema) - self._derived_type_codecs[oid, format] = elem_codec + self._derived_type_codecs[oid, elem_codec.format] = elem_codec elif ti['kind'] == b'r': # Range type if not range_subtype_oid: raise 
exceptions.InternalClientError( - 'type record missing base type for range {}'.format( - oid)) + f'type record missing base type for range {oid}') - if ti['elem_has_bin_io']: - elem_format = PG_FORMAT_BINARY - else: - elem_format = PG_FORMAT_TEXT - - elem_codec = self.get_codec(range_subtype_oid, elem_format) + elem_codec = self.get_codec(range_subtype_oid, PG_FORMAT_ANY) if elem_codec is None: - elem_format = PG_FORMAT_TEXT elem_codec = self.declare_fallback_codec( - range_subtype_oid, name, schema) + range_subtype_oid, ti['range_subtype_name'], schema) - self._derived_type_codecs[oid, elem_format] = \ + self._derived_type_codecs[oid, elem_codec.format] = \ Codec.new_range_codec(oid, name, schema, elem_codec) elif ti['kind'] == b'e': @@ -665,10 +645,6 @@ cdef class DataCodecConfig: def declare_fallback_codec(self, uint32_t oid, str name, str schema): cdef Codec codec - codec = self.get_codec(oid, PG_FORMAT_TEXT) - if codec is not None: - return codec - if oid <= MAXBUILTINOID: # This is a BKI type, for which asyncpg has no # defined codec. This should only happen for newly @@ -695,34 +671,49 @@ cdef class DataCodecConfig: bint ignore_custom_codec=False): cdef Codec codec - if not ignore_custom_codec: - codec = self.get_any_local_codec(oid) - if codec is not None: - if codec.format != format: - # The codec for this OID has been overridden by - # set_{builtin}_type_codec with a different format. - # We must respect that and not return a core codec. 
- return None - else: - return codec - - codec = get_core_codec(oid, format) - if codec is not None: + if format == PG_FORMAT_ANY: + codec = self.get_codec( + oid, PG_FORMAT_BINARY, ignore_custom_codec) + if codec is None: + codec = self.get_codec( + oid, PG_FORMAT_TEXT, ignore_custom_codec) return codec else: - try: - return self._derived_type_codecs[oid, format] - except KeyError: - return None + if not ignore_custom_codec: + codec = self.get_custom_codec(oid, PG_FORMAT_ANY) + if codec is not None: + if codec.format != format: + # The codec for this OID has been overridden by + # set_{builtin}_type_codec with a different format. + # We must respect that and not return a core codec. + return None + else: + return codec + + codec = get_core_codec(oid, format) + if codec is not None: + return codec + else: + try: + return self._derived_type_codecs[oid, format] + except KeyError: + return None - cdef inline Codec get_any_local_codec(self, uint32_t oid): + cdef inline Codec get_custom_codec( + self, + uint32_t oid, + ServerDataFormat format + ): cdef Codec codec - codec = self._custom_type_codecs.get((oid, PG_FORMAT_BINARY)) - if codec is None: - return self._custom_type_codecs.get((oid, PG_FORMAT_TEXT)) + if format == PG_FORMAT_ANY: + codec = self.get_custom_codec(oid, PG_FORMAT_BINARY) + if codec is None: + codec = self.get_custom_codec(oid, PG_FORMAT_TEXT) else: - return codec + codec = self._custom_type_codecs.get((oid, format)) + + return codec cdef inline Codec get_core_codec( diff --git a/asyncpg/protocol/settings.pyx b/asyncpg/protocol/settings.pyx index 9ab32f39..b4cfa399 100644 --- a/asyncpg/protocol/settings.pyx +++ b/asyncpg/protocol/settings.pyx @@ -89,16 +89,7 @@ cdef class ConnectionSettings(pgproto.CodecContext): cpdef inline Codec get_data_codec(self, uint32_t oid, ServerDataFormat format=PG_FORMAT_ANY, bint ignore_custom_codec=False): - if format == PG_FORMAT_ANY: - codec = self._data_codecs.get_codec( - oid, PG_FORMAT_BINARY, ignore_custom_codec) - 
if codec is None: - codec = self._data_codecs.get_codec( - oid, PG_FORMAT_TEXT, ignore_custom_codec) - return codec - else: - return self._data_codecs.get_codec( - oid, format, ignore_custom_codec) + return self._data_codecs.get_codec(oid, format, ignore_custom_codec) def __getattr__(self, name): if not name.startswith('_'): diff --git a/tests/test_codecs.py b/tests/test_codecs.py index ae713dc5..b4ed7057 100644 --- a/tests/test_codecs.py +++ b/tests/test_codecs.py @@ -1329,6 +1329,34 @@ async def test_custom_codec_on_enum(self): finally: await self.con.execute('DROP TYPE custom_codec_t') + async def test_custom_codec_on_enum_array(self): + """Test encoding/decoding using a custom codec on an enum array. + + Bug: https://github.com/MagicStack/asyncpg/issues/590 + """ + await self.con.execute(''' + CREATE TYPE custom_codec_t AS ENUM ('foo', 'bar', 'baz') + ''') + + try: + await self.con.set_type_codec( + 'custom_codec_t', + encoder=lambda v: str(v).lstrip('enum :'), + decoder=lambda v: 'enum: ' + str(v)) + + v = await self.con.fetchval( + "SELECT ARRAY['foo', 'bar']::custom_codec_t[]") + self.assertEqual(v, ['enum: foo', 'enum: bar']) + + v = await self.con.fetchval( + 'SELECT ARRAY[$1]::custom_codec_t[]', 'foo') + self.assertEqual(v, ['enum: foo']) + + v = await self.con.fetchval("SELECT 'foo'::custom_codec_t") + self.assertEqual(v, 'enum: foo') + finally: + await self.con.execute('DROP TYPE custom_codec_t') + async def test_custom_codec_override_binary(self): """Test overriding core codecs.""" import json @@ -1374,6 +1402,14 @@ def _decoder(value): res = await conn.fetchval('SELECT $1::json', data) self.assertEqual(data, res) + res = await conn.fetchval('SELECT $1::json[]', [data]) + self.assertEqual([data], res) + + await conn.execute('CREATE DOMAIN my_json AS json') + + res = await conn.fetchval('SELECT $1::my_json', data) + self.assertEqual(data, res) + def _encoder(value): return value @@ -1389,6 +1425,7 @@ def _decoder(value): res = await 
conn.fetchval('SELECT $1::uuid', data) self.assertEqual(res, data) finally: + await conn.execute('DROP DOMAIN IF EXISTS my_json') await conn.close() async def test_custom_codec_override_tuple(self): From 3d0e23fedfc39d5bd9732911288ad6c33147b525 Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Sun, 29 Nov 2020 18:37:04 -0800 Subject: [PATCH 022/193] Update README and setup.py to reflect the current state better --- README.rst | 13 +++++++++---- docs/faq.rst | 29 ++++++++++++++++++++--------- performance.png | Bin 18433 -> 23944 bytes setup.py | 3 ++- 4 files changed, 31 insertions(+), 14 deletions(-) diff --git a/README.rst b/README.rst index 86c52524..ce6f8c0d 100644 --- a/README.rst +++ b/README.rst @@ -14,7 +14,8 @@ framework. You can read more about asyncpg in an introductory `blog post `_. asyncpg requires Python 3.5 or later and is supported for PostgreSQL -versions 9.2 to 12. +versions 9.5 to 13. Older PostgreSQL versions or other databases implementing +the PostgreSQL protocol *may* work, but are not being actively tested. Documentation @@ -31,10 +32,11 @@ In our testing asyncpg is, on average, **3x** faster than psycopg2 (and its asyncio variant -- aiopg). .. image:: performance.png - :target: http://magic.io/blog/asyncpg-1m-rows-from-postgres-to-python/ + :target: https://gistpreview.github.io/?b8eac294ac85da177ff82f784ff2cb60 The above results are a geometric mean of benchmarks obtained with PostgreSQL -`client driver benchmarking toolbench `_. +`client driver benchmarking toolbench `_ +in November 2020. 
Features @@ -74,7 +76,10 @@ Basic Usage async def run(): conn = await asyncpg.connect(user='user', password='password', database='database', host='127.0.0.1') - values = await conn.fetch('''SELECT * FROM mytable''') + values = await conn.fetch( + 'SELECT * FROM mytable WHERE id = $1', + 10, + ) await conn.close() loop = asyncio.get_event_loop() diff --git a/docs/faq.rst b/docs/faq.rst index 7678b710..664e49bd 100644 --- a/docs/faq.rst +++ b/docs/faq.rst @@ -18,10 +18,10 @@ of asyncpg at some point in the future. Can I use asyncpg with SQLAlchemy ORM? ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -Short answer: no. asyncpg uses asynchronous execution model -and API, which is fundamentally incompatible with SQLAlchemy. -However, it is possible to use asyncpg and SQLAlchemy Core -with the help of a third-party adapter, such as asyncpgsa_ or databases_. +Yes. SQLAlchemy version 1.4 and later supports the asyncpg dialect natively. +Please refer to its documentation for details. Older SQLAlchemy versions +may be used in tandem with a third-party adapter such as +asyncpgsa_ or databases_. Can I use dot-notation with :class:`asyncpg.Record`? It looks cleaner. @@ -29,7 +29,10 @@ Can I use dot-notation with :class:`asyncpg.Record`? It looks cleaner. We decided against making :class:`asyncpg.Record` a named tuple because we want to keep the ``Record`` method namespace separate -from the column namespace. +from the column namespace. That said, you can provide a custom ``Record`` +class that implements dot-notation via the ``record_class`` argument to +:func:`connect() ` or any of the Record-returning +methods. Why can't I use a :ref:`cursor ` outside of a transaction? @@ -56,15 +59,23 @@ already exists`` errors, you are most likely not connecting to the PostgreSQL server directly, but via `pgbouncer `_. pgbouncer, when in the ``"transaction"`` or ``"statement"`` pooling mode, does not support -prepared statements. You have two options: +prepared statements. 
You have several options: -* if you are using pgbouncer for connection pooling to a single server, +* if you are using pgbouncer only to reduce the cost of new connections + (as opposed to using pgbouncer for connection pooling from + a large number of clients in the interest of better scalability), switch to the :ref:`connection pool ` functionality provided by asyncpg, it is a much better option for this purpose; -* if you have no option of avoiding the use of pgbouncer, then you need to - switch pgbouncer's ``pool_mode`` to ``session``. +* disable automatic use of prepared statements by passing + ``statement_cache_size=0`` + to :func:`asyncpg.connect() ` and + :func:`asyncpg.create_pool() ` + (and, obviously, avoid the use of + :meth:`Connection.prepare() `); + +* switch pgbouncer's ``pool_mode`` to ``session``. Why do I get ``PostgresSyntaxError`` when using ``expression IN $1``? diff --git a/performance.png b/performance.png index 14f4cd8f895410199cdeb80d1544767495b6693d..d5f9d5071ff3fe26994f1057952180794e3cad03 100644 GIT binary patch literal 23944 zcmdSB2UJsA*EZ@o7E~0DBA`^24pNk&bQ@woKzeV|ODNI_2}ckR6oJtDQ3D7_3lK^u z0@9n(LntBw0trnBB_!M(Kt09te*gE5|BgHE8n6>~S!?aJ=9=?)=A4Vr`>Kj3k24)V zaNxj6(B0b_2M!#@960c6?9qe3nL}4jUjsglx!l!vJ8*#J9p(3zSOFSl;0}}f_p}tW zwY7D0baZuf_4M@g_4N%53=9nojf{+pjg3uAOiWEp&CJZq&CMS?cwk{+VQFb;Wo2b; zZEa&?V{2<`XJ=<`Z|~sX;OOWG27{fPoSdDVU0hsTU0okOeCX!p=I-w9;o;%w>FMR= zkENE{QUf&P$&!r^Y{1v{rBG=J$e)n5D*v``1tYTprD}O;NXyukS9-` zgocKOg@rwR`t;eeXW`-D&w&z~77-B<85tQB6$OXGqobo^Vq#)rW8>oD;^X5JfRfvo zn3$N9l$4yDoRX4~nwpvhl){$u^z=Xe_~XTk7cXDFeD&%T0)fcL$jHph%*x7o{rYuw zc6LrqPHt}Qn>TOr^78WY^9u?J3JVL1ii(g(WN~qENl8g*X(U2`rl#iR=9ZS0R-p7ww6(Rh zx3_n6baVn`V6v;LtGm1VJy3=|fB5jB2PnhSy}iACeSQ5v8T~RaFfcecI5aeb!C;1g zGBJk*%BT4epiIt>j*gB21j_8v=g*&~rlzK+r)PjNzx?IP zmsz0Tzs}9g&Ck!{fU>lT$Kw|k78ZfBLRbRI*Y#zftgf#BWo_dtP=F(0a}_9r%{8E` zZxINDb)al)ZES380%dD^3n;{GB9XWa6cU*P6!Ko#0gB66R-OX~4Bmro-_Y_N#*LD0 zX|2CpT&Haal)8xJZGKA=w(QyrKBFG#Mi*$?CIQ0O_IW?MaC>>;^I1&=cSCmm?5xoK 
zj~Uqv?3LmD=+;KDc!Pyft*50HS#I(>+WvYDP3FgJ0u`HDV72}RTXHa76u(gyLuOU535vR8E84)kGb+BGi>3XPW3 z4Yhqk8sBNOoUirO*0s13Lvjg4VLhYcMyDLPE?Tk|;qFIfVo zE)dUJIrBwwPKooR%Mq4XLGVlz)#Rhty7CL=SXHM`5S)P&vnCZK=@g+1H<4yPQ4j}$ zhYdFgK};XTZAX^id9pHdLwjSNL_!^r{@m4=;TrVK;Uu3++FW^(?`x{{d~Cqd6}Np( zD%KbX_G3}g{OOhiN`k|=!_Tj6J=mNc7lm{*^hUU3Gp}W{*Cnl5o znrY}!O!Gye>56u83CXr*#a~r^NC;nqH`EjxMA0gTI6`v!z(m!1CsCt(&@`VHEh&BT zNyN9F&Y?Mukibl%vQ%&7UiJuWf>+;`yNIT2N&zry7E#cw6(Y-((KMJ#1%E(BVqT9i zRzQ3lMY64wLyo@OhG9Kpr>#tQuVg7pox4Qf9dPE!49ew;N6=ZsLf3lzFG6aBTxw3A zLPjVMn6wWuAJniuCX=sH*OZRPMNQVx^LyqoU`<|S)VUO_1g9;CI(9iV2E@iVXk+NQ@amwcUl(Cto2{7Mc zdleyCSFWvu;TmQ+;U8H;ZF6U)&+f)X#~FO#x-xB`d1+j@HsR4Ze(Bf}e0te*_=-;Y zlcN|#2V2#p>@S7+jjN^jg;kBPbr3E7?5b-(DqyfVg~b~s2MnSO#taYid#h`8+vEWgLNZXTl7VT1TExpL*LD?@Wp zQh|B08uib!L{MmLC^>~UZxCv)q%;+u3O4?e<#OdOw%+qe9aoeKA;3Q zz;%m7MBJx5&JXdgm>!Lg^Ul7zrZ#9ZcHg*AJ(~%etaVyu5V2wFCWV+s;psFYNKZ?; zQBYwuutgjRob5iTQ986fGi$Ls?nSL~C3oG@HuD&+J}un>OalWw0pscAlGRi^(bnE@ zy87yH3i0p+ypumR(@|K%st0A5VQ3Lstyv@-?WFS*??mrbpa{(0|$_OL}PiGx%}W3`^5PJT*Jno+~C{pOHI1K33y`Hg7&EuS!Lox4GG86vWc8kppu z;|oI`29;lNDRrAY9YKvfVIMnpj@ZowXk?dobsz_GNZB}L_o)5l!XDOQ@*)Darj8-u zZ-NXMV8`bSOE=+dfrd|5qmSH4`1*%W65^b6gF;N-BMkibU<4uc)hSEUtXpbU>767D zWyHB5h}$1*YrlS3SU1Knve9^&R?#^v^t_3h;ml8~O@jtDu?riqk$#OoW$CQ$afjfn z{kHKkMZ!3U$LLQ!TR-0gxlm@#a)XdgUBY#$1?FSD)x&%d(9iL>*07Gj=%x|%*b`O1 z*M>#q`;G`!j~Qh&5j-xdkfJF1lH@a1%ntD%sU3gmZ#?*PBlqmG^lRyro?KFI?XH&h z7s8mDaEu%OF~3O7Wj9Q$PI&{g9evA__#uya#(C-K3b0stq5w5kqk&^ z)DAN9we87uhVvIK(@!AN1>gS)d3_HKYp{6@+*MG6ThGe<65*Nq;G|)u9qX=a!WRVJTVajEWIy`>_R7ECsy4dd>W*H{rY918Q#&P}Qd<>{ zd=tJ>Q)-xTQ|>x&SNb6&v8C!|*d)qOJQ}?LRIhd%)9c|?2(0QBxe803i#I)igQen6 zj=-QRdhQ)=52Gww(H2GlcnLNv{p?sR|l(!t+5?T(^>O#NnM%2i*$)} zKB2dpGl#cEp%&5AU*G>`!vpAouWe%*=Rs`_s@AeYZ1J4c3}bXqvkXqq$zxd0hsyWh zC10}M7+-XIDePK$53bcG!HrqBog*5jqysP2%|*27NzTKg1rT{^>@q*h+fQv^I{66t zOG=CKD!s!n>Uni+PhAlpL=}iIVc5hOik&aR`M2tVdkC*K+jb;LW^*2$P&0HHXSvsc z`D`TEIkpl!9-3W;lFsNB6+V+l2Ms-$^l;Sklc0*f%n=M;Iz2{dQ#>m=B?IW$Wb}8d 
zR|uH3H+_IvgJeu&QS2&%iYUXFac69PPgMuC!t`yEiC;(FVqM~ga_^sFnmJ%*n|Q>+TfvMZpTy-2%b^g9g==VtH1^=Y%$54w#VSBN0Vj zx?L3huy5wjIFzt+n6li#Ran#1IVh7Uqj3fu~eac5obu1xg_2E;%l6hstYgDQE z+A?M^n&gABdKXC?l*ajGj|Dr3evZ)g#CoB+;~q#~GpA^c3y?siEz!VO{YzId#u)qY zGHRrynm-FuDj-d9moNg&!st}o8v(ucW6D5RJuzOiXiF|dWC7clDY3oMD~&D=F!vou z_I6EzhGM@-Vs`q7%O)!?}R9)4{LNVHNwX)<@C zDDgH$3K;Fu!9HlsG>-;>Cs*qsNnNkUgkjtgy=P}Ma>3(5J@qEUJBijb!!V5( z=NNI1?5B*8qvs;lu z{?zCIBfD>HJlEq-S=H+7_^Bn+98iL;q0s%MSU#gqiwa7C+B-%E#H)q~h}L)9;%YbZsP}${DYd z9Z5^UJ3NnYa#spoe=5i8bZ*D(j?R^qw0v}+f+x^^E0;7N&1y85Zs?k57B3)j2za%& z!4gOp-WvmmQcnv^gc~F}P)h5PNwE|(7+LSk%}yCm5f?h&1xYz|1Oq0LTQhy&Vs0F} zUHBY1idtmlje0jr(INVlwf$RYne52iib-PhpUTNp z`?Pne4fczts+=6>xsv7Ye+xW0n5Z4IZ?h(2s~4BH^x;|>-MepOjUgJemHKVmf*j`r zHD(1&yj23hnNFkdpW_lZ?LMgKqi;`_%L;*f@qQvz2HYTid*%>aAdkG?EqIBGqF31N z96UqH%jwWZUqt%zb#AS`pXLioEyI64JtfYvXyPkbr?oMJBPtb6(#vs~@@%=dyq>B&#_~^q076rc z1t}Q6&i0UYn55RQVa)n_f>LK0SUxE~!m*6Eb{u7#;iG11vymb+ivXh)ieS>;Oj%JI zxiengH4K=X2vY(xNSdFhKdZ47zQKxh1=FUz?tg%F2=H1Vm`qv8oe%H89%Dp0H#LyWswwq#)JA{v_^Nnq}=H|795cwfi|8y*`%x` z%7*cmtFoG;Lf2SO1hY17ybQO-MhzFwu4Dm$I8*#YVh+D7R zbgWy)EXyogO(d-`{#o0!1sS*(^3{^VR0Y7B12)%7Et)enE$J>smv%AHKgX?YM`FZJ zBi`d24aM-760GU{#^N`=5;f5HAKYyHNmn+LF<}$n0ulBdjxMyUDm*CqD=wT8Bh;Hd@JJ=DeVZ6)X zy9J3MNvB3Kr%cia*s_etIy$tL83ZMFBA79DLP9Fb#4XUA-!vUVl>w>S0Oy@uW;X|v zoOQdL?&!DYd#Merb6R06y$fbTudiU0>G?r$Dzg9U_Xl~+xy(_4PV&PMxt zq<6O8FOZrC(_uVe6n()%L~d{svo6MZVdT``U~ZwD8STsqps((UQi0{O-#?=!TrXV_ z!Yn*%hNLG>-kH7Lqvm-x4+Z_s%;HGon;`x$P#B40S37-et+_rAW*3gMuF=2L7^rra zc8X6&{{sDy`1;i|%2v({O44AWy9>6k`Gb{yjMg)!7BGO)S*ce&WuVu$R*AD<2jx2T4Ux|Iqs9sL}qS$6$6QS2G>5|^Dsjp z>eu8UEJ$ob4H#QA|2`rmhXPt-NVCbGU#>37Fje~1@ILL++5GE1n8d;7JSYrozEV*(YD`R7`GdY&jr#2oHZ|9+#it{4u2{ z``*OWFW!mQi~&oKJ|jykT5l#v3vCTnM;4G2t#+C}cH!EOfQdaC$0&d#U6~NCdz;|Q z8C#{!SslBQ>c!c)1*a%=QfwTw#zfjie)s~btx#lUnan4Ozs^6ys#fr@ZlOOfNWt*P zHHs_4)D;tvD>JlFda$7s>Jd`c~0T5h@eL3Ayqv+6(_v`3{3{V3=ZUFML zcT9J|=&n=Ie^DxLq^NzeeLLB4mr+r8(sNub8wTwu!@O(paxYP=D98Tcf;8pp_ahzU 
z>;3__0DS%15qNZ`9qGmDODLk6p1G&q_S`T~cMB}%F29KeW7VXpkh)1jRDq828VVngVP%T)k1SYGOk{oO z)^*p))E#$8z3wD-L13T+%nA|62;3!$MaY6UElB}>h;EH_DNoOL2G1bU)ThPlRYm+Rm16K3&LSo+nJBk+SKwN+ zrC>HTDr;0bELC~&U5C_4l=#rMlLIGcd%R}L$Wf9W2EWOPDoEo1hc~+DGNpr&# zfRd>>K@M~%bgwes7>{hp(;jN!K}7F_&mQfOJ{y@YoSLf1jvIrsE;bO}I)?@J|XZ z&o0whDSbE|+0Rx&U2G|T4j>-8?1qAAvTiK(nNe(5$Re)VkT z%331yekJ3x@qy!(JV~#~4G2+u?H$UK`m=YoLahjs37%99cVaOnIrk?#vxP^vq-@Zc zM;g#Qr-&);l4s)`IFNL}(wH$KP#r$*)SBR@;$KL*v-MOtHm9f>#8pRvB%kR9cuMas z_lfkynIfPlJjBNykbFx;!iYbWN1hVx&*CS}j11#?D3>@HUa~$s&gqsi_giiDPM}E^B-t!2Nj*Bz^6JPg< zSf=x9lt!f<#YD-*BPkrmsaw-=jK%Iw;TVipt*GsIwn+Jz%q%^ANb&t)J8V{lba&Ue z6yZ)Iy-RsVg^DrfRgCZR#}a_~L~=k>cXc;jv^MsPVYG?lrsoSzL2CKKYk9+dpea@q z-lFI2Q8zX*wP-O58}In_J-)N!xc3r|aEYfC632bqiGJ&51EOBFi*wyfe|M~PFzo;w z=lyH{E4u@&_6*0PB_=W!c+ELRIzK{qx2E*pFwykG;$s%xT3iQ(KR&*_JGP-d6zcNX zJHmXYGM~})=5CJxbx&ToNxA;rGqkHLck7{?`}1G>dX#c6pick!vF~8|^CSOt1%xaQ zR;G!%&o#1mAV`)xKSuhjG@YHCo}cR=mCPh3{|(aLnl*oEp$R4}$n7@O{RjsYoHCHB zpESKo9gz1Z7S1Wo`RhvAJ5JD3SX&Zn;v}J*bGGzr-I9GGKqpROLL*r*JbHxY=9~vT zcah^lO6_SzFROw%O!MOAz*u(W_8P~X^QKb~X*G2b9Y2k^?2ltxje*A0nx@+CPuovH;1or0yYw+Usq09 z?(oE3qO~&3-bCRQd)K1y3{&e^RIb?K?EH!JUar&`1xY=^QEV0+KQSC3 z3&(j)N>xh4$mZkg0!s}XA>naH8mimc{jM`578@3sWlSga*_lRlVNjZl!(n?5`hz=; z0-oXF7dWqshPpuPnKI-2lXXieebHpv8W*?sdE7b`|V9&2JWOmN(b*qY#srMKCetzZGQ1VQlQw<8EJ^LEZ7s} zk#?Bs=Ag_3;22_)kJ%js9>6d4cHutZ2YU7j-9yhUs(^nN(BjFf*k*m?`rXtzItb!1 ztD0%~5C?R!W4cgM+<--nZcqoUa=iABk~_+J;TI@6r4%>nM29G)yI{dHHK+ysf%R%H#8FosO;K0X#&3nJd0aUAS3 z^VXGmkRB~WHTe*{_cliDl0^qr&tzA

MyEei_ZEi4HyE15Z$}3`^`hLB|fSCWg_vb%q$1NMH^%pXY z`~ryjj~WSS53+HcH|?^(>2V7((l#zNVs7R|B^S#_Tm!{CZe(i>KizFrG8S(y>Rb0thQd_us)1Tm5N6spc%&d#B8vo%}S%>>qx%k-j$)DR>!gmOC{7h#TZ=_spV;yW}+*?>r{_N0k`KIxzY~%^B zhgHpml-R+w3%j}>`oPlPXIEmsgSlO>w_6Vd;QjfpeH=)Ee1N_G=f^%8{PQFKbp^!U zN*V>WFPKdeeAB^$iOZXv9*eMtT%uK*vz3*Z#n}^puW)tI`89akmsj?lyJJq?_-M%9 zut!!GWEi!F$gccLXu4GGpD+>-{BJn2!AeaVGq`VQa({+`z;__ZC!{65vdF*Jp-J?U_l0UUbtaa9B8qt>(%>;=eoLpY7==eqvi< z=U|mzIj&th%DrXvC(&kEXzIkzP$gi|0k0_lD_xT68VPB z7vtHEG}sI7;hlJHVrfDRYBbeEX<2Ueg>j6DcLjGUP1c>6H%bLV<$9f-xvQLK^E)FW z_NFwRWF7=dM~?m{^d3}#S1L;MZ2h%zC1;`5i3oDWv%J7>O{ zFW&-DW~mOj2(LMT@#@LJ-a}D=AEZH0hy$@0>fx^uEC>u3be+*0B5^ z%T5*v% zhgxFaCq9#Oh8?UL;y#Lk-kdA)k0M}Di&NQqw%bjZ*!dt2w&Gr{5ClbyvW{dgJC?7k z6y9D8mzaUwD7P$D%HYF3)k-YE>&!CjPVI~a!D?BBFsdO~v%}j#z+V)^l{&}RP5*)C zp-*zHV|%SL#)VB?_Ffcq8{uWWyVXjQ-H(b;I(dtbeNmfvPs8&Of6Xf!*VU27$i-yo zSp{jGH9Fp_c`3G9pBG<(@9jyonK{uFZ!L5wPUny_`RZLw#u%lH>OE7Wi4jpSG&;iiI+E0g;W(c1fKuZM?Tw+Ob&Dou21yH2~~OoC08dHz;`9#Kwowf7NkO@wTn->V0|5#pQ-H({fSN%ez(+@sz0_ppWOyZaT|8maM=5(9Wr` zw144PKD4A-QRy-5u;;#)^tQ>%zv8_L+*%T|BZ5MK4W-zgd1b#PI6}{!5LPm#@cgLw zYrLb>P0kL7zr7JYiOF#!_W2M6<~?KnuGsd*D&-r%ONu0qw+{H<@wD`hLVDdD1Y>yu z3NBUyC@*vm<$WYqtAyDQDAM{)xJM34sFo?cAgywHryW7GBQF9g&lG`Mr>Rv!!FP)^m$7d`S|F)T)#qz6`DSX;*Um+@{iv1q>wLyQCrQWnpV$O?&oKDq!BvU z8e>FIH6i>;Crb$laZ^-i=wN>v+7d0wwg|OWUFCZIUAG;htYf!bZl%DNd^(LUsKITndSwQeE8Qt;1!2n54W(xs2Fc`ZFA0 zvjE!jJ&%NW>iPU6^_OA5@FHf18Lll~MjM*_x>abccTInlA*4;roV60%+8aC9<9tk#utDfI|Rlt=d zFJP=xd#cvrySh_ZQXL!1t?HmcL`kCQsdeo0L8%1#*%Yv}_M5!D*QGf35aTXoN)b48 z&&0=2a2Qr-Fxbpnb?z8IAMN^5%r^Awi?JDFW>vSv_a7m85Ndnvv8o-~v3Ta5b6FW` z-;)F>l=JT}Z+AhCm@>QZtE~+l5f;?l4*q*)V7a3E-xCA(+q(5F+{Eg~w59d4cvI+} zNDAK|jP`PZGP1Ze2>lI#+`FwFi>cN>oW9OQ-md9$B5I(913k^-a6FVySI(VysNCKH z0ep4sJ-+}MvY9kPvKlr^@7Vavn|m3B2zSS?6-zp*hBJh8zbox`imn&t&HHfLl855> z0gx`c{T-L$0*KGB^LX5IDmC1Hb|(((%Z3o~C&43t90P^`;(#o(}e+ z^0Jp<&aaZW^Z`O)91Dz(l_heul91JRV?kJ_Sf{-OLvhGv-<>i>shx2kME;+o>;b71 z?6FM2I8B8diEqQ{`Co>Dg!d|_J%st+27j3MUfVPnM|eN=RW|g^@=s$uJjG4>N;G6& 
zi3$>jlD`q=J!NSrp9BA8Z;sRNA=eA$0Y4+7p^qpT|qq#Gl3KRS3nx-%=@#(f>EOfH7d^qc#ZrE zK)mJtpR^`0dB&Sb{b_e2X#SR*y`VmgFc}=7LP3`K!LwlW_F(KS)TWn=&DwQ4D zjoz1ton6o#d(Z3UDZuci&nQ|7FVG{2^~}*Tj7p_of7$i_(>Tk%a`+Kxam3bKNIjNj z8>{gxSic)>_#UedsVg*SkKAiz25A~C!FU%w!2OT#JP@Jy9-j9-ePYnMMR5;0jI_dT zYJ3R6o03fIPsp8w=KR!cNMIb@S(aBtk$0uA))un^N`i`-U0a-f*IMK2eSkDVQj1=Y zA+uB7mZZ==mSnw^xtjea-V9(s?N{w`?7Jig07*hb%@6@?FvXwf@`E*}U|1xw?mTU< z`7~@_2AW`u8L&N;dwCB#0H%#yR2SK28$|(_C;kR70q}W3b91Xh?iuI6K+T@RoB~tca892+Xs>LfSb^1n$^F z>x*EGeYFXAO;moQL;rZNS|@{+Wy7zD@b~QJcnsH-zYyvZ@+s9pyLjh0t|FdfT@$xD z%&06TZ%6cuVjv7(Z4+~QPl!I=S#CzFC9nv`cX?N>CrUcS&~TD0L;CEoKXT0WTCa#7 z#ruSHCUbNi9l-u9gkIu4gOU86fo3JajL4=|%IGoK3q)A@pF0|H;)j zHYK|sG-uwQOw%wN0snP0P`}}av(a_yO0zykH^@lVUp7&kyj(x^LVf4jep^5sxp|~Y zeW6^gh9WQ&nbGAS0vXHacU2t+(Gp(mZyZm#0Q^8T@|vm=_HSg}Teka`_HOvMI7lL;ktpngsNXQ4IjV} z3i4K+&pTWpM)1|)O*8M^T;Sk+Q4&@mU={2z(aJG{E}wJzHLAfx3p-3ap8a#~_J`i9 z8&Q%r2&Ax-XWZ+L!Ql08*}pfw34uORSl58?_n@_nwc@|%(7houeU95fz^7i|{<)VW zQ}Jh*bJwO3m7_>bMv8;MfP+!iyHMvTxZp#NS-GxHBJEv{pQzHk86#!1nB>}rAje~A zsA!!uM?5Pe4mG%GsBOs~ijo8f_z8ApuqV2Y`C0tyBj4mR7}qRbdR35ka`xVmZEg%w zxkU0T`8<@dE21k~*;Hrp&wj<-kstt$w72*09tMQJ?LsLBtkB;!p?q8X%D}doYl-UR zKTm85{jt`|ig*(Q&KkN)v+OH~*n86PXj;YR?-X^)q^6@ZXbi_vNh;SN61| zaIi)_GcgdAu4UVF81jdKPrRd7(`0pbhwFFdF2^r^0+SV{1^y=Avq<9ovD8s;v1 z_tc8C2-Yu3dQoc1UTLYK-x>Qt`HiH?KeYJq3j1Se`UpqSkc zTb^}0=R?+`1rjIznS1)DK=q!8`tu>>{S@JdYC56no>{9tlsvkh zZ3NsyBNRGjnjeO^mAN=<^7kmUFolr)6LEgWG3646nf>AUluZvJ`IMZ|ebm_t4etDN z&S*eT+5<2&3b{kv@kFk%?{%8zCZVg#Fp(?Hl)}`_s#ILDQ^UWV=myDBymNn5o6`zy zeVRH^H2fL##ProLxSNO z_7EV7(*dt;b=vh@?oS{b*UHqm^XXU&bZu!L5kOyB?c6VNn-!o>&-u2Sfn`76XWwK@ zwbcA|L!PkF_YHY&lAr$+y57^CfNZhDJ%f^gkhLA2+Oc~eq;BbDpLko!)<^Qqh=8sP-*;tJa5pL@ z|0K0%&m;j+`(nM^bQxABb#qhi4_h~|`p#4<9!43xN^RM3*a8uta+g}kmL8`me{mIh7P6Qt=1#lUZUO}*0TimFMnIHB<)xz z`DqMEej7Wo>P+seZT*nG>Mz%aU!3;&~B}F zyPhG}g1pXp^!GM6kyr}(x$xsAPHFj0mc!3~ z&2&8znVN3>{8zz!ohY7<2Qh>!Tt?)zLtm)WuqYBDU!L@G)=>}M7L&L9H z{lmlTEifx(@Mu1ImL%kqZHV=ADml2x&-Mg5Kj62KCtSit-Bc!LSe98tQol`T$Ik22 
z`g3}h6t9&N4aTxDeq1D`@vHq+XtX@=ezT=Bd(l6&eYuMnUaJflFY28hMD1< zqzf*-X4#x}V_WwLV~61gJAWkJx)ChBY*N@hVp#CyMmY!TrC3hVLd~dvI2Pp}hsviV z_xD-E+c&TW`foZ1ik3-v&*h~n%0w2JjTV65a}q}uTQv+V;D}nHoFPGRKENL@DO-m{ zjhCv$a+V%(KE%WYYE;a48ok2%7)dLC{6p=w?hbE^J@z4}=M5Xy%2(8Tqs-tT?{?^r zMZw^F19>HcJx97H9VQIg!XMh1If+q>la*TwJSc5FyW_mL?VnUhmM4EH{WXr}@Tmlg z*Bhyqv+83SU+^~Z9r=*$ynO}t8-1#UtLlf2#Lef6=TC{hD-;pukK%QGclOqUC%11Z zF|BMD$jPS|+A4pTmFq>NH4FDZ_y}uK>oUFz6*JU|T-nVLi`ZTsoq8$IalycDTK3K1 zx`b+f2J)6@2=y}gvN=Jpa3Lhlp7wA9aD|g9 z^dbVheC0>GgVR;PiA~LcuBQ_puM%A)*rc@rEK+o+xjJicofoSG>Ty%v5-C!e^Hu}Y zAnlH&jxop^?vvyggsjN=b+^23kU>^8H+4SdRq=0z8?EUK)|tqUO7x4R}UJ+Bec&bMUimZitxD!Wb1oLkiFXmx9Y{{ zNPC%h&RJ>R*uLlLo0FI`!A+fHdD}RIj^?oTQlm0R*DL}tX6GkyaaJAHBZU{&7qO0L zy?JjU>pH_jS6DQnp-^$|(Gk$`kE;U6blIFG3yps2v8V7RB^UB2ce*&luQ<6TNM(c5 zqf+*jyZcB#it#?b9alvr&TwM898=IYkYq0v$OO{f=!glH zsfj6}t>gqT!u#zC+5;{_?lC+#F27m)$H-E@Y&;9-X`(PldBaHEI)Z;YL2Ip2@Iq>U zlLKMhj3Ad7_gb;XKS9n?mF1D#Nol9}VrdC$_O!afQG*6+|4%O*uVqKSr_Sn7%T46S z4!~Gw*1d)w$z?BjyDa#?d$RwH$he5-J?E-Yq>j zt;7Yr6Xk=6dZP+4W_ZxllyK?Y8Q6-A`GfO%2iTMpUnu&1x_lB1vanG4lwn_`WBe$0 z0JqqQqq^S16gEnix`bZ1u8hD4Ecc(25qrK~>>T(;&?YT20ONJ*?iPJMXkP!dw{`rj z>b^q@ULW<2pecQkoP|YKh_9<1Eubq7Q#fT!OY*YuY3<3d(^Ak`&`{#+%)HW?N~pZ_ zxRWh5D4|}BeCeJMF8&V{U#alG9#GXm*G?0q*3VkugE=(i4?4+dohJ24>+f!D=ntyI zuasky6U}zYG0o3Ae$^XG+P0QI(w*@~x1npiXPX-5rjmwnMcGJeHO}G!m-c&= zw0Uy@rW~E*b3P}{rJ6d04h!RX3D^w`D*D> zz1<-98!ga8Ifi|6zH=qq)Wn=Hq@`cX1IOgH z^lljIg={s2ES)RXU2^@LJb#g$f#4j_^!Z->V~oQ+S2BFe{J*%qVSKOT0mLwb1As4wIw>_(D6Nt#WM$2jEe#!(?O+!Bo%|Zi; z;3TY9v?3Bn8zfXNg@@@67NxmoR(MUB%uOa3E02WgziRv(q)~@UQoL~=zU3FAG#u`x z2-kX=#Z!;-xd5{N`c9O%xx}$viSQV9K>j8H(W0!y!^p_?myw*Z?Jmg<;K+sNP6Nli zww>=yG$VF9lz(JM^wOx{#aFr-&P)nr2t}&}mOj=(tNTpDi-;Y138IQ#uRyp2mz(p6 zlN=g0G>La+)*tmRU|8g+n=V5_8rooXW~?RrTph3@HNN~FUE*%;N05A9-~qqfD=WH8 zlpET*Da@4BkklG0t@U`ZJ|UO@FHwc4Fg(LpAb-=`;EN10iVkXSZ{S#Kw!W1~WXU`2 z@6S@a+qoAVY&nmgeX0-{eP*_GDj{|L#rp(Vy(7I(84;5#%+AHRzwqis9#g&pAHaz& z3-TTqQTU_eD9qD;K2=^LI(6!l}!XnBPwpekGy}TH*)L;f}-!RCMe{{9X{NaRR 
z7BFrvI`ZEy%e>vZDC2+|lsQif5nTu~d-2NcK%D7^#~6?Nqi<_@^k)RtZnl%}onO4U zai7j^!No(?_?Hqj3G^_-!8574ISEkrTH1thleXg^4YeS~gnqLm`-RudWO25oHw&0& zqSq=O&JSpu&m&whQf|Fe{)+8Yu}Jn(v>bJXR=VJUE49=G&g(YiYeGTxtV4`hORqGn zI@4TazLcLUIyN>mN|08k?j?A-R}{m!o0?Wlookb;&@QX>d_hj7|niya{C@m%I`nlYBUEH_C{2q7e7dv~y_|t9ba`5NQCRYkMe47M zLYcH!N)_DBqF=2(IaLALZKqTBbLMNzLCXj~m&Q$-X%OhR&Zi0z%HLx06AV-tb;_sP zqBwHfx_08Sz?wmEmJF?sgs*;dm~}0Al3T4u2uY^34Ymr;BKV(>9wVroVECA8C#f^_ z(ci2%@}2$@31VQn7mZr*0SI;EpmQhL+@;k0##CqDQL8`l^94RlMSbeY{6LGm7t%HG2J@TuMz^mh(?G!} z*`dYyLUyrB;ew;3v1ygN?ym(?_MJ{edY7_;#nFuqiu8&NFEKoAOd18gUK+pwUmp}% zm+6tgX}D45%YtNL^$b`ExDdDAvFwq$vn0$Sy}h04KUbtIJ(!ngr}BiA$~kdVm>oPs zr%{zX&G1wlJ;f^e1TFg3&@HygtLBY*x1T7Qt5({z!^CD_y%?yw*ld zs!on+e9-9xGJqXjG~He}3$n=Fw7bmC^Bi*$cnJrBmbu7pXV1#%Yu8kWS8XV~F0J&N z%rwlbvB^1a7INovV|s^qA+^}|b+2Nl z#KDzJ*91HwocnZdS=T32OaFG(wG(D%xViO;+b#ar+Sv%_LBG=4tK0Ls4z=4-@8wyJ zu4Y*pC$KC9Lg^jW$3D!y^_ca3`-YlH>a?vYSr1Lni2;)(t8%uV(lNK!$ZfpUV)yE= zl74i(z=?`!ZN*X>6GuHkrBNLs5d+?G-GG)iB#~6U1VvXIxFiZ??q)c6f9OfHcRB$g zeBhk*qK21gSPFL$gs@ z!G5;m!5OC?HgnD%8iD6ST65%~Uo5T({uR6rFE(Smarcis$ABb9DqBOh#_8VSmV7>b zITG^b7m&8Mp5JSqr;Ia6oZyg6m0Dakw?`)%>k=0&IkkD6{-euvCV>kM_kELEkqFbR z3YP9*O&j~_4&MveJmD#es{W=2crpZBfOkRabCmurY+>jIrLpD-ypu4v@s*8TbfFsJ zrB_`00#gj(49^d0hr=ZDG>n(I(|{DgB2~C=ufDuF!SmTi_U_F0&)Ph9!S;;yFNcf2 zJeMHa+=8buM9Q6)^?2sv9}wL`>tYPoQRfY(kJ{1dSvrjj=z(410fA}8-+^q!)0E+> ztm@Xz)&^?7CRKyru%J|Uj#)gX@jCK$k1?%406eWz&A3?cu#C>z4n_E#wvWG`G7HyI z7$#ZLuKK^f1j=jrgas`|iy7U`7n8usK<6IoWFLOGM@2MDnH+AyeMvF3`>(SH>Lt;Y$pil>+N+wu>7F-=nL%KcSJt>|T^lV^?GNgjl1`FRjO zAK8Q{rHcl1%Y;S=JfV4Y0vSx2*eN*Tyn}dLb1$nS#pE|T0(RE9mN;E&OQ4@#x`Mo^ zL66CFtvtM-JpKtrM~gM@&(`+R;1q|G!%83GF)AWLjxSt<*z3o@U7yKbWL-cre&0nc=c4I^b0LFWsNs$WA0S`! 
zOE8)44u|Fh*^(HpbmX-l)I?JI9S}BQK*AMAvwc20axJR#C0hhSArq@GH;VT9xZ7cT zM-B5$U$ZcA^UksA+L!LLJ^I$|VENLXbe9jZ6=4cdu*ySVO{&#+QId~QvBTwdE--~a z)Ju14A0I@lU5e3as(-RBUKhD(cPY>~`n2z9U+zTG)J1Dq)yM9tfFN$j-gM`&mLl91fmpUs+ zs(rL|IPQ-R85$FeArn}wCnabRqo^FtS=hPjI(c?ru}?CZ9ORFj#UUR*Lfk{diP%Kh z1Gg#<=N)q(yuaudz8=sM2XrcsX5Eh~C<-?B_^Bq9)^7{96DwNJ0^u;LxZKd>vya^P*lV+hoN!v1$4LX02wE zRT8IW(IK~T%ugm|`;pwra_ekOq}X7BvF}g`H?_5qgjaFO`-dpohsmSn~NWvAS!-xT%*6asDr)0+yqK}be%p43dp#5*{^hl}Qo*w%+2LOo zP9Fv+iK@~U%#vI4vEs!h$@uf9EucS{V(wv)MQ8dJ~@Xwxf<^R2`d5OnBAf zW1AfFp6%%EWKxT_mt}oYHNz2(+g?mU%JdVV#@WuEbW5$v*Nlt3q{P@7-2`V33LX>A zy&TVle&*oK~h0<=XuUF;~aR!oU3#VrI)%!0l^BkE}$M9yah*1u9nNPAG7uJ z<;s>{IhNx&IpPZaJR23WM>))`{A6eEL8iy@ zlGtXb-}4E+Fw=wIdD=>?qWVSVcmTX_M$}q;#vo4p7x~1drlsmnzkoP`&V&auWM22bcWjpm-RC0FJNX&8cgn@wsf3(r zP9R;{9t5uu0~Sa2EFaSyL>J4IbdYbtoezT91t|b>mA*9J|7Fu?Ni6|-J~=>$@2rci zT1QYpno>l3@{982gHor~&h%9WLm3f8=`2c|qmRX_du;!o1y}s}RLf!Rne{=2*Hiqj z&h#q}H*AWRib&rjnJip&tY*%+ooUO0W>24$IQ{$npU0NW^qODh*}vbQ|In%hFSeP) zFn^uYRQxe^np?k>rcJP>Q9^EOn_z3*NrSZeyw{iLssH?){O#a1@v4g!ve!JK&h{8D z4m!W*L&cXqcpHV2#B=llxk z^1zSI6>UcQcOKWT-m_?L-?i=@ zH-G8LrBSbT-jc0xbts*@eQySEB&@W|rfB+IJ z`&J5DeR4nUymCg@k{t?GhZ;jC>VJ*jEoiuK_4>!pPK(qxNy&aV(EIwliOZLgry&Us zetlg&F?U(?TXQxwzp9~Hl+-wrO) zb@a4)rpG@0OWOb4jC;?$Vcg_?Pi)5izV^mb^h1B7x$4UQ!9`8-lZ4sQ zW~){Up9QAZ|3}`$WOcvd^H)Fr#1#NI=uWEOT z-2W3f5u2@VUgP-l;H=!8ylo#&*?eZ?e5g2~OjQw7mbsP3m6jTI2e;n(R6!m_Q>1MtAa-E>vzZoT+Ho?~gzW>;}YuCOhYq%#f*VQbl^8I>J zTz-mxmsr?cFCSpo0(*Yms}jMzK}63F*c*g&4AI|N)elF{r5}E+W%F(X? 
literal 18433 zcmd743p|wR|39qLwxzOLN>OxHVVq{jc}uobsN|GWVlbE)42GQAw%AB^b8N<0j>9No zjKORPWg2HDW`@NYF^mv{ad_@~9NOLQe*gdH_dKuX^?!PKP3FEo*L8ia&*A+!T=(tK zHRFprw@Ypp5fRyGcEHst0e5f;@Olr|77^KW5^>Yg$I|GkrXv~&aR6x`{zy+?w1|kdp1-GqqdVN^ z@Ey1d0;O|woI*W%7~!OI)BkT)(Ykbr3uDhd#(@{O$!`l9uzywIR zkHcYqqz4M4>92ECIIkw~J9t^~=wTs=kGsy%6{akWt{v7#d&3XIAZiMZN^0td)iof> zFm-jPy8K}kC1seRlB%Mrl7fn=CR9lis&aVs-%(&SZzpHX8y9|DT?_c8bJW$x$5T^L z5sSq_u&NNWw~L~(hK7csl8T~=iUL5PfC)hPIQT1|Fvr$rxB$mEdLul25NOn4a7Kqa zXkQ1c7kURMv;7shm?%g{hoVR#rApJE!ua+66TYC=>=& zhMiZ3u8qBbcJxKUQ9f&9oxYBR{@1afIUqfOl`p`(5q@x|AHC7Y!@_NABK|us%Kvq~ z)v-?hofnn=I#v;2MiEr*UzELe39umW*LBkczN{NR90gdsH(=WPujBFow@|uv^`?P` zhsWU1h`G6WSXfv`C!?sMv8=4DzP_H>!x{fPnVp>-A0IzAH%EEfnL>EGw6wHELElb9 zB)y2tp^9H z(+K&z%vrO!L>aNzIuVf|5~sVMJZ72@jk|Wuyr|z|&{j_-H}LySBA>@_gvd)8C$~&b z&Gv~o1|-y;<)(lmD4g#1rU4dl2FefapAr9a+V)0AiGN@ELdb*SVO-x*Q=32JYO|)u zJ!&4A^0>Q3EV2xjn*+6r#>FZwKN(zzgl21q%zD+0EY1#n|2i6)plHY{l#2*4ZoTtb zXD3&Bz*Yq-Pi4y6@9NTfwEU2hue)%LZBcPtBua(6%r{hwL6h|5NHCet_ z1|Eh_X1v?A_40trvp-GgM#J_8mSr!KrU5xF{eYQP%ShZ?oAe+`a6vK3!r;VUM|{an zQ|W}dR0eDD-PH0mzYK*yVzWi7IP%-(hGb*(-hH6}hr!SGx&zu64EzM<|1B zAA2D)z8N5;cGyrN{!EEAQtPghM4z)qRBoxCOzwOWjaWd?rw!D2_sC^iX6BvUOi?o zB51PVoIP8Vt`jT9eCO1BOLAGA&VE{Z7cdWj1&%UJE4dt|l;@OFbk;TCaq8y2ddC@a zowuo-Hq_LFXEYAD^Gg`g@P<>^_k;T7VlxA#*Kmu;mDMk*HUb-vL7*<<>JAep(~1Jp ze~{F3L)l(?QIIdnt!)Fgis$W{!DMNwDr~sNsr~_z&MUc5W`M%nimM~hZNn#%H~+k$ zZFUMr8`^3prvFaG_p(SDd6`Jorj!I0X z@5Y7-U%g~a?T1bU#jZI}={u4M@g^5@DV}{|Ro_63|_-ry9teuTMFe@dfuCwTY2cZ47pW;J0qE{w4%Ua!5W~E3_+Q zNCud8QI@`VC0{X6={xE5#4Q&V1GB{Mi;plt@Fax`nZsf+TPa_8f_HPH~ zBBqm-bwyQ2Z(O_EBLPg51tn>Zy5D(Wv;p{13&m2*^xu8@Ci;gBA~;GK=e+f&C%^)X zj)UOPJhJi*b2WVpOQ~JJ#4vhC-?mfJ+FK2B086LDa!yjCNM5*~pIWJJ1#T67}XU(A4N%>AAI@h zc8Nc21SU|XKb-Ef3KYeuZU85^gQKt~{TjJ$OmRS6wk80m;xa*lXaV;IpswqafB6I@ zxPcC@8v7O?f)L2VN9^?nt>^#;3GFtjSH;{RVPV6dZ2Zxvg8ADO#S>5=H^%1>nZw$o z1Hi%=TH@`meXNjNPtf)|Ss&HL`ur>{O4%1D2$RZ1lM_GBmf-NabjA+rJ~I}A#vXd< zYgq%bbndgQI|&v56{?3%k+FZfd>PZ$tIo&9561xvgrpVh$iW9)F3D>T2Bkm-|ymEk)=Qyw(E3a^# 
z4DvV8(OC)+ONLiLmGl;oJ)ZsfJ&fdxZL1>9D~G(>=r~tS-$$154*Eqn&sHe5skXbS zmS^~}oNp&NUi`a$KK*YldHqcA?1q7Uegf|gJ!B4)N6wVP)<%~7Wu+Vu6 z#6Kv1kLdRNQAoLm?OBp_-o7t!b=IEpy@*0%s72dBz=BN>M5+GCvKNm7Q;UtZSxfe& zJoifvN+^EV*97hP;P*o+xDJ!cDXvZ}`~6ub-S<&-g#<3}l>PWb?ORCHawJEa0xB)q zXmUe+8`@b*ZJaqETt1oe#!n;WVK&{2n@d3Cv?DtF1@h8%m8fnzBJYh#<)qbu z6+P}X+U(%fEFp}yuwvu(b}j6WB|CLbX)%!rxVMoUX*-7V%i9MEuJ)U_5yi$Ge>wJC zQGEgg7g;D)O5lU}Ogz)EmFb1HOuQU_5RiGb7G6-HuhWqf4%-Ci z>dz`NwSat5O416nB{P`B$+Y&Ai7fyI7|;i6Dp|ag_5)N9>~NfH=UiHOaiv<@ZopdH zaCld)x2@h*KtGWPo|;bBXi6LW?g?O$L8!XL+z zAr8Lx#hCvBFkowiG2ac|^a+?%;X9>i0`ti=zG7ut?J9i@;z}d}Rir5j;y5IiO!;(+ zRs+N-NFMoRMAQ74Z>m6EF5{Ty;V$~{VQ|GPsKwrg>}?=@wYH5Uh^vP5E{%f#ow~={ zO~8y-+)xF$>LR%-!kKc(nvreS?q&d9LP_M*l_wp(O$BxBjv%Ja;0R3Zg72CN&H%_s zdCHm0IF-<`xnt2qxU3M|tyLM~4?zWBkI;7tHlyUXXM!;th)-5u1bjPa1t<|;C_}a@ ziM)El)QJr_rOg64Kab-^&@o77Ycr591jOUI5)|2^)~4wd?ez_?WGp9px=*gu7@S{$ z&Ym3fvd`++49uU<#;Q7!-@R)KXg-D*+YUdO+MYBpbL_awZYA zHa^dV_LvGI5dsgE*u3hvpp!LmxBYpgm0I)ZA^YbOE$(K=wGbMHL15K_F>o9{W zVsje-V7yF^qXoSib9nDqJ{(_%fBIx_8pk$zWHS*jz^pk08L0?uRrVonQCL<@METBc}h`e(ohjI$D_~LRub*3_4zWU7_=tE zHk;CHJKKyR-b0RbL-?5AtxJEl&f2>ftAPj)$H}EWxmtIQHu=6tn#R=!vsxka>*JX8 zh=mLx$hH%Om-sLXp7R2F9w)K=`2>N1jIrcw*^F3Fgbnq8<}ZOjg{C6CZ)o4yGx-u4 zGF`EfxMFbJ`XB6_2JQ=Wa}lRC>rQ*a+42WsVZ@i&K$&*1ub(x=1-|_RnrkY8r<8lv zS=op-bWkx0?Z}8~YT(v7dp&_m3L{RmEAMQsWs_Exxz#IENipT9E3}-k!4Y1kg4Fo) z30bH`d{@nKmpy1;(NJvCt=%Vo+}DMov>&&F@;_I+Ft8~@X5#9W3xv5TnKM$;{`6B3 zC23@6!t-E2@66 zkHapCy9D!(0@Wxm8IZvo&OBV8LHt`v1;`$kNgiha6{i)fr<)RsJ54<4H5d>aO%P^N z0TS>wSWQ0-ENQ_>h-78CV$WY7Bd#sxQ%C<^@~$kRdD82M00-n!PsS0JD}Fw=*fd0X zDq*mEe4Ulzcky|$17Gy1ZbiGW>{g($3op07N7Rp+Ee-_U^j~l}17DjUr{1&6NSBeuP>A`sY;B42JIdELf9@Z1S%^ zrDY)tCe{-wg@wer6@dw043@8%Q!qQhAn`l0Nmup*g($Si4a&71^wjB}J6ru#nHyyH zf+C_4$Qd6p9~UF7Uaz?}(n$pi-TfJz#%j|>InRKTxeWfG|pN=T%IuU=I+Frl#9r=5uFbf+^zqd z(XK}FG-Jn)@87Y;^TqgZ7v{%1($Dvnya?FfFWIYhTAEfe=yhhSQc3$OI~`^c4-;yS z)CC3lOdXM?pn}Zp6o;*o9G|UkU41w|MM1qOyr9N7&oRzGD{JDrs>FDct+^Cyl1F9C 
zD&1D7IudH()D}9;{R95nSlJb$eglkH5v|1XJJNL{Ah!jKl)s3h_zqUq^DD}bJ;vIE z)cTs&p#nzflwTkGFZLH$imzzIwq2>q=YgpYWX9MYt9Q+wv}V2n7=JuBf*1MZ0tPHt z8nelj3&jI~#{u#$L7?7E7HNtL)vsl%#`g^e@_Rs*ITUrkQvh<%p@dHKUq3<50;(cl z5b*QwCDD4R1cHQc<@F%|d;;*a&h}90)n(3>vrV1~ey$O3H*Tj`~Q;d?3ZlDD^GkoZt%R>mD>H!2=ZI6LJ z%~I%g%SdUR?cm0T)_X4*fT01NpoTOcVff(pyWtklN6~IX`C>mq zZiCXX!L-%*{Z}U!b488zUwx`9I?OV*_i8QWHG2Hw2;byj$iYe>f$0!{e`_%^-$-<>oLI1xtREvDiBOs{^f*h(K}$#g|s6^;OdjQ*H92_arYW<_u*xQO0h> z__wfXxRdhx-Xd?V&KSIWm3ErFy=nfI7*uar?q3~U??qag`$$DM#kF9h`hb1yK*dL` zVD{prV0OrNE0&RZOPz0(pCQONw&vRqMe3;kid}l(q>2{^y9|W(XPf}GjD#M>8s`1e zA^(W_w8Rftpyc%lJ#)dCZaXPyxbEBl9;|pgZqr}k;a}Vf^vIRKo2l_HKK*yhSc4h* zh7}-zZvB_g{dY%iIk@8e0P3tj#oE}v;nqJHTbXukEEwR|k>hX3MEwziG#%3o&mY^B zdSy5DjLg&-89ksW0q%$_(*pF82;R7Ya?{o3cddb5_1fLk7M3Ln?1C?9S6BJlu5`dx zHpCJ`pc*I{-uZ?$0oJ`I%yc3Fe2_I&vsIP9cp0!lkQtr}P=et(`;f4g$DcXW(!I5>z6?}rJ5}^Mk~5m zjOEM+*jl%=f>7DVu}K&CDf3S}geIF=B)I*I(f~ zaqFyx(2d-;p-44p@Mk-2#VdZJK=WxXzWj}X+|Ths16VBJG#qu`gtaI|E}5BL>s}wY zjXri@X1*a6NDA;Y{Ma`(mOuOoAO>g&EE9L)4&#W69_MxA|I(;llTHXdihVX_@8BYp zZ`D;PqAf&oxa@r}-(REoY0DP(2AJFskD_4|snwh{Vf&apTTp4|v2Vx{L`%?~`BOOj z7wA~*EqIGDLi;TgqL5maKv>TRgq&=HO3b9t)?3h}R`QnL$i=L#qV*H!K9%R4|IZv? 
z#Ud_aw&qu(>MSP86@ci^fUSUn&>T}`rAP+`sWS90!uI_^K_`TxLu0jaOS!2e^o zT8BE}e)+OEcS4YM&@Qu%U~v!+27Uqqs?dv2bQoM&xnQeX@o9*#{Yp&jFBrE25@w-l zN$$kc`!b3~P7qV;|B^7Q-a{4JwE2y__>_x#8#m;;uPOmN2H`({;ZeZ}Nt*;C2`C>R z?EtL1Mhtrh*4J^}+Nj-Bvgo2wL&93}Ak-94Nuc5^ti>0Tp6?{p2Cg<;v3NTAL@ID@ z_m!Pn=t2XjY2`doe$Pn#-|7S*Q+yYU?;f${4?PvK4g(`qeab!GXhp9dfvrvxEq}_g zUcO}F*Sk>;+Nx1#vD4i)c12z6*iWTmYj&@XqX~}4_1}#B&q_hK5CDgEniBC|3kMr+7`R+6ze}0(moqDk z8!P3)il9x-k%#WoJSQNc+fR<<7ob*4hPLuoL0}UV#V(#O22#Tn?G};GcmzrM{cxz$ z>%-CY?rDO{%fl$3Api}xzx~s~*isw$$5vK{g!rFu)wv$M%-QCoOsMNB4%DpDkr z>3x%xeXz-8n_~#x2#yj-%oAj@jT8?M0$|?S1h~C}^kiNSkdd#2^uK%J8bqvR2kY*(mRWoySea}s zX#YpRUol8E!6LBc|2ww)C*?nK2k6%SV@$HtU5W1byzeYz>Q`HlcUIYFVcm&x zCH=;o@>`64p^caUok(tt*F4YQDrRM)0zdlTIrDRtm~&Ncr>FdnD2&H%g0q42az!5e zF&y3rdehdzrNyG-I8i&bf5fWt^EJC3gDxNXi&+)G>{nC-;76cwXShsvU^k3?(R#i2 z7&M2K2U?HHYx)Kn33P9Y>gGdipd}GSsrOI#aELaphVQ+s8>#wrwjy$exv3q4g$kY> z-_M8!JDxfRSRdwqRAxn^fRoh8bH(}rm{F$CfD>YWvzs^UkQz*-K`+3ORyX&P2THb5 zX|_De&as>fgz0c`B&3KjikgHAfZTT_-h|x1)Fc%k9ivETMbMu6j&sKDef2=)zp~C^ z5~sr3uD9&pdl3wSRVrV4RNIYl56v41p9q2JAx=LEw$_N>$_9o&56TE(*BTQbe8FY?3?l2v2b zyTWaAmOxjAZG8Xry|3HfK%ai!xQQ;=B>H(>7;)XnJ^MBO47Ao$@yzvLR3)V%v8(& z$`gt{9gT~GTLlNvcmMTdX5Hf0d`^n8xo_{|qJA&i(d24g=W156sxCW>jOkvs#Gyy4 zCJN*wgM$f&qQ8FKX=S_;`E1p|6PK1%MX&TB*PQf!?A>cf^7ky|e*i(NF7UqqtCf=C zF9iPwm|Rq;Sgw*ew7PO1=1;W$eZU%9SSO(BW#KhGX3UiwMp)ATHHYvg znhg0h+r85fXfLkD2?5MvdN9^6&ianJJK+%=?4GX|H>*B)kJE6rxcmg7W}!{^SPwIj zVO0OD_cD;w0Lkzwo^8Sr!qZth<&rEJ0@hN&y=9igu*7Tgk4( zHE&8jhL;kY(@<%@QWRoMqsD1u1yQ`jy_|`$S}t$-ZSuFB;lYO-SG^~b zoJv(!2hS;7rnCNb()$;RCeBG6qklQ7XsW;Zm1>VG3b)_tYkQ+xexLl$afVlg+X8pC zk-jCAJ$O?uI{fG5nIiY0u{Sg@dacPz6?Y|fz5;A(lmTrGTgDe(!MXLjz~JD~-&YzN ztG1YIgt480qcVhTeIUvU>zJIiZ)+*Y>NbI!sMUo4R{rv{o+Jo6m}^2=o8|w_Mc}lz zPOWK5U9kMys&BbAt}1LzHLC#hzxxV3vgUp(PWbhm?BQZhId z)rp@jt_hn=Ll9{Z&kELs4M?sb;}o8FfVaE`)pXmd7`iRmvE9~SO?}{D?wA_2wCvQ~ z^_hUjdE_GoL)GPq*JTczY!k{3qE1DuOBuWpnej+(rU=!z3Twg`?Y2G*a9*D9h7GeA z2O5~`D(kt90h$rfkJvH@qWkjh8l{Co{pjV!jiE(hq1<=NyzMG(=Z$i=!TPA#S~;8% 
zeKm{Vhn%IJ?(mq(XsT5W&Enb~fM7Nf|9P|NwOfS)o7*1XfJ<-WfX4?>@2YIBrTHe^ z2Co1_**IF{9m^$tBGoOhB;LR^qM10f8=A=yN5k!ySG zOn|4?0N*35ui}7`ix1q~Uce&IdcC$R^@*OQ>%a?!>Mx7cD$d@^ z&&W*?x6kD@xhG20TH9YBR8*gb>9(?b31v1*k#8QxazaL=ytS!-HyP8E?2i@|l9#%N zdOW@8$$y@|j9G5hteutCF5<;`Yn6Q8f)kjmObR zelokzNDs!B`K19J1|;%M!^N67Cyx<}or?v>l71mIo*N!>SNrKlvJFlPnqP&*q!)J?>Ad^Hg#l(dd5Wk zcRs+h)R5s$JQ=cBzMO4E4>e0dR`)N`%ci6n-@Lak==~*!r%CFl`o<@c?o}Ksq#yGiThfHUeHve;GU`AN{>m}l=V zsKLeb^m?d&h;z?&+YugiMRGpJ>8@r`8}d;+N05;e zGK(0{Zh!G4s;j6MIWlL~*IMlN(;nM%ZxYy$vOikI+d8O~W8?I_4udvRmFY5?Zvt|& zvAT_h{LgD2X9dYW?e97TS})BuTW|7m(+y zFKUGbTgHsZLpzDS#e?vg>hGD;mtzWhZ5wmQ&Wk)M<&rVFqN{2^?Q)v^(}zJz^a657 zo>xFc*V*ClR5jyUHGo$F zSw>Q}us_FkTn+ zUa(UO3M^B^4_gQNZFqLS0*$69#+cxIuD%NBG``y?I;_}fvyf+Jl8oY+&jEhVW)wK% zU#i=Chm|hFW!r72oiDnnugS7Vuad|44AP>^2bVv-Ad#*nU19ITmz&Qbc4FPJZ)Cr7 z*Jf`1sw?JZrXvz+C!b*gNla?wkdjOP)0a33H8iw+o=?Sa<9XX+dY|PH|&; zQ%y0L)b8Mx-I+64|PonmkfI(G8Ou!!|B<5 zm0sC#6ZPKet{<<{;2v^JTP=0^A>t!LeLbHCQ!m_K2DC$hx!il+)?$?57e!LKT2^Wa z)urL0?=r|QpUI{@$5}VBLj;5cR`Qovz{a%OCd)Y8ruvVqu4Z!`xlRJTkAs`cJd#J> z;t1HB%hiHA+EI^3!*))3p6RGfF~ZP3^iCC?nhmcD$aN?y`XifcEv=aD61dU3Bp72~ zS9V^f!9SAc5a7R9klyyhvtj%+X)+O8N0Kff`x?<+xbnpkuA9q~_ux**n=Qm8)Gj z?H`;PHaUM%h7^b?l3aSw#Y%L-3~za!$Sgg@&1bUlfi2Icx0R5e$D~Z0cWd@O@Xfb5 z&65YH?@F=92Btnvk+&*{+4qa_b;1@O$IS7~Do~@d`zzG6*qch0^1NClz{>7gXx+mHv0J zk8)|=c}&arY1Pw7O87w6q*r0g3pJ2-H)4xT7n{_gYI|;ex@4SjBx4#`-E7m{Jk()R z8tUSeJ=j;+)Ww=9UP7qxJrk!oKc`1G;;gCnqH0a)aa`Q+=mXppt>JBo J}n$}%R z7TaOic&_Yh=<;xM*QETfXYFUApd_6zm+HgN9dMuT+(RGiGNtPr*mXR#W%LG5e|)!3Q1>93RfMbvD%ZEo_Cd;JUZ&lLmntY+px*4XJ&jzKTlKPHWN%^C%%yluZz zqvl##rvCxCl$YKVh=#nf4CKB!p3^ro+etb{8+LRp?$6^$i(z^sb?tuWPaFJ_*b#Ip z!iCRyvnhgSLCDt|Hof*l$ew)=8WrGbQ)SURX_jY`au&DaK=`CL8CBg@MZW!IuCOW7 z$1Jf@E%}F+D$}<}Y|r`qOKxWeHhsUG=Oyj7KP5~{PuYuqm8SWjFLGx{vD~F+xsM}MOzEvc*X;gNgdmzI#^`@1#W zG_o+$Qy8q>j5pv&=`kte=jS6`Zq*G%pwyFQnidG1Tis<-Lz1wmgC=_-RNW!RotTb2 z15>xhsoZ!%Z{_`hQ_Ns?JXMqFBM3z$)l1er8@3Amn4b28@oS$`*GM~RN(wT?KJ{_N 
zH`4}t>a4SSeVA?u2W6y@`ZMhye!yA}a;iLZdz)C(3Sa)CnO^5!7yGFO?ct_EyUspP z6bZn**;=IQ(>i5)DRlb&ePSLzn(E4rk*}+~+mo7&e2U|1gGDL;?8@rrI-~( z%=V+mHoh#oU{pp52p3iF84?8&F?pXKXpPh!c)^7@T}O6>h!rnjb_S0aYotamUPK3X z-z~%o?g;W~lE`5uiigc~^zJzn*x6uF4ubl-_(!2V#&@el^L#N)FokxDB zE@MslVWA5Q{2dFqRmytoVsbbby@1yv3e9XrwoeY(>zkx-7fvs3!v-yHrNf8tCQB}V za^E)eZ3Y)(kh2W}Cs#%w3)?8FB!-P2g7&Z$Gh|+;N{4r3Nv10dXwOaT2-iUGtUoJN zUeMTM*c5PNu@EO!?`ZlsAh0InbKgj#3`PCZb3^B;87+Uav0VWr$iO9DoLvdotHp+bR9q~e`56T-Pk-bLWEoS{>?M_&Ol(RA;Ho!I*+HgC zH?~E$Wm-;oY9PyGvwAbriPc5SRvY^dA5V3$9_6J7)t z&y8BAhSU`QX5`CB&Um6TX-<#YtHvOZA|C3<}d$ed{>}hRbT82_YMx3GjI8NPVy{LVD`Y4+DLL_P{*HN z$4uEd4|@OP&%Z3Is*n@$Aga4z!YYAnL9A#WR(JN2>l}RxyK>h_3X(AB!3$!mRGo1~ zyIiSHD%BZJNuV#zSXYdp88%wuG^uJSKc-b@9a_5WVowier-C5xoD}7cF9zI{7uZ2- zRNdXeZf%CmZ2m_!^+K3*NRd@)J^HsfCbl(~9C$OD+1NgAH+%tukxru^(Vad6C8z!Dq2qsecl2W1PijeSP$K%7UUmKe|i%)(DTbccM`Z!D-74-r9V9s zkTstZ&%80!Y}N|TX+;&<_`YmY++pGDf_nA*qMw)RoZUmjP_OccW{8AUc-+urha+D) zuxX?^i;26h68grh3^-#M;_EIX7RDrVhkYr%F5}PWwz+CeFh&uqVWuO$ce2N()zIaL zN1S9rODc)@B{C-sn%-!bz+S8ly+~iI8MZidGPm+vbP}%w#$J-TqONYFdvSmmLibVv zRH{HPoqmBo0ZoN3&f9vqJ-qwA+EJ{4Pi^W^!BcJ^Q{0JN#S=`VRt%1xga{aY4!ptg z4C~sAE%I4j03Nww+G6wl;}w#L_QtLPzm`}OxQOxtbGJ6 zmrNr10Im}-g*n#>kB%ciu=8fJCY)C309xi3&eq=O^ax}@%?mdjaj}bm!B=Eh6#b$Hg(l+!5V_hw^vER~Fg2~6RQ@MjrvrpoB!}qzX-1ryDOPW1>C4)>+<<2!I%sLIw`RXD7p#syJ3`IyhgNM{rLLv9xA&-X_R}j_pM*`!onMnU92x%;n|7try2$KSv1OA3v%N1 zWj>YrCbeqF^cv9=FM$&slS~ym%jT9_l#ldIo<1d(41KZ@jLJzCAt zB_zdW!}=hS31je#j?RrUp=@}4Sst_Iflp3v)AYN*;Icw7G&6&o5ZdE$<^6I9DU001 z|6rRl8o#(}3hRHesnmVy21ZbFGIZ8CKm%es%*Y(3OBLZMpN1tcQxn?kc+Qe(la0&q znensV`W1Eu^|xCw%m*b7#4Y;9RTpyrA7;O~@O0?{NZ4j&UY9H;ASL1ag_L=mv6pq) zflU{0<$rky!&bEw(WaUd$ClzzS1`+YE*$Q0(m7;fXl<`^Sk?H_W-6uCqLa1PgxbUy z1hH(9R8pWY@SJfS+4Dz)Ufr|Xa9_6W-QmmjRu~KI#Ry=_AXRXLSCDHdF~T&eo%0wL zbP9pbuDUB*h003x>0)&F%`bXxnVN9|*b@+jtm*=wPdvumM*y zo|#|M=Gn{7D}T9h`4W{k0z4f1Iq;Xjk}9CAHyLqH1D@{t90LZ>NeKP%K?~=od?o1{wVZL9yM=j!j5;+f)9_5oYkAJ${h;C@Rx7R2ksf~ 
zfKAl81TT!=`pK_~e0H1`6gn`rz$sZsoIXT=)-`30`s`~y_&FX&v9B*daG!WQ-uLZFBk7de8&snjwELM zNIzkgeh2qGL_R^BWDh5)J;Wh`wy7k;ZSv>;&FUXkQW-YL}K2cxMk@e zckZF@jO66Q@U%{b4&esl!9v+VUkN}hAvmoaxBO_6WZ!Fw!9Ey`BH3|HY9uMI4|ol} z#ipt=$g+fNVdhq16bUkbqnioRt8#D@If&xG@!F6>v)qPaUF1%&Ok!JX>wtLdEI#Bw z1-qB1n%zTB5%&x~ef|dTrn7gg)iCfW$i`Kw~0*KGjm Date: Sat, 19 Dec 2020 19:23:12 -0800 Subject: [PATCH 023/193] Fix method calls on custom Record subclasses The current implementation has a bunch of `CheckExact` calls in front of most `Record` methods, effectively making them unusable in subclasses. Relax the check to include the subclasses. While at it, add a check that `__init__` and `__new__` are not redefined on the provided Record subclass. Doing so is pointless, because `Record` instance initialization effectively bypasses both, so raise an `InterfaceError` to avoid needless confusion. Fixes: #676 --- asyncpg/connection.py | 31 ++++++++++++--- asyncpg/protocol/record/recordobj.c | 47 ++++++++++++++-------- asyncpg/protocol/record/recordobj.h | 1 + tests/test_record.py | 61 +++++++++++++++++++++++++++++ 4 files changed, 118 insertions(+), 22 deletions(-) diff --git a/asyncpg/connection.py b/asyncpg/connection.py index d33db090..120e3623 100644 --- a/asyncpg/connection.py +++ b/asyncpg/connection.py @@ -354,6 +354,8 @@ async def _get_statement( ): if record_class is None: record_class = self._protocol.get_record_class() + else: + _check_record_class(record_class) if use_cache: statement = self._stmt_cache.get( @@ -1980,14 +1982,12 @@ async def connect(dsn=None, *, libpq-connect.html#LIBPQ-CONNSTRING """ if not issubclass(connection_class, Connection): - raise TypeError( + raise exceptions.InterfaceError( 'connection_class is expected to be a subclass of ' 'asyncpg.Connection, got {!r}'.format(connection_class)) - if not issubclass(record_class, protocol.Record): - raise TypeError( - 'record_class is expected to be a subclass of ' - 'asyncpg.Record, got {!r}'.format(record_class)) + if record_class is 
not protocol.Record: + _check_record_class(record_class) if loop is None: loop = asyncio.get_event_loop() @@ -2253,4 +2253,25 @@ def _extract_stack(limit=10): return ''.join(traceback.format_list(stack)) +def _check_record_class(record_class): + if record_class is protocol.Record: + pass + elif ( + isinstance(record_class, type) + and issubclass(record_class, protocol.Record) + ): + if ( + record_class.__new__ is not object.__new__ + or record_class.__init__ is not object.__init__ + ): + raise exceptions.InterfaceError( + 'record_class must not redefine __new__ or __init__' + ) + else: + raise exceptions.InterfaceError( + 'record_class is expected to be a subclass of ' + 'asyncpg.Record, got {!r}'.format(record_class) + ) + + _uid = 0 diff --git a/asyncpg/protocol/record/recordobj.c b/asyncpg/protocol/record/recordobj.c index b734ee9b..8ee27f59 100644 --- a/asyncpg/protocol/record/recordobj.c +++ b/asyncpg/protocol/record/recordobj.c @@ -227,26 +227,39 @@ record_richcompare(PyObject *v, PyObject *w, int op) Py_ssize_t vlen, wlen; int v_is_tuple = 0; int w_is_tuple = 0; + int v_is_record = 0; + int w_is_record = 0; int comp; - if (!ApgRecord_CheckExact(v)) { - if (!PyTuple_Check(v)) { - Py_RETURN_NOTIMPLEMENTED; - } + if (PyTuple_Check(v)) { v_is_tuple = 1; } + else if (ApgRecord_CheckExact(v)) { + v_is_record = 1; + } + else if (!ApgRecord_Check(v)) { + Py_RETURN_NOTIMPLEMENTED; + } - if (!ApgRecord_CheckExact(w)) { - if (!PyTuple_Check(w)) { - Py_RETURN_NOTIMPLEMENTED; - } + if (PyTuple_Check(w)) { w_is_tuple = 1; } + else if (ApgRecord_CheckExact(w)) { + w_is_record = 1; + } + else if (!ApgRecord_Check(w)) { + Py_RETURN_NOTIMPLEMENTED; + } + #define V_ITEM(i) \ - (v_is_tuple ? (PyTuple_GET_ITEM(v, i)) : (ApgRecord_GET_ITEM(v, i))) + (v_is_tuple ? \ + PyTuple_GET_ITEM(v, i) \ + : (v_is_record ? ApgRecord_GET_ITEM(v, i) : PySequence_GetItem(v, i))) #define W_ITEM(i) \ - (w_is_tuple ? (PyTuple_GET_ITEM(w, i)) : (ApgRecord_GET_ITEM(w, i))) + (w_is_tuple ? 
\ + PyTuple_GET_ITEM(w, i) \ + : (w_is_record ? ApgRecord_GET_ITEM(w, i) : PySequence_GetItem(w, i))) vlen = Py_SIZE(v); wlen = Py_SIZE(w); @@ -546,7 +559,7 @@ record_values(PyObject *o, PyObject *args) static PyObject * record_keys(PyObject *o, PyObject *args) { - if (!ApgRecord_CheckExact(o)) { + if (!ApgRecord_Check(o)) { PyErr_BadInternalCall(); return NULL; } @@ -558,7 +571,7 @@ record_keys(PyObject *o, PyObject *args) static PyObject * record_items(PyObject *o, PyObject *args) { - if (!ApgRecord_CheckExact(o)) { + if (!ApgRecord_Check(o)) { PyErr_BadInternalCall(); return NULL; } @@ -570,7 +583,7 @@ record_items(PyObject *o, PyObject *args) static int record_contains(ApgRecordObject *o, PyObject *arg) { - if (!ApgRecord_CheckExact(o)) { + if (!ApgRecord_Check(o)) { PyErr_BadInternalCall(); return -1; } @@ -686,7 +699,7 @@ record_iter_next(ApgRecordIterObject *it) seq = it->it_seq; if (seq == NULL) return NULL; - assert(ApgRecord_CheckExact(seq)); + assert(ApgRecord_Check(seq)); if (it->it_index < Py_SIZE(seq)) { item = ApgRecord_GET_ITEM(seq, it->it_index); @@ -742,7 +755,7 @@ record_iter(PyObject *seq) { ApgRecordIterObject *it; - if (!ApgRecord_CheckExact(seq)) { + if (!ApgRecord_Check(seq)) { PyErr_BadInternalCall(); return NULL; } @@ -800,7 +813,7 @@ record_items_next(ApgRecordItemsObject *it) if (seq == NULL) { return NULL; } - assert(ApgRecord_CheckExact(seq)); + assert(ApgRecord_Check(seq)); assert(it->it_key_iter != NULL); key = PyIter_Next(it->it_key_iter); @@ -880,7 +893,7 @@ record_new_items_iter(PyObject *seq) ApgRecordItemsObject *it; PyObject *key_iter; - if (!ApgRecord_CheckExact(seq)) { + if (!ApgRecord_Check(seq)) { PyErr_BadInternalCall(); return NULL; } diff --git a/asyncpg/protocol/record/recordobj.h b/asyncpg/protocol/record/recordobj.h index 2c6c1f1c..373c8967 100644 --- a/asyncpg/protocol/record/recordobj.h +++ b/asyncpg/protocol/record/recordobj.h @@ -37,6 +37,7 @@ extern PyTypeObject ApgRecordItems_Type; extern PyTypeObject 
ApgRecordDesc_Type; +#define ApgRecord_Check(self) PyObject_TypeCheck(self, &ApgRecord_Type) #define ApgRecord_CheckExact(o) (Py_TYPE(o) == &ApgRecord_Type) #define ApgRecordDesc_CheckExact(o) (Py_TYPE(o) == &ApgRecordDesc_Type) diff --git a/tests/test_record.py b/tests/test_record.py index 8abe90ee..5b85fb4d 100644 --- a/tests/test_record.py +++ b/tests/test_record.py @@ -513,3 +513,64 @@ async def test_record_subclass_04(self): r = await ps.fetch() self.assertIs(type(r[0]), asyncpg.Record) + + async def test_record_subclass_05(self): + class MyRecord(asyncpg.Record): + pass + + r = await self.con.fetchrow( + "SELECT 1 as a, '2' as b", + record_class=MyRecord, + ) + self.assertIsInstance(r, MyRecord) + + self.assertEqual(list(r.items()), [('a', 1), ('b', '2')]) + self.assertEqual(list(r.keys()), ['a', 'b']) + self.assertEqual(list(r.values()), [1, '2']) + self.assertIn('b', r) + self.assertEqual(next(iter(r)), 1) + + async def test_record_subclass_06(self): + class MyRecord(asyncpg.Record): + def __init__(self): + raise AssertionError('this is not supposed to be called') + + class MyRecord2(asyncpg.Record): + def __new__(cls): + raise AssertionError('this is not supposed to be called') + + class MyRecordBad: + pass + + with self.assertRaisesRegex( + asyncpg.InterfaceError, + 'record_class must not redefine __new__ or __init__', + ): + await self.con.fetchrow( + "SELECT 1 as a, '2' as b", + record_class=MyRecord, + ) + + with self.assertRaisesRegex( + asyncpg.InterfaceError, + 'record_class must not redefine __new__ or __init__', + ): + await self.con.fetchrow( + "SELECT 1 as a, '2' as b", + record_class=MyRecord2, + ) + + with self.assertRaisesRegex( + asyncpg.InterfaceError, + 'record_class is expected to be a subclass of asyncpg.Record', + ): + await self.con.fetchrow( + "SELECT 1 as a, '2' as b", + record_class=MyRecordBad, + ) + + with self.assertRaisesRegex( + asyncpg.InterfaceError, + 'record_class is expected to be a subclass of asyncpg.Record', + ): + 
await self.connect(record_class=MyRecordBad) From c443374e8b30dd6ae027b52c76b20b3a8125ae71 Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Sun, 20 Dec 2020 16:39:04 -0800 Subject: [PATCH 024/193] Adjust executemany() docs Remove the `copy_records_to_table()` note as `executemany()` is almost as efficient now and rephrase the note about the new behavior with respect to atomicity. --- asyncpg/connection.py | 16 +++++----------- 1 file changed, 5 insertions(+), 11 deletions(-) diff --git a/asyncpg/connection.py b/asyncpg/connection.py index 120e3623..e62928f7 100644 --- a/asyncpg/connection.py +++ b/asyncpg/connection.py @@ -321,23 +321,17 @@ async def executemany(self, command: str, args, *, timeout: float=None): :param float timeout: Optional timeout value in seconds. :return None: This method discards the results of the operations. - .. note:: - - When inserting a large number of rows, - use :meth:`Connection.copy_records_to_table()` instead, - it is much more efficient for this purpose. - .. versionadded:: 0.7.0 .. versionchanged:: 0.11.0 `timeout` became a keyword-only parameter. .. versionchanged:: 0.22.0 - The execution was changed to be in an implicit transaction if there - was no explicit transaction, so that it will no longer end up with - partial success. If you still need the previous behavior to - progressively execute many args, please use a loop with prepared - statement instead. + ``executemany()`` is now an atomic operation, which means that + either all executions succeed, or none at all. This is in contrast + to prior versions, where the effect of already-processed iterations + would remain in place when an error has occurred, unless + ``executemany()`` was called in a transaction. 
""" self._check_open() return await self._executemany(command, args, timeout) From 0dd636f73c19babdc5b1c5c755f84a7977d52856 Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Sun, 20 Dec 2020 16:51:56 -0800 Subject: [PATCH 025/193] Update README Use absolute URL to the performance chart so that it renders properly on PyPI and elsewhere. --- README.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.rst b/README.rst index ce6f8c0d..4ee68b4d 100644 --- a/README.rst +++ b/README.rst @@ -31,12 +31,12 @@ Performance In our testing asyncpg is, on average, **3x** faster than psycopg2 (and its asyncio variant -- aiopg). -.. image:: performance.png +.. image:: https://raw.githubusercontent.com/MagicStack/asyncpg/master/performance.png :target: https://gistpreview.github.io/?b8eac294ac85da177ff82f784ff2cb60 The above results are a geometric mean of benchmarks obtained with PostgreSQL `client driver benchmarking toolbench `_ -in November 2020. +in November 2020 (click on the chart to see full details). Features From ff5da5f96fb675211ab09e586c833acb87eec19a Mon Sep 17 00:00:00 2001 From: Igor Khlepitko Date: Wed, 10 Feb 2021 06:19:18 +0100 Subject: [PATCH 026/193] Avoid unnecessary overhead during connection reset (#648) UNLISTEN is now available in Hot Standby mode in all supported PostgreSQL versions, therefore there's no reason anymore to wrap it in DO block. This should significantly speed up connection reset. 
--- asyncpg/connection.py | 11 +---------- 1 file changed, 1 insertion(+), 10 deletions(-) diff --git a/asyncpg/connection.py b/asyncpg/connection.py index e62928f7..2b21247e 100644 --- a/asyncpg/connection.py +++ b/asyncpg/connection.py @@ -1501,16 +1501,7 @@ def _get_reset_query(self): if caps.sql_close_all: _reset_query.append('CLOSE ALL;') if caps.notifications and caps.plpgsql: - _reset_query.append(''' - DO $$ - BEGIN - PERFORM * FROM pg_listening_channels() LIMIT 1; - IF FOUND THEN - UNLISTEN *; - END IF; - END; - $$; - ''') + _reset_query.append('UNLISTEN *;') if caps.sql_reset: _reset_query.append('RESET ALL;') From 232adcff95becd7370b0f637e54335e4d4d8642f Mon Sep 17 00:00:00 2001 From: Tim Gates Date: Wed, 10 Feb 2021 16:19:55 +1100 Subject: [PATCH 027/193] docs: fix simple typo, verifiction -> verification (#682) There is a small typo in asyncpg/connection.py. Should read `verification` rather than `verifiction`. --- asyncpg/connection.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/asyncpg/connection.py b/asyncpg/connection.py index 2b21247e..2e86fde0 100644 --- a/asyncpg/connection.py +++ b/asyncpg/connection.py @@ -1881,7 +1881,7 @@ async def connect(dsn=None, *, if SSL connection fails - ``'allow'`` - currently equivalent to ``'prefer'`` - ``'require'`` - only try an SSL connection. Certificate - verifiction errors are ignored + verification errors are ignored - ``'verify-ca'`` - only try an SSL connection, and verify that the server certificate is issued by a trusted certificate authority (CA) From 022d0b5fe8de08778061e8a4cdd52d97489028f8 Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Tue, 9 Feb 2021 21:21:47 -0800 Subject: [PATCH 028/193] Don't build aarch64 wheels for now The porting of arm64 builds to Github Actions uncovered an architecture-specific (or, perhaps, virtualization-specific) bug in asyncpg, so, rather than blocking the release, drop aarch64 wheels for now. 
Also, `manylinux2014_aarch64` is not considered stable yet (see pypa/manylinux#84) and so the compiled wheels might not even work correctly on all aarch64 boxes. --- .github/workflows/release.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 870d3551..0b480cb9 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -74,7 +74,7 @@ jobs: matrix: python-version: [3.5, 3.6, 3.7, 3.8, 3.9] os: [ubuntu-20.04, macos-latest, windows-latest] - arch: [x86_64, aarch64] + arch: [x86_64] exclude: # Python 3.5 is unable to properly # find the recent VS tooling From 161607603adae789d4392502859cc7bbdec7240b Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Sun, 20 Dec 2020 16:16:38 -0800 Subject: [PATCH 029/193] asyncpg v0.22.0 A new asyncpg release is here. Notable additions include Python 3.9 support, support for recently added PostgreSQL types like `jsonpath`, and last but not least, vastly improved `executemany()` performance. Importantly, `executemany()` is also now _atomic_, which means that either all iterations succeed, or none at all, whereas previously partial results would have remained in place, unless `executemany()` was called in a transaction. There is also the usual assortment of improvements and bugfixes, see the details below. This is the last release of asyncpg that supports Python 3.5, which has reached EOL last September. 
Improvements ------------ * Vastly speedup executemany by batching protocol messages (#295) (by @fantix in 690048db for #295) * Allow using custom `Record` class (by @elprans in db4f1a6c for #577) * Add Python 3.9 support (#610) (by @elprans in c05d7260 for #610) * Prefer SSL connections by default (#660) (by @elprans in 16183aa0 for #660) * Add codecs for a bunch of new builtin types (#665) (by @elprans in b53f0384 for #665) * Expose Pool as `asyncpg.Pool` (#669) (by @rugleb in 0e0eb8d3 for #669) * Avoid unnecessary overhead during connection reset (#648) (by @kitogo in ff5da5f9 for #648) Fixes ----- * Add a workaround for bpo-37658 (by @elprans in 2bac166c for #21894) * Fix wrong default transaction isolation level (#622) (by @fantix in 4a627d55 for #622) * Fix `set_type_codec()` to accept standard SQL type names (#619) (by @elprans in 68b40cbf for #619) * Ignore custom data codec for internal introspection (#618) (by @fantix in e064f59e for #618) * Fix null/NULL quoting in array text encoder (#627) (by @fantix in 92aa8062 for #627) * Fix link in connect docstring (#653) (by @samuelcolvin in 8b313bde for #653) * Make asyncpg work with pyinstaller (#651) (by @Atem18 in 5ddabb19 for #651) * Fix possible `AttributeError` exception in `ConnectionSettings` (#632) (by @petriborg in 0d231820 for #632) * Prohibit custom codecs on domains (by @elprans in 50f964fc for #457) * Raise proper error on anonymous composite input (tuple arguments) (#664) (by @elprans in 7252dbeb for #664) * Fix incorrect application of custom codecs in some cases (#662) (by @elprans in 50f65fbb for #662) --- .github/workflows/release.yml | 7 +++---- asyncpg/_version.py | 2 +- 2 files changed, 4 insertions(+), 5 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 0b480cb9..6a578425 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -295,7 +295,6 @@ jobs: release_name: v${{ steps.relver.outputs.version }} target: ${{ 
github.event.pull_request.base.ref }} body: ${{ github.event.pull_request.body }} - draft: true - run: | ls -al dist/ @@ -304,6 +303,6 @@ jobs: uses: pypa/gh-action-pypi-publish@master with: user: __token__ - # password: ${{ secrets.PYPI_TOKEN }} - password: ${{ secrets.TEST_PYPI_TOKEN }} - repository_url: https://test.pypi.org/legacy/ + password: ${{ secrets.PYPI_TOKEN }} + # password: ${{ secrets.TEST_PYPI_TOKEN }} + # repository_url: https://test.pypi.org/legacy/ diff --git a/asyncpg/_version.py b/asyncpg/_version.py index 1d2d172d..c5fffb10 100644 --- a/asyncpg/_version.py +++ b/asyncpg/_version.py @@ -10,4 +10,4 @@ # supported platforms, publish the packages on PyPI, merge the PR # to the target branch, create a Git tag pointing to the commit. -__version__ = '0.22.0.dev0' +__version__ = '0.22.0' From a308a9736ebff5f8d04d9072f375496a66f6ba33 Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Tue, 9 Feb 2021 22:01:19 -0800 Subject: [PATCH 030/193] Post-release version bump --- asyncpg/_version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/asyncpg/_version.py b/asyncpg/_version.py index c5fffb10..32b8dffc 100644 --- a/asyncpg/_version.py +++ b/asyncpg/_version.py @@ -10,4 +10,4 @@ # supported platforms, publish the packages on PyPI, merge the PR # to the target branch, create a Git tag pointing to the commit. 
-__version__ = '0.22.0' +__version__ = '0.23.0.dev0' From d6eea8ede0fef9c41196e0d48999da378e4434a4 Mon Sep 17 00:00:00 2001 From: BeatButton Date: Mon, 8 Mar 2021 15:24:17 -0700 Subject: [PATCH 031/193] Avoid TypeError in Transaction.__repr__ (#703) Only add isolation to repr if it's not None --- asyncpg/transaction.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/asyncpg/transaction.py b/asyncpg/transaction.py index 4c799925..61e23f18 100644 --- a/asyncpg/transaction.py +++ b/asyncpg/transaction.py @@ -234,7 +234,8 @@ def __repr__(self): attrs = [] attrs.append('state:{}'.format(self._state.name.lower())) - attrs.append(self._isolation) + if self._isolation is not None: + attrs.append(self._isolation) if self._readonly: attrs.append('readonly') if self._deferrable: From 359a34c4da1b90135263f15fc35ec397cc0ed349 Mon Sep 17 00:00:00 2001 From: Fantix King Date: Tue, 9 Mar 2021 11:31:56 -0600 Subject: [PATCH 032/193] Feed memoryview to writelines() (#715) This fixes an issue in 0.22.0 where we passed WriteBuffer to writelines by mistake, which leads to an error under SSL and uvloop - the implementation that calls len() on each line of writelines(). 
Fixes: #700 --- asyncpg/protocol/coreproto.pyx | 4 ++-- tests/test_connect.py | 24 ++++++++++++++++++++++++ 2 files changed, 26 insertions(+), 2 deletions(-) diff --git a/asyncpg/protocol/coreproto.pyx b/asyncpg/protocol/coreproto.pyx index 12ebf6c6..e7d7c2bc 100644 --- a/asyncpg/protocol/coreproto.pyx +++ b/asyncpg/protocol/coreproto.pyx @@ -952,7 +952,7 @@ cdef class CoreProtocol: else: # otherwise, append SYNC and send the buffers packet.write_bytes(SYNC_MESSAGE) - buffers.append(packet) + buffers.append(memoryview(packet)) self._writelines(buffers) return False @@ -976,7 +976,7 @@ cdef class CoreProtocol: ) # collected one buffer - buffers.append(packet) + buffers.append(memoryview(packet)) # write to the wire, and signal the caller for more to send self._writelines(buffers) diff --git a/tests/test_connect.py b/tests/test_connect.py index af927426..5adb977d 100644 --- a/tests/test_connect.py +++ b/tests/test_connect.py @@ -1247,6 +1247,30 @@ async def worker(): await asyncio.gather(*tasks) await pool.close() + async def test_executemany_uvloop_ssl_issue_700(self): + ssl_context = ssl.SSLContext(ssl.PROTOCOL_SSLv23) + ssl_context.load_verify_locations(SSL_CA_CERT_FILE) + + con = await self.connect( + host='localhost', + user='ssl_user', + ssl=ssl_context) + + try: + await con.execute('CREATE TABLE test_many (v int)') + await con.executemany( + 'INSERT INTO test_many VALUES ($1)', + [(x + 1,) for x in range(100)] + ) + self.assertEqual( + await con.fetchval('SELECT sum(v) FROM test_many'), 5050 + ) + finally: + try: + await con.execute('DROP TABLE test_many') + finally: + await con.close() + class TestConnectionGC(tb.ClusterTestCase): From 53bea985bfda80f0af9eb3659f8e4568677e27e8 Mon Sep 17 00:00:00 2001 From: Fernando ike Date: Wed, 17 Mar 2021 17:25:38 -0300 Subject: [PATCH 033/193] Updated int the doc PostgreSQL versions supported (#717) The README.md says that PostgreSQL versions supported are 9.5 to 13, copied to the doc. 
Co-authored-by: Fernando Ike --- docs/index.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/index.rst b/docs/index.rst index ecf40acb..77bf19f3 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -15,7 +15,7 @@ of PostgreSQL server binary protocol for use with Python's ``asyncio`` framework. **asyncpg** requires Python 3.5 or later and is supported for PostgreSQL -versions 9.2 to 12. +versions 9.5 to 13. Contents -------- From 93a238c0730dd90e96f4a1676ce1c508c4c9bd8f Mon Sep 17 00:00:00 2001 From: Fantix King Date: Tue, 23 Mar 2021 22:59:32 -0400 Subject: [PATCH 034/193] Upgrade pip and setuptools in workflow Python 3.5 on GitHub Action ran into some certificate issue that newer version of pip doesn't have, so upgrading to the latest by default. --- .github/workflows/tests.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index d92571a9..5b021809 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -62,6 +62,7 @@ jobs: - name: Install Python Deps if: steps.release.outputs.version == 0 run: | + pip install -U pip setuptools pip install -e .[test] - name: Test @@ -117,6 +118,7 @@ jobs: - name: Install Python Deps if: steps.release.outputs.version == 0 run: | + pip install -U pip setuptools pip install -e .[test] - name: Test From 075114c195e9eb4e81c8365d81540beefb46065c Mon Sep 17 00:00:00 2001 From: Fantix King Date: Tue, 23 Mar 2021 23:40:00 -0400 Subject: [PATCH 035/193] Add sslmode=allow support and fix =prefer retry (#720) We didn't really retry the connection without SSL if the first SSL connection fails under sslmode=prefer, that led to an issue when the server has SSL support but explicitly denies SSL connection through pg_hba.conf. This commit adds a retry in a new connection, which makes it easy to implement the sslmode=allow retry. 
Fixes #716 --- asyncpg/connect_utils.py | 148 +++++++++++++++++------- asyncpg/connection.py | 3 +- asyncpg/protocol/protocol.pxd | 2 + asyncpg/protocol/protocol.pyx | 10 ++ tests/test_connect.py | 208 +++++++++++++++++++++++++++++++--- 5 files changed, 314 insertions(+), 57 deletions(-) diff --git a/asyncpg/connect_utils.py b/asyncpg/connect_utils.py index acfe87e4..3fd64252 100644 --- a/asyncpg/connect_utils.py +++ b/asyncpg/connect_utils.py @@ -7,6 +7,7 @@ import asyncio import collections +import enum import functools import getpass import os @@ -28,6 +29,21 @@ from . import protocol +class SSLMode(enum.IntEnum): + disable = 0 + allow = 1 + prefer = 2 + require = 3 + verify_ca = 4 + verify_full = 5 + + @classmethod + def parse(cls, sslmode): + if isinstance(sslmode, cls): + return sslmode + return getattr(cls, sslmode.replace('-', '_')) + + _ConnectionParameters = collections.namedtuple( 'ConnectionParameters', [ @@ -35,7 +51,7 @@ 'password', 'database', 'ssl', - 'ssl_is_advisory', + 'sslmode', 'connect_timeout', 'server_settings', ]) @@ -402,46 +418,29 @@ def _parse_connect_dsn_and_args(*, dsn, host, port, user, if ssl is None and have_tcp_addrs: ssl = 'prefer' - # ssl_is_advisory is only allowed to come from the sslmode parameter. - ssl_is_advisory = None - if isinstance(ssl, str): - SSLMODES = { - 'disable': 0, - 'allow': 1, - 'prefer': 2, - 'require': 3, - 'verify-ca': 4, - 'verify-full': 5, - } + if isinstance(ssl, (str, SSLMode)): try: - sslmode = SSLMODES[ssl] - except KeyError: - modes = ', '.join(SSLMODES.keys()) + sslmode = SSLMode.parse(ssl) + except AttributeError: + modes = ', '.join(m.name.replace('_', '-') for m in SSLMode) raise exceptions.InterfaceError( '`sslmode` parameter must be one of: {}'.format(modes)) - # sslmode 'allow' is currently handled as 'prefer' because we're - # missing the "retry with SSL" behavior for 'allow', but do have the - # "retry without SSL" behavior for 'prefer'. 
- # Not changing 'allow' to 'prefer' here would be effectively the same - # as changing 'allow' to 'disable'. - if sslmode == SSLMODES['allow']: - sslmode = SSLMODES['prefer'] - # docs at https://www.postgresql.org/docs/10/static/libpq-connect.html # Not implemented: sslcert & sslkey & sslrootcert & sslcrl params. - if sslmode <= SSLMODES['allow']: + if sslmode < SSLMode.allow: ssl = False - ssl_is_advisory = sslmode >= SSLMODES['allow'] else: ssl = ssl_module.create_default_context() - ssl.check_hostname = sslmode >= SSLMODES['verify-full'] + ssl.check_hostname = sslmode >= SSLMode.verify_full ssl.verify_mode = ssl_module.CERT_REQUIRED - if sslmode <= SSLMODES['require']: + if sslmode <= SSLMode.require: ssl.verify_mode = ssl_module.CERT_NONE - ssl_is_advisory = sslmode <= SSLMODES['prefer'] elif ssl is True: ssl = ssl_module.create_default_context() + sslmode = SSLMode.verify_full + else: + sslmode = SSLMode.disable if server_settings is not None and ( not isinstance(server_settings, dict) or @@ -453,7 +452,7 @@ def _parse_connect_dsn_and_args(*, dsn, host, port, user, params = _ConnectionParameters( user=user, password=password, database=database, ssl=ssl, - ssl_is_advisory=ssl_is_advisory, connect_timeout=connect_timeout, + sslmode=sslmode, connect_timeout=connect_timeout, server_settings=server_settings) return addrs, params @@ -520,9 +519,8 @@ def data_received(self, data): data == b'N'): # ssl_is_advisory will imply that ssl.verify_mode == CERT_NONE, # since the only way to get ssl_is_advisory is from - # sslmode=prefer (or sslmode=allow). But be extra sure to - # disallow insecure connections when the ssl context asks for - # real security. + # sslmode=prefer. But be extra sure to disallow insecure + # connections when the ssl context asks for real security. 
self.on_data.set_result(False) else: self.on_data.set_exception( @@ -566,6 +564,7 @@ async def _create_ssl_connection(protocol_factory, host, port, *, new_tr = tr pg_proto = protocol_factory() + pg_proto.is_ssl = do_ssl_upgrade pg_proto.connection_made(new_tr) new_tr.set_protocol(pg_proto) @@ -584,7 +583,9 @@ async def _create_ssl_connection(protocol_factory, host, port, *, tr.close() try: - return await conn_factory(sock=sock) + new_tr, pg_proto = await conn_factory(sock=sock) + pg_proto.is_ssl = do_ssl_upgrade + return new_tr, pg_proto except (Exception, asyncio.CancelledError): sock.close() raise @@ -605,8 +606,6 @@ async def _connect_addr( if timeout <= 0: raise asyncio.TimeoutError - connected = _create_future(loop) - params_input = params if callable(params.password): if inspect.iscoroutinefunction(params.password): @@ -615,6 +614,49 @@ async def _connect_addr( password = params.password() params = params._replace(password=password) + args = (addr, loop, config, connection_class, record_class, params_input) + + # prepare the params (which attempt has ssl) for the 2 attempts + if params.sslmode == SSLMode.allow: + params_retry = params + params = params._replace(ssl=None) + elif params.sslmode == SSLMode.prefer: + params_retry = params._replace(ssl=None) + else: + # skip retry if we don't have to + return await __connect_addr(params, timeout, False, *args) + + # first attempt + before = time.monotonic() + try: + return await __connect_addr(params, timeout, True, *args) + except _Retry: + pass + + # second attempt + timeout -= time.monotonic() - before + if timeout <= 0: + raise asyncio.TimeoutError + else: + return await __connect_addr(params_retry, timeout, False, *args) + + +class _Retry(Exception): + pass + + +async def __connect_addr( + params, + timeout, + retry, + addr, + loop, + config, + connection_class, + record_class, + params_input, +): + connected = _create_future(loop) proto_factory = lambda: protocol.Protocol( addr, connected, params, 
record_class, loop) @@ -625,7 +667,7 @@ async def _connect_addr( elif params.ssl: connector = _create_ssl_connection( proto_factory, *addr, loop=loop, ssl_context=params.ssl, - ssl_is_advisory=params.ssl_is_advisory) + ssl_is_advisory=params.sslmode == SSLMode.prefer) else: connector = loop.create_connection(proto_factory, *addr) @@ -638,6 +680,35 @@ async def _connect_addr( if timeout <= 0: raise asyncio.TimeoutError await compat.wait_for(connected, timeout=timeout) + except ( + exceptions.InvalidAuthorizationSpecificationError, + exceptions.ConnectionDoesNotExistError, # seen on Windows + ): + tr.close() + + # retry=True here is a redundant check because we don't want to + # accidentally raise the internal _Retry to the outer world + if retry and ( + params.sslmode == SSLMode.allow and not pr.is_ssl or + params.sslmode == SSLMode.prefer and pr.is_ssl + ): + # Trigger retry when: + # 1. First attempt with sslmode=allow, ssl=None failed + # 2. First attempt with sslmode=prefer, ssl=ctx failed while the + # server claimed to support SSL (returning "S" for SSLRequest) + # (likely because pg_hba.conf rejected the connection) + raise _Retry() + + else: + # but will NOT retry if: + # 1. First attempt with sslmode=prefer failed but the server + # doesn't support SSL (returning 'N' for SSLRequest), because + # we already tried to connect without SSL thru ssl_is_advisory + # 2. Second attempt with sslmode=prefer, ssl=None failed + # 3. Second attempt with sslmode=allow, ssl=ctx failed + # 4. 
Any other sslmode + raise + except (Exception, asyncio.CancelledError): tr.close() raise @@ -684,6 +755,7 @@ class CancelProto(asyncio.Protocol): def __init__(self): self.on_disconnect = _create_future(loop) + self.is_ssl = False def connection_lost(self, exc): if not self.on_disconnect.done(): @@ -692,13 +764,13 @@ def connection_lost(self, exc): if isinstance(addr, str): tr, pr = await loop.create_unix_connection(CancelProto, addr) else: - if params.ssl: + if params.ssl and params.sslmode != SSLMode.allow: tr, pr = await _create_ssl_connection( CancelProto, *addr, loop=loop, ssl_context=params.ssl, - ssl_is_advisory=params.ssl_is_advisory) + ssl_is_advisory=params.sslmode == SSLMode.prefer) else: tr, pr = await loop.create_connection( CancelProto, *addr) diff --git a/asyncpg/connection.py b/asyncpg/connection.py index 2e86fde0..043c6ddd 100644 --- a/asyncpg/connection.py +++ b/asyncpg/connection.py @@ -1879,7 +1879,8 @@ async def connect(dsn=None, *, - ``'disable'`` - SSL is disabled (equivalent to ``False``) - ``'prefer'`` - try SSL first, fallback to non-SSL connection if SSL connection fails - - ``'allow'`` - currently equivalent to ``'prefer'`` + - ``'allow'`` - try without SSL first, then retry with SSL if the first + attempt fails. - ``'require'`` - only try an SSL connection. 
Certificate verification errors are ignored - ``'verify-ca'`` - only try an SSL connection, and verify diff --git a/asyncpg/protocol/protocol.pxd b/asyncpg/protocol/protocol.pxd index 772d6432..5f144e55 100644 --- a/asyncpg/protocol/protocol.pxd +++ b/asyncpg/protocol/protocol.pxd @@ -52,6 +52,8 @@ cdef class BaseProtocol(CoreProtocol): readonly uint64_t queries_count + bint _is_ssl + PreparedStatementState statement cdef get_connection(self) diff --git a/asyncpg/protocol/protocol.pyx b/asyncpg/protocol/protocol.pyx index 4df256e6..3a1594a5 100644 --- a/asyncpg/protocol/protocol.pyx +++ b/asyncpg/protocol/protocol.pyx @@ -103,6 +103,8 @@ cdef class BaseProtocol(CoreProtocol): self.queries_count = 0 + self._is_ssl = False + try: self.create_future = loop.create_future except AttributeError: @@ -943,6 +945,14 @@ cdef class BaseProtocol(CoreProtocol): def resume_writing(self): self.writing_allowed.set() + @property + def is_ssl(self): + return self._is_ssl + + @is_ssl.setter + def is_ssl(self, value): + self._is_ssl = value + class Timer: def __init__(self, budget): diff --git a/tests/test_connect.py b/tests/test_connect.py index 5adb977d..7b08f93d 100644 --- a/tests/test_connect.py +++ b/tests/test_connect.py @@ -24,6 +24,7 @@ from asyncpg import connect_utils from asyncpg import cluster as pg_cluster from asyncpg import exceptions +from asyncpg.connect_utils import SSLMode from asyncpg.serverversion import split_server_version_string _system = platform.uname().system @@ -308,6 +309,7 @@ class TestConnectParams(tb.TestCase): TESTS = [ { + 'name': 'all_env_default_ssl', 'env': { 'PGUSER': 'user', 'PGDATABASE': 'testdb', @@ -320,10 +322,11 @@ class TestConnectParams(tb.TestCase): 'password': 'passw', 'database': 'testdb', 'ssl': True, - 'ssl_is_advisory': True}) + 'sslmode': SSLMode.prefer}) }, { + 'name': 'params_override_env', 'env': { 'PGUSER': 'user', 'PGDATABASE': 'testdb', @@ -345,6 +348,56 @@ class TestConnectParams(tb.TestCase): }, { + 'name': 
'params_override_env_and_dsn', + 'env': { + 'PGUSER': 'user', + 'PGDATABASE': 'testdb', + 'PGPASSWORD': 'passw', + 'PGHOST': 'host', + 'PGPORT': '123', + 'PGSSLMODE': 'allow' + }, + + 'dsn': 'postgres://user3:123123@localhost/abcdef', + + 'host': 'host2', + 'port': '456', + 'user': 'user2', + 'password': 'passw2', + 'database': 'db2', + 'ssl': False, + + 'result': ([('host2', 456)], { + 'user': 'user2', + 'password': 'passw2', + 'database': 'db2', + 'sslmode': SSLMode.disable, + 'ssl': False}) + }, + + { + 'name': 'dsn_overrides_env_partially', + 'env': { + 'PGUSER': 'user', + 'PGDATABASE': 'testdb', + 'PGPASSWORD': 'passw', + 'PGHOST': 'host', + 'PGPORT': '123', + 'PGSSLMODE': 'allow' + }, + + 'dsn': 'postgres://user3:123123@localhost:5555/abcdef', + + 'result': ([('localhost', 5555)], { + 'user': 'user3', + 'password': '123123', + 'database': 'abcdef', + 'ssl': True, + 'sslmode': SSLMode.allow}) + }, + + { + 'name': 'params_override_env_and_dsn_ssl_prefer', 'env': { 'PGUSER': 'user', 'PGDATABASE': 'testdb', @@ -367,10 +420,12 @@ class TestConnectParams(tb.TestCase): 'user': 'user2', 'password': 'passw2', 'database': 'db2', + 'sslmode': SSLMode.disable, 'ssl': False}) }, { + 'name': 'dsn_overrides_env_partially_ssl_prefer', 'env': { 'PGUSER': 'user', 'PGDATABASE': 'testdb', @@ -387,10 +442,11 @@ class TestConnectParams(tb.TestCase): 'password': '123123', 'database': 'abcdef', 'ssl': True, - 'ssl_is_advisory': True}) + 'sslmode': SSLMode.prefer}) }, { + 'name': 'dsn_only', 'dsn': 'postgres://user3:123123@localhost:5555/abcdef', 'result': ([('localhost', 5555)], { 'user': 'user3', @@ -399,6 +455,7 @@ class TestConnectParams(tb.TestCase): }, { + 'name': 'dsn_only_multi_host', 'dsn': 'postgresql://user@host1,host2/db', 'result': ([('host1', 5432), ('host2', 5432)], { 'database': 'db', @@ -407,6 +464,7 @@ class TestConnectParams(tb.TestCase): }, { + 'name': 'dsn_only_multi_host_and_port', 'dsn': 'postgresql://user@host1:1111,host2:2222/db', 'result': ([('host1', 1111), 
('host2', 2222)], { 'database': 'db', @@ -415,6 +473,7 @@ class TestConnectParams(tb.TestCase): }, { + 'name': 'dsn_combines_env_multi_host', 'env': { 'PGHOST': 'host1:1111,host2:2222', 'PGUSER': 'foo', @@ -427,6 +486,7 @@ class TestConnectParams(tb.TestCase): }, { + 'name': 'dsn_multi_host_combines_env', 'env': { 'PGUSER': 'foo', }, @@ -438,6 +498,7 @@ class TestConnectParams(tb.TestCase): }, { + 'name': 'params_multi_host_dsn_env_mix', 'env': { 'PGUSER': 'foo', }, @@ -450,6 +511,7 @@ class TestConnectParams(tb.TestCase): }, { + 'name': 'params_combine_dsn_settings_override_and_ssl', 'dsn': 'postgresql://user3:123123@localhost:5555/' 'abcdef?param=sss¶m=123&host=testhost&user=testuser' '&port=2222&database=testdb&sslmode=require', @@ -464,10 +526,11 @@ class TestConnectParams(tb.TestCase): 'password': 'ask', 'database': 'db', 'ssl': True, - 'ssl_is_advisory': False}) + 'sslmode': SSLMode.require}) }, { + 'name': 'params_settings_and_ssl_override_dsn', 'dsn': 'postgresql://user3:123123@localhost:5555/' 'abcdef?param=sss¶m=123&host=testhost&user=testuser' '&port=2222&database=testdb&sslmode=disable', @@ -483,10 +546,12 @@ class TestConnectParams(tb.TestCase): 'user': 'me', 'password': 'ask', 'database': 'db', + 'sslmode': SSLMode.verify_full, 'ssl': True}) }, { + 'name': 'dsn_only_unix', 'dsn': 'postgresql:///dbname?host=/unix_sock/test&user=spam', 'result': ([os.path.join('/unix_sock/test', '.s.PGSQL.5432')], { 'user': 'spam', @@ -494,6 +559,7 @@ class TestConnectParams(tb.TestCase): }, { + 'name': 'dsn_only_quoted', 'dsn': 'postgresql://us%40r:p%40ss@h%40st1,h%40st2:543%33/d%62', 'result': ( [('h@st1', 5432), ('h@st2', 5433)], @@ -506,6 +572,7 @@ class TestConnectParams(tb.TestCase): }, { + 'name': 'dsn_only_unquoted_host', 'dsn': 'postgresql://user:p@ss@host/db', 'result': ( [('ss@host', 5432)], @@ -518,6 +585,7 @@ class TestConnectParams(tb.TestCase): }, { + 'name': 'dsn_only_quoted_params', 'dsn': 'postgresql:///d%62?user=us%40r&host=h%40st&port=543%33', 
'result': ( [('h@st', 5433)], @@ -529,10 +597,12 @@ class TestConnectParams(tb.TestCase): }, { + 'name': 'dsn_only_illegal_protocol', 'dsn': 'pq:///dbname?host=/unix_sock/test&user=spam', 'error': (ValueError, 'invalid DSN') }, { + 'name': 'dsn_params_ports_mismatch_dsn_multi_hosts', 'dsn': 'postgresql://host1,host2,host3/db', 'port': [111, 222], 'error': ( @@ -541,17 +611,20 @@ class TestConnectParams(tb.TestCase): ) }, { + 'name': 'dsn_only_quoted_unix_host_port_in_params', 'dsn': 'postgres://user@?port=56226&host=%2Ftmp', 'result': ( [os.path.join('/tmp', '.s.PGSQL.56226')], { 'user': 'user', 'database': 'user', + 'sslmode': SSLMode.disable, 'ssl': None } ) }, { + 'name': 'dsn_only_cloudsql', 'dsn': 'postgres:///db?host=/cloudsql/' 'project:region:instance-name&user=spam', 'result': ( @@ -565,6 +638,7 @@ class TestConnectParams(tb.TestCase): ) }, { + 'name': 'dsn_only_cloudsql_unix_and_tcp', 'dsn': 'postgres:///db?host=127.0.0.1:5432,/cloudsql/' 'project:region:instance-name,localhost:5433&user=spam', 'result': ( @@ -579,7 +653,7 @@ class TestConnectParams(tb.TestCase): 'user': 'spam', 'database': 'db', 'ssl': True, - 'ssl_is_advisory': True + 'sslmode': SSLMode.prefer, } ) }, @@ -663,7 +737,7 @@ def run_testcase(self, testcase): # Avoid the hassle of specifying the default SSL mode # unless explicitly tested for. 
params.pop('ssl', None) - params.pop('ssl_is_advisory', None) + params.pop('sslmode', None) self.assertEqual(expected, result, 'Testcase: {}'.format(testcase)) @@ -1050,6 +1124,7 @@ async def verify_works(sslmode): dsn='postgresql://foo/?sslmode=' + sslmode, host='localhost') self.assertEqual(await con.fetchval('SELECT 42'), 42) + self.assertFalse(con._protocol.is_ssl) finally: if con: await con.close() @@ -1058,7 +1133,7 @@ async def verify_fails(sslmode): con = None try: with self.assertRaises(ConnectionError): - await self.connect( + con = await self.connect( dsn='postgresql://foo/?sslmode=' + sslmode, host='localhost') await con.fetchval('SELECT 42') @@ -1082,8 +1157,7 @@ async def test_connection_implicit_host(self): await con.close() -@unittest.skipIf(os.environ.get('PGHOST'), 'unmanaged cluster') -class TestSSLConnection(tb.ConnectedTestCase): +class BaseTestSSLConnection(tb.ConnectedTestCase): @classmethod def get_server_settings(cls): conf = super().get_server_settings() @@ -1109,15 +1183,7 @@ def setUp(self): create_script = [] create_script.append('CREATE ROLE ssl_user WITH LOGIN;') - self.cluster.add_hba_entry( - type='hostssl', address=ipaddress.ip_network('127.0.0.0/24'), - database='postgres', user='ssl_user', - auth_method='trust') - - self.cluster.add_hba_entry( - type='hostssl', address=ipaddress.ip_network('::1/128'), - database='postgres', user='ssl_user', - auth_method='trust') + self._add_hba_entry() # Put hba changes into effect self.cluster.reload() @@ -1136,6 +1202,23 @@ def tearDown(self): super().tearDown() + def _add_hba_entry(self): + raise NotImplementedError() + + +@unittest.skipIf(os.environ.get('PGHOST'), 'unmanaged cluster') +class TestSSLConnection(BaseTestSSLConnection): + def _add_hba_entry(self): + self.cluster.add_hba_entry( + type='hostssl', address=ipaddress.ip_network('127.0.0.0/24'), + database='postgres', user='ssl_user', + auth_method='trust') + + self.cluster.add_hba_entry( + type='hostssl', 
address=ipaddress.ip_network('::1/128'), + database='postgres', user='ssl_user', + auth_method='trust') + async def test_ssl_connection_custom_context(self): ssl_context = ssl.SSLContext(ssl.PROTOCOL_SSLv23) ssl_context.load_verify_locations(SSL_CA_CERT_FILE) @@ -1164,6 +1247,7 @@ async def verify_works(sslmode, *, host='localhost'): host=host, user='ssl_user') self.assertEqual(await con.fetchval('SELECT 42'), 42) + self.assertTrue(con._protocol.is_ssl) finally: if con: await con.close() @@ -1176,7 +1260,7 @@ async def verify_fails(sslmode, *, host='localhost', try: self.loop.set_exception_handler(lambda *args: None) with self.assertRaises(exn_type): - await self.connect( + con = await self.connect( dsn='postgresql://foo/?sslmode=' + sslmode, host=host, user='ssl_user') @@ -1272,6 +1356,94 @@ async def test_executemany_uvloop_ssl_issue_700(self): await con.close() +@unittest.skipIf(os.environ.get('PGHOST'), 'unmanaged cluster') +class TestNoSSLConnection(BaseTestSSLConnection): + def _add_hba_entry(self): + self.cluster.add_hba_entry( + type='hostnossl', address=ipaddress.ip_network('127.0.0.0/24'), + database='postgres', user='ssl_user', + auth_method='trust') + + self.cluster.add_hba_entry( + type='hostnossl', address=ipaddress.ip_network('::1/128'), + database='postgres', user='ssl_user', + auth_method='trust') + + async def test_nossl_connection_sslmode(self): + async def verify_works(sslmode, *, host='localhost'): + con = None + try: + con = await self.connect( + dsn='postgresql://foo/?sslmode=' + sslmode, + host=host, + user='ssl_user') + self.assertEqual(await con.fetchval('SELECT 42'), 42) + self.assertFalse(con._protocol.is_ssl) + finally: + if con: + await con.close() + + async def verify_fails(sslmode, *, host='localhost', + exn_type=ssl.SSLError): + # XXX: uvloop artifact + old_handler = self.loop.get_exception_handler() + con = None + try: + self.loop.set_exception_handler(lambda *args: None) + with self.assertRaises(exn_type): + con = await 
self.connect( + dsn='postgresql://foo/?sslmode=' + sslmode, + host=host, + user='ssl_user') + await con.fetchval('SELECT 42') + finally: + if con: + await con.close() + self.loop.set_exception_handler(old_handler) + + invalid_auth_err = asyncpg.InvalidAuthorizationSpecificationError + await verify_works('disable') + await verify_works('allow') + await verify_works('prefer') + await verify_fails('require', exn_type=invalid_auth_err) + await verify_fails('verify-ca') + await verify_fails('verify-full') + + async def test_nossl_connection_prefer_cancel(self): + con = await self.connect( + dsn='postgresql://foo/?sslmode=prefer', + host='localhost', + user='ssl_user') + self.assertFalse(con._protocol.is_ssl) + with self.assertRaises(asyncio.TimeoutError): + await con.execute('SELECT pg_sleep(5)', timeout=0.5) + val = await con.fetchval('SELECT 123') + self.assertEqual(val, 123) + + async def test_nossl_connection_pool(self): + pool = await self.create_pool( + host='localhost', + user='ssl_user', + database='postgres', + min_size=5, + max_size=10, + ssl='prefer') + + async def worker(): + async with pool.acquire() as con: + self.assertFalse(con._protocol.is_ssl) + self.assertEqual(await con.fetchval('SELECT 42'), 42) + + with self.assertRaises(asyncio.TimeoutError): + await con.execute('SELECT pg_sleep(5)', timeout=0.5) + + self.assertEqual(await con.fetchval('SELECT 43'), 43) + + tasks = [worker() for _ in range(100)] + await asyncio.gather(*tasks) + await pool.close() + + class TestConnectionGC(tb.ClusterTestCase): async def _run_no_explicit_close_test(self): From 6cfdb3a386225d62716cf4f4aef2352f4f26d531 Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Sat, 24 Apr 2021 13:33:58 -0700 Subject: [PATCH 036/193] Fix CI on Windows (#748) Due to executable file locking `pip install -U pip` doesn't work on Windows, one has to use `python -m pip install -U pip` instead. 
--- .github/workflows/tests.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 5b021809..469af2b2 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -62,7 +62,7 @@ jobs: - name: Install Python Deps if: steps.release.outputs.version == 0 run: | - pip install -U pip setuptools + python -m pip install -U pip setuptools pip install -e .[test] - name: Test @@ -118,7 +118,7 @@ jobs: - name: Install Python Deps if: steps.release.outputs.version == 0 run: | - pip install -U pip setuptools + python -m pip install -U pip setuptools pip install -e .[test] - name: Test From bc4127f44d3c7fb0e9813cf7deba023569039a93 Mon Sep 17 00:00:00 2001 From: Ben Beasley Date: Mon, 26 Apr 2021 14:18:57 -0400 Subject: [PATCH 037/193] Loosen message test in test_invalid_input (#751) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit In test_invalid_input, also accept “(str object cannot be interpreted as an integer)” wherever we expected “an integer is required”; required for Python 3.10 compatibility --- tests/test_codecs.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/tests/test_codecs.py b/tests/test_codecs.py index b4ed7057..0eacf4e8 100644 --- a/tests/test_codecs.py +++ b/tests/test_codecs.py @@ -670,6 +670,11 @@ async def test_unhandled_type_fallback(self): ''') async def test_invalid_input(self): + # The latter message appears beginning in Python 3.10. 
+ integer_required = ( + r"(an integer is required|" + r"\('str' object cannot be interpreted as an integer\))") + cases = [ ('bytea', 'a bytes-like object is required', [ 1, @@ -678,7 +683,7 @@ async def test_invalid_input(self): ('bool', 'a boolean is required', [ 1, ]), - ('int2', 'an integer is required', [ + ('int2', integer_required, [ '2', 'aa', ]), @@ -694,7 +699,7 @@ async def test_invalid_input(self): 4.1 * 10 ** 40, -4.1 * 10 ** 40, ]), - ('int4', 'an integer is required', [ + ('int4', integer_required, [ '2', 'aa', ]), @@ -705,7 +710,7 @@ async def test_invalid_input(self): 2**31, -2**31 - 1, ]), - ('int8', 'an integer is required', [ + ('int8', integer_required, [ '2', 'aa', ]), From 5cf4089a40380a19a51e3e0cb5b9834501b7edb4 Mon Sep 17 00:00:00 2001 From: Paul Draper Date: Mon, 26 Apr 2021 12:19:42 -0600 Subject: [PATCH 038/193] Support readonly and deferrable for non-serializable transactions (#747) Resolves #743 --- asyncpg/transaction.py | 34 +++++++++++----------------------- 1 file changed, 11 insertions(+), 23 deletions(-) diff --git a/asyncpg/transaction.py b/asyncpg/transaction.py index 61e23f18..2d7ba49f 100644 --- a/asyncpg/transaction.py +++ b/asyncpg/transaction.py @@ -46,17 +46,6 @@ def __init__(self, connection, isolation, readonly, deferrable): 'isolation is expected to be either of {}, ' 'got {!r}'.format(ISOLATION_LEVELS, isolation)) - if isolation and isolation != 'serializable': - if readonly: - raise ValueError( - '"readonly" is only supported for ' - 'serializable transactions') - - if deferrable and not readonly: - raise ValueError( - '"deferrable" is only supported for ' - 'serializable readonly transactions') - self._isolation = isolation self._readonly = readonly self._deferrable = deferrable @@ -132,19 +121,18 @@ async def start(self): self._id = con._get_unique_id('savepoint') query = 'SAVEPOINT {};'.format(self._id) else: - if self._isolation is None: - query = 'BEGIN;' - elif self._isolation == 'read_committed': - query = 
'BEGIN ISOLATION LEVEL READ COMMITTED;' + query = 'BEGIN' + if self._isolation == 'read_committed': + query += ' ISOLATION LEVEL READ COMMITTED' elif self._isolation == 'repeatable_read': - query = 'BEGIN ISOLATION LEVEL REPEATABLE READ;' - else: - query = 'BEGIN ISOLATION LEVEL SERIALIZABLE' - if self._readonly: - query += ' READ ONLY' - if self._deferrable: - query += ' DEFERRABLE' - query += ';' + query += ' ISOLATION LEVEL REPEATABLE READ' + elif self._isolation == 'serializable': + query += ' ISOLATION LEVEL SERIALIZABLE' + if self._readonly: + query += ' READ ONLY' + if self._deferrable: + query += ' DEFERRABLE' + query += ';' try: await self._connection.execute(query) From fa2c1e501980fe3bd47821b2224f63cc755c6971 Mon Sep 17 00:00:00 2001 From: Fantix King Date: Mon, 26 Apr 2021 14:20:08 -0400 Subject: [PATCH 039/193] Rename _Retry to _RetryConnectSignal (#723) --- asyncpg/connect_utils.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/asyncpg/connect_utils.py b/asyncpg/connect_utils.py index 3fd64252..8d80dc29 100644 --- a/asyncpg/connect_utils.py +++ b/asyncpg/connect_utils.py @@ -630,7 +630,7 @@ async def _connect_addr( before = time.monotonic() try: return await __connect_addr(params, timeout, True, *args) - except _Retry: + except _RetryConnectSignal: pass # second attempt @@ -641,7 +641,7 @@ async def _connect_addr( return await __connect_addr(params_retry, timeout, False, *args) -class _Retry(Exception): +class _RetryConnectSignal(Exception): pass @@ -687,7 +687,7 @@ async def __connect_addr( tr.close() # retry=True here is a redundant check because we don't want to - # accidentally raise the internal _Retry to the outer world + # accidentally raise the internal _RetryConnectSignal to the user if retry and ( params.sslmode == SSLMode.allow and not pr.is_ssl or params.sslmode == SSLMode.prefer and pr.is_ssl @@ -697,7 +697,7 @@ async def __connect_addr( # 2. 
First attempt with sslmode=prefer, ssl=ctx failed while the # server claimed to support SSL (returning "S" for SSLRequest) # (likely because pg_hba.conf rejected the connection) - raise _Retry() + raise _RetryConnectSignal() else: # but will NOT retry if: From a113d908e1c1a98c5d113a9229e953a425377c37 Mon Sep 17 00:00:00 2001 From: Alexander Shadchin Date: Mon, 26 Apr 2021 21:21:30 +0300 Subject: [PATCH 040/193] Fix asyncpg with Py_DEBUG mode (#719) If Py_DEBUG enabled, then newly allocated memory is filled with the byte 0xCD (CLEANBYTE) https://docs.python.org/3/c-api/memory.html#c.PyMem_SetupDebugHooks This breaks checks for `pointer == NULL` and results in crash. From documentation PyObject_GC_Track https://docs.python.org/3/c-api/gcsupport.html#c.PyObject_GC_Track: > This should be called once all the fields followed by the tp_traverse > handler become valid, usually near the end of the constructor. --- asyncpg/protocol/record/recordobj.c | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/asyncpg/protocol/record/recordobj.c b/asyncpg/protocol/record/recordobj.c index 8ee27f59..e912782f 100644 --- a/asyncpg/protocol/record/recordobj.c +++ b/asyncpg/protocol/record/recordobj.c @@ -31,6 +31,7 @@ ApgRecord_New(PyTypeObject *type, PyObject *desc, Py_ssize_t size) { ApgRecordObject *o; Py_ssize_t i; + int need_gc_track = 0; if (size < 0 || desc == NULL || !ApgRecordDesc_CheckExact(desc)) { PyErr_BadInternalCall(); @@ -54,7 +55,7 @@ ApgRecord_New(PyTypeObject *type, PyObject *desc, Py_ssize_t size) } } - PyObject_GC_Track(o); + need_gc_track = 1; } else { assert(PyType_IsSubtype(type, &ApgRecord_Type)); @@ -78,6 +79,9 @@ ApgRecord_New(PyTypeObject *type, PyObject *desc, Py_ssize_t size) Py_INCREF(desc); o->desc = (ApgRecordDescObject*)desc; o->self_hash = -1; + if (need_gc_track) { + PyObject_GC_Track(o); + } return (PyObject *) o; } From c3060680d0b8565be14f3d3a6300275612abf6be Mon Sep 17 00:00:00 2001 From: Ben Beasley Date: Mon, 26 Apr 2021 
14:22:28 -0400 Subject: [PATCH 041/193] Fix docs/Makefile and docs/_static/theme_overrides.css missing from PyPI package (#708) --- MANIFEST.in | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/MANIFEST.in b/MANIFEST.in index 08be0d4b..2389f6fa 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,4 +1,4 @@ -recursive-include docs *.py *.rst +recursive-include docs *.py *.rst Makefile *.css recursive-include examples *.py recursive-include tests *.py *.pem recursive-include asyncpg *.pyx *.pxd *.pxi *.py *.c *.h From ab6c4366836a552eea2a2535dce30ab3c0764f9d Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Mon, 17 May 2021 11:51:10 -0700 Subject: [PATCH 042/193] asyncpg v0.23.0 --- asyncpg/_version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/asyncpg/_version.py b/asyncpg/_version.py index 32b8dffc..c042b816 100644 --- a/asyncpg/_version.py +++ b/asyncpg/_version.py @@ -10,4 +10,4 @@ # supported platforms, publish the packages on PyPI, merge the PR # to the target branch, create a Git tag pointing to the commit. -__version__ = '0.23.0.dev0' +__version__ = '0.23.0' From edefd69b4a0dadd045c83159edd8fc6bc05c276a Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Mon, 17 May 2021 12:10:11 -0700 Subject: [PATCH 043/193] Post-release version bump --- asyncpg/_version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/asyncpg/_version.py b/asyncpg/_version.py index c042b816..eab825c7 100644 --- a/asyncpg/_version.py +++ b/asyncpg/_version.py @@ -10,4 +10,4 @@ # supported platforms, publish the packages on PyPI, merge the PR # to the target branch, create a Git tag pointing to the commit. 
-__version__ = '0.23.0' +__version__ = '0.24.0.dev0' From d08a9b8b9c2c68d469aeb9c4112d83c56c24f11c Mon Sep 17 00:00:00 2001 From: Fantix King Date: Sat, 24 Jul 2021 15:24:22 -0400 Subject: [PATCH 044/193] Break connection internal circular reference (#774) The connection will now terminate itself immediately if there is no external reference to it. Refs #772 --- asyncpg/connect_utils.py | 4 +--- asyncpg/connection.py | 11 ++++++++++- tests/test_connect.py | 6 +----- 3 files changed, 12 insertions(+), 9 deletions(-) diff --git a/asyncpg/connect_utils.py b/asyncpg/connect_utils.py index 8d80dc29..86259be3 100644 --- a/asyncpg/connect_utils.py +++ b/asyncpg/connect_utils.py @@ -729,7 +729,7 @@ async def _connect(*, loop, timeout, connection_class, record_class, **kwargs): for addr in addrs: before = time.monotonic() try: - con = await _connect_addr( + return await _connect_addr( addr=addr, loop=loop, timeout=timeout, @@ -740,8 +740,6 @@ async def _connect(*, loop, timeout, connection_class, record_class, **kwargs): ) except (OSError, asyncio.TimeoutError, ConnectionError) as ex: last_error = ex - else: - return con finally: timeout -= time.monotonic() - before diff --git a/asyncpg/connection.py b/asyncpg/connection.py index 043c6ddd..3f678d16 100644 --- a/asyncpg/connection.py +++ b/asyncpg/connection.py @@ -9,11 +9,13 @@ import asyncpg import collections import collections.abc +import functools import itertools import sys import time import traceback import warnings +import weakref from . import compat from . 
import connect_utils @@ -70,7 +72,8 @@ def __init__(self, protocol, transport, loop, self._stmt_cache = _StatementCache( loop=loop, max_size=config.statement_cache_size, - on_remove=self._maybe_gc_stmt, + on_remove=functools.partial( + _weak_maybe_gc_stmt, weakref.ref(self)), max_lifetime=config.max_cached_statement_lifetime) self._stmts_to_close = set() @@ -2260,4 +2263,10 @@ def _check_record_class(record_class): ) +def _weak_maybe_gc_stmt(weak_ref, stmt): + self = weak_ref() + if self is not None: + self._maybe_gc_stmt(stmt) + + _uid = 0 diff --git a/tests/test_connect.py b/tests/test_connect.py index 7b08f93d..ff884af8 100644 --- a/tests/test_connect.py +++ b/tests/test_connect.py @@ -7,7 +7,6 @@ import asyncio import contextlib -import gc import ipaddress import os import platform @@ -1448,14 +1447,11 @@ class TestConnectionGC(tb.ClusterTestCase): async def _run_no_explicit_close_test(self): con = await self.connect() + await con.fetchval("select 123") proto = con._protocol conref = weakref.ref(con) del con - gc.collect() - gc.collect() - gc.collect() - self.assertIsNone(conref()) self.assertTrue(proto.is_closed()) From da58cd26d3b2763ffa52533d72b3b2aee2c64108 Mon Sep 17 00:00:00 2001 From: Andrey Semakin Date: Tue, 3 Aug 2021 02:13:23 +0500 Subject: [PATCH 045/193] Drop support for Python 3.5 (#777) --- .github/workflows/release.yml | 7 +------ .github/workflows/tests.yml | 10 ++-------- README.rst | 2 +- asyncpg/compat.py | 37 ----------------------------------- asyncpg/connection.py | 6 +++--- asyncpg/cursor.py | 3 --- asyncpg/pgproto | 2 +- docs/index.rst | 2 +- setup.py | 7 +++---- tests/test_copy.py | 5 ----- 10 files changed, 12 insertions(+), 69 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 6a578425..7ed44af1 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -72,15 +72,10 @@ jobs: runs-on: ${{ matrix.os }} strategy: matrix: - python-version: [3.5, 3.6, 3.7, 3.8, 3.9] + 
python-version: [3.6, 3.7, 3.8, 3.9] os: [ubuntu-20.04, macos-latest, windows-latest] arch: [x86_64] exclude: - # Python 3.5 is unable to properly - # find the recent VS tooling - # https://bugs.python.org/issue30389 - - os: windows-latest - python-version: 3.5 - os: windows-latest arch: aarch64 - os: macos-latest diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 469af2b2..e55e282b 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -17,14 +17,8 @@ jobs: # job. strategy: matrix: - python-version: [3.5, 3.6, 3.7, 3.8, 3.9] - os: [ubuntu-latest, macos-latest, windows-latest] - exclude: - # Python 3.5 is unable to properly - # find the recent VS tooling - # https://bugs.python.org/issue30389 - - os: windows-latest - python-version: 3.5 + python-version: [3.6, 3.7, 3.8, 3.9] + os: [ubuntu-latest, macos-latest, windows-latest] runs-on: ${{ matrix.os }} diff --git a/README.rst b/README.rst index 4ee68b4d..2f5da7a4 100644 --- a/README.rst +++ b/README.rst @@ -13,7 +13,7 @@ of PostgreSQL server binary protocol for use with Python's ``asyncio`` framework. You can read more about asyncpg in an introductory `blog post `_. -asyncpg requires Python 3.5 or later and is supported for PostgreSQL +asyncpg requires Python 3.6 or later and is supported for PostgreSQL versions 9.5 to 13. Older PostgreSQL versions or other databases implementing the PostgreSQL protocol *may* work, but are not being actively tested. 
diff --git a/asyncpg/compat.py b/asyncpg/compat.py index 6dbce3c9..348b8caa 100644 --- a/asyncpg/compat.py +++ b/asyncpg/compat.py @@ -6,52 +6,15 @@ import asyncio -import functools -import os import pathlib import platform import sys -PY_36 = sys.version_info >= (3, 6) PY_37 = sys.version_info >= (3, 7) SYSTEM = platform.uname().system -if sys.version_info < (3, 5, 2): - def aiter_compat(func): - @functools.wraps(func) - async def wrapper(self): - return func(self) - return wrapper -else: - def aiter_compat(func): - return func - - -if PY_36: - fspath = os.fspath -else: - def fspath(path): - fsp = getattr(path, '__fspath__', None) - if fsp is not None and callable(fsp): - path = fsp() - if not isinstance(path, (str, bytes)): - raise TypeError( - 'expected {}() to return str or bytes, not {}'.format( - fsp.__qualname__, type(path).__name__ - )) - return path - elif isinstance(path, (str, bytes)): - return path - else: - raise TypeError( - 'expected str, bytes or path-like object, not {}'.format( - type(path).__name__ - ) - ) - - if SYSTEM == 'Windows': import ctypes.wintypes diff --git a/asyncpg/connection.py b/asyncpg/connection.py index 3f678d16..4a656124 100644 --- a/asyncpg/connection.py +++ b/asyncpg/connection.py @@ -11,6 +11,7 @@ import collections.abc import functools import itertools +import os import sys import time import traceback @@ -957,7 +958,7 @@ def _format_copy_opts(self, *, format=None, oids=None, freeze=None, async def _copy_out(self, copy_stmt, output, timeout): try: - path = compat.fspath(output) + path = os.fspath(output) except TypeError: # output is not a path-like object path = None @@ -996,7 +997,7 @@ async def _writer(data): async def _copy_in(self, copy_stmt, source, timeout): try: - path = compat.fspath(source) + path = os.fspath(source) except TypeError: # source is not a path-like object path = None @@ -1027,7 +1028,6 @@ async def _copy_in(self, copy_stmt, source, timeout): if f is not None: # Copying from a file-like object. 
class _Reader: - @compat.aiter_compat def __aiter__(self): return self diff --git a/asyncpg/cursor.py b/asyncpg/cursor.py index 978824c3..7ec159ba 100644 --- a/asyncpg/cursor.py +++ b/asyncpg/cursor.py @@ -7,7 +7,6 @@ import collections -from . import compat from . import connresource from . import exceptions @@ -48,7 +47,6 @@ def __init__( if state is not None: state.attach() - @compat.aiter_compat @connresource.guarded def __aiter__(self): prefetch = 50 if self._prefetch is None else self._prefetch @@ -206,7 +204,6 @@ def __init__( self._prefetch = prefetch self._timeout = timeout - @compat.aiter_compat @connresource.guarded def __aiter__(self): return self diff --git a/asyncpg/pgproto b/asyncpg/pgproto index 126bcd24..719c7c76 160000 --- a/asyncpg/pgproto +++ b/asyncpg/pgproto @@ -1 +1 @@ -Subproject commit 126bcd24bd3c59d581613dae026e2721efbedf16 +Subproject commit 719c7c76ee92988f094c447bae18b47ab04a2185 diff --git a/docs/index.rst b/docs/index.rst index 77bf19f3..57031b03 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -14,7 +14,7 @@ PostgreSQL and Python/asyncio. asyncpg is an efficient, clean implementation of PostgreSQL server binary protocol for use with Python's ``asyncio`` framework. -**asyncpg** requires Python 3.5 or later and is supported for PostgreSQL +**asyncpg** requires Python 3.6 or later and is supported for PostgreSQL versions 9.5 to 13. 
Contents diff --git a/setup.py b/setup.py index b2fc8f33..606620cd 100644 --- a/setup.py +++ b/setup.py @@ -7,8 +7,8 @@ import sys -if sys.version_info < (3, 5): - raise RuntimeError('asyncpg requires Python 3.5 or greater') +if sys.version_info < (3, 6): + raise RuntimeError('asyncpg requires Python 3.6 or greater') import os import os.path @@ -259,7 +259,6 @@ def finalize_options(self): 'Operating System :: MacOS :: MacOS X', 'Operating System :: Microsoft :: Windows', 'Programming Language :: Python :: 3 :: Only', - 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', @@ -268,7 +267,7 @@ def finalize_options(self): 'Topic :: Database :: Front-Ends', ], platforms=['macOS', 'POSIX', 'Windows'], - python_requires='>=3.5.0', + python_requires='>=3.6.0', zip_safe=False, author='MagicStack Inc', author_email='hello@magic.io', diff --git a/tests/test_copy.py b/tests/test_copy.py index dd01153f..dcac96ac 100644 --- a/tests/test_copy.py +++ b/tests/test_copy.py @@ -13,7 +13,6 @@ import asyncpg from asyncpg import _testbase as tb -from asyncpg import compat class TestCopyFrom(tb.ConnectedTestCase): @@ -467,7 +466,6 @@ class _Source: def __init__(self): self.rowcount = 0 - @compat.aiter_compat def __aiter__(self): return self @@ -507,7 +505,6 @@ class _Source: def __init__(self): self.rowcount = 0 - @compat.aiter_compat def __aiter__(self): return self @@ -533,7 +530,6 @@ class _Source: def __init__(self): self.rowcount = 0 - @compat.aiter_compat def __aiter__(self): return self @@ -564,7 +560,6 @@ def __init__(self, loop): self.rowcount = 0 self.loop = loop - @compat.aiter_compat def __aiter__(self): return self From d0761694bd7882c047e4fedae17291abf67cf69e Mon Sep 17 00:00:00 2001 From: Natrinicle Date: Mon, 2 Aug 2021 21:28:36 +0000 Subject: [PATCH 046/193] Make Server Version Extraction More Flexible (#778) https://github.com/MagicStack/asyncpg/issues/250 
https://github.com/MagicStack/asyncpg/issues/261 https://github.com/MagicStack/asyncpg/issues/771 Just tried to use this library through Ormar->Databases->AsyncPG against a Yugabyte cluster and hit issue 771. Looks like this has been a problem for a while now so going for a complete overhaul of the server version extraction method. Using a groupdict regex against the version string allows for much higher flexibility in extracting what we're looking for and fixes #771 while not breaking any of the existing version patterns. --- asyncpg/serverversion.py | 97 +++++++++++++++++++++------------------- tests/test_connect.py | 3 ++ 2 files changed, 53 insertions(+), 47 deletions(-) diff --git a/asyncpg/serverversion.py b/asyncpg/serverversion.py index 6b2d28c7..31568a2e 100644 --- a/asyncpg/serverversion.py +++ b/asyncpg/serverversion.py @@ -5,53 +5,56 @@ # the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 -from . import types +import re + +from .types import ServerVersion + +version_regex = re.compile( + r"(Postgre[^\s]*)?\s*" + r"(?P[0-9]+)\.?" + r"((?P[0-9]+)\.?)?" + r"(?P[0-9]+)?" + r"(?P[a-z]+)?" + r"(?P[0-9]+)?" +) def split_server_version_string(version_string): - version_string = version_string.strip() - if version_string.startswith('PostgreSQL '): - version_string = version_string[len('PostgreSQL '):] - if version_string.startswith('Postgres-XL'): - version_string = version_string[len('Postgres-XL '):] - - # Some distros (e.g Debian) like may inject their branding - # into the numeric version string, so make sure to only look - # at stuff before the first space. 
- version_string = version_string.split(' ')[0] - parts = version_string.strip().split('.') - if not parts[-1].isdigit(): - # release level specified - lastitem = parts[-1] - levelpart = lastitem.rstrip('0123456789').lower() - if levelpart != lastitem: - serial = int(lastitem[len(levelpart):]) - else: - serial = 0 - - level = levelpart.lstrip('0123456789') - if level != levelpart: - parts[-1] = levelpart[:-len(level)] - else: - parts[-1] = 0 - else: - level = 'final' - serial = 0 - - if int(parts[0]) >= 10: - # Since PostgreSQL 10 the versioning scheme has changed. - # 10.x really means 10.0.x. While parsing 10.1 - # as (10, 1) may seem less confusing, in practice most - # version checks are written as version[:2], and we - # want to keep that behaviour consistent, i.e not fail - # a major version check due to a bugfix release. - parts.insert(1, 0) - - versions = [int(p) for p in parts][:3] - if len(versions) < 3: - versions += [0] * (3 - len(versions)) - - versions.append(level) - versions.append(serial) - - return types.ServerVersion(*versions) + version_match = version_regex.search(version_string) + + if version_match is None: + raise ValueError( + "Unable to parse Postgres " + f'version from "{version_string}"' + ) + + version = version_match.groupdict() + for ver_key, ver_value in version.items(): + # Cast all possible versions parts to int + try: + version[ver_key] = int(ver_value) + except (TypeError, ValueError): + pass + + if version.get("major") < 10: + return ServerVersion( + version.get("major"), + version.get("minor") or 0, + version.get("micro") or 0, + version.get("releaselevel") or "final", + version.get("serial") or 0, + ) + + # Since PostgreSQL 10 the versioning scheme has changed. + # 10.x really means 10.0.x. While parsing 10.1 + # as (10, 1) may seem less confusing, in practice most + # version checks are written as version[:2], and we + # want to keep that behaviour consistent, i.e not fail + # a major version check due to a bugfix release. 
+ return ServerVersion( + version.get("major"), + 0, + version.get("minor") or 0, + version.get("releaselevel") or "final", + version.get("serial") or 0, + ) diff --git a/tests/test_connect.py b/tests/test_connect.py index ff884af8..84eac202 100644 --- a/tests/test_connect.py +++ b/tests/test_connect.py @@ -64,6 +64,9 @@ def test_server_version_02(self): ("10.1", (10, 0, 1, 'final', 0),), ("11.1.2", (11, 0, 1, 'final', 0),), ("PostgreSQL 10.1 (Debian 10.1-3)", (10, 0, 1, 'final', 0),), + ("PostgreSQL 11.2-YB-2.7.1.1-b0 on x86_64-pc-linux-gnu, " + "compiled by gcc (Homebrew gcc 5.5.0_4) 5.5.0, 64-bit", + (11, 0, 2, "final", 0),), ] for version, expected in versions: result = split_server_version_string(version) From de07d0ab6f98cbde8bbdef4ac3e62aa0a73fa283 Mon Sep 17 00:00:00 2001 From: Kassym Dorsel Date: Mon, 2 Aug 2021 18:23:17 -0400 Subject: [PATCH 047/193] Add issubset and issuperset methods to the Range type (#563) --- asyncpg/types.py | 33 ++++++++++++++++++++++++++ tests/test_types.py | 56 +++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 89 insertions(+) create mode 100644 tests/test_types.py diff --git a/asyncpg/types.py b/asyncpg/types.py index 5124718f..bd5813fc 100644 --- a/asyncpg/types.py +++ b/asyncpg/types.py @@ -85,6 +85,39 @@ def upper_inf(self): def isempty(self): return self._empty + def _issubset_lower(self, other): + if other._lower is None: + return True + if self._lower is None: + return False + + return self._lower > other._lower or ( + self._lower == other._lower + and (other._lower_inc or not self._lower_inc) + ) + + def _issubset_upper(self, other): + if other._upper is None: + return True + if self._upper is None: + return False + + return self._upper < other._upper or ( + self._upper == other._upper + and (other._upper_inc or not self._upper_inc) + ) + + def issubset(self, other): + if self._empty: + return True + if other._empty: + return False + + return self._issubset_lower(other) and self._issubset_upper(other) + + def 
issuperset(self, other): + return other.issubset(self) + def __bool__(self): return not self._empty diff --git a/tests/test_types.py b/tests/test_types.py new file mode 100644 index 00000000..04241953 --- /dev/null +++ b/tests/test_types.py @@ -0,0 +1,56 @@ +# Copyright (C) 2016-present the ayncpg authors and contributors +# +# +# This module is part of asyncpg and is released under +# the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 + +from itertools import product + +from asyncpg.types import Range +from asyncpg import _testbase as tb + + +class TestTypes(tb.TestCase): + + def test_range_issubset(self): + subs = [ + Range(empty=True), + Range(lower=1, upper=5, lower_inc=True, upper_inc=False), + Range(lower=1, upper=5, lower_inc=True, upper_inc=True), + Range(lower=1, upper=5, lower_inc=False, upper_inc=True), + Range(lower=1, upper=5, lower_inc=False, upper_inc=False), + Range(lower=-5, upper=10), + Range(lower=2, upper=3), + Range(lower=1, upper=None), + Range(lower=None, upper=None) + ] + + sups = [ + Range(empty=True), + Range(lower=1, upper=5, lower_inc=True, upper_inc=False), + Range(lower=1, upper=5, lower_inc=True, upper_inc=True), + Range(lower=1, upper=5, lower_inc=False, upper_inc=True), + Range(lower=1, upper=5, lower_inc=False, upper_inc=False), + Range(lower=None, upper=None) + ] + + # Each row is 1 subs with all sups + results = [ + True, True, True, True, True, True, + False, True, True, False, False, True, + False, False, True, False, False, True, + False, False, True, True, False, True, + False, True, True, True, True, True, + False, False, False, False, False, True, + False, True, True, True, True, True, + False, False, False, False, False, True, + False, False, False, False, False, True + ] + + for (sub, sup), res in zip(product(subs, sups), results): + self.assertIs( + sub.issubset(sup), res, "Sub:{}, Sup:{}".format(sub, sup) + ) + self.assertIs( + sup.issuperset(sub), res, "Sub:{}, Sup:{}".format(sub, sup) + ) From 
abf556991521b2056d44bc0e1d8cde7762cbcd2d Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Thu, 5 Aug 2021 11:38:23 -0700 Subject: [PATCH 048/193] Add support for Python 3.10 (#795) --- .github/workflows/tests.yml | 43 ++++++++++++++++++++++--------------- asyncpg/pgproto | 2 +- setup.py | 3 ++- 3 files changed, 29 insertions(+), 19 deletions(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index e55e282b..9d5a3216 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -17,8 +17,16 @@ jobs: # job. strategy: matrix: - python-version: [3.6, 3.7, 3.8, 3.9] - os: [ubuntu-latest, macos-latest, windows-latest] + python-version: [3.6, 3.7, 3.8, 3.9, 3.10.0-beta.4] + os: [ubuntu-latest, macos-latest, windows-latest] + loop: [asyncio, uvloop] + exclude: + # uvloop does not support Python 3.6 + - loop: uvloop + python-version: 3.6 + # uvloop does not support windows + - loop: uvloop + os: windows-latest runs-on: ${{ matrix.os }} @@ -27,23 +35,23 @@ jobs: shell: bash steps: - - uses: actions/checkout@v1 + - uses: actions/checkout@v2 with: fetch-depth: 50 submodules: true - name: Check if release PR. 
uses: edgedb/action-release/validate-pr@master - continue-on-error: true id: release with: github_token: ${{ secrets.RELEASE_BOT_GITHUB_TOKEN }} + missing_version_ok: yes version_file: asyncpg/_version.py version_line_pattern: | __version__\s*=\s*(?:['"])([[:PEP440:]])(?:['"]) - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v2 if: steps.release.outputs.version == 0 with: python-version: ${{ matrix.python-version }} @@ -56,28 +64,29 @@ jobs: - name: Install Python Deps if: steps.release.outputs.version == 0 run: | - python -m pip install -U pip setuptools - pip install -e .[test] + python -m pip install -U pip setuptools wheel + python -m pip install -e .[test] - name: Test if: steps.release.outputs.version == 0 + env: + LOOP_IMPL: ${{ matrix.loop }} run: | - python setup.py test - - - name: Test under uvloop - if: steps.release.outputs.version == 0 && matrix.os != 'windows-latest' && matrix.python-version != '3.9' - run: | - env USE_UVLOOP=1 python setup.py test + if [ "${LOOP_IMPL}" = "uvloop" ]; then + env USE_UVLOOP=1 python setup.py test + else + python setup.py test + fi test-postgres: strategy: matrix: postgres-version: [9.5, 9.6, 10, 11, 12, 13] - runs-on: ubuntu-20.04 + runs-on: ubuntu-latest steps: - - uses: actions/checkout@v1 + - uses: actions/checkout@v2 with: fetch-depth: 50 submodules: true @@ -104,7 +113,7 @@ jobs: >> "${GITHUB_ENV}" - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v2 if: steps.release.outputs.version == 0 with: python-version: ${{ matrix.python-version }} @@ -127,7 +136,7 @@ jobs: regression-tests: name: "Regression Tests" needs: [test-platforms, test-postgres] - runs-on: ubuntu-20.04 + runs-on: ubuntu-latest steps: - run: echo OK diff --git a/asyncpg/pgproto b/asyncpg/pgproto index 719c7c76..1720f8af 160000 --- a/asyncpg/pgproto +++ b/asyncpg/pgproto @@ -1 +1 @@ -Subproject commit 
719c7c76ee92988f094c447bae18b47ab04a2185 +Subproject commit 1720f8af63725d79454884cfa787202a50eb5430 diff --git a/setup.py b/setup.py index 606620cd..3bdc97ab 100644 --- a/setup.py +++ b/setup.py @@ -34,7 +34,7 @@ # (example breakage: https://gitlab.com/pycqa/flake8/issues/427) 'pycodestyle~=2.5.0', 'flake8~=3.7.9', - 'uvloop~=0.14.0;platform_system!="Windows"', + 'uvloop>=0.15.3; platform_system != "Windows" and python_version >= "3.7"', ] # Dependencies required to build documentation. @@ -263,6 +263,7 @@ def finalize_options(self): 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', 'Programming Language :: Python :: 3.9', + 'Programming Language :: Python :: 3.10', 'Programming Language :: Python :: Implementation :: CPython', 'Topic :: Database :: Front-Ends', ], From 67ebbc91ff466c9c48d0a301d8e63ae1b68b939e Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Sat, 7 Aug 2021 15:30:50 -0700 Subject: [PATCH 049/193] Bump minimum required Cython version (#799) 0.29.24 is needed to compile properly under Python 3.10 --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 3bdc97ab..18c34d41 100644 --- a/setup.py +++ b/setup.py @@ -25,7 +25,7 @@ from setuptools.command import build_ext as setuptools_build_ext -CYTHON_DEPENDENCY = 'Cython(>=0.29.20,<0.30.0)' +CYTHON_DEPENDENCY = 'Cython(>=0.29.24,<0.30.0)' # Minimal dependencies required to test asyncpg. TEST_DEPENDENCIES = [ From a6b0f2837691c326d73bc4f722906c4997dc63f4 Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Mon, 9 Aug 2021 17:16:07 -0700 Subject: [PATCH 050/193] Add copy_ wrappers to Pool (#661) The `copy_to_table()` and friends are currently missing from the `Pool` interface, add them in. Fixes: #641. 
--- asyncpg/pool.py | 196 ++++++++++++++++++++++++++++++++++++++++++++---- 1 file changed, 182 insertions(+), 14 deletions(-) diff --git a/asyncpg/pool.py b/asyncpg/pool.py index c4321a2f..c868097c 100644 --- a/asyncpg/pool.py +++ b/asyncpg/pool.py @@ -522,7 +522,7 @@ async def execute(self, query: str, *args, timeout: float=None) -> str: Pool performs this operation using one of its connections. Other than that, it behaves identically to - :meth:`Connection.execute() `. + :meth:`Connection.execute() `. .. versionadded:: 0.10.0 """ @@ -534,7 +534,8 @@ async def executemany(self, command: str, args, *, timeout: float=None): Pool performs this operation using one of its connections. Other than that, it behaves identically to - :meth:`Connection.executemany() `. + :meth:`Connection.executemany() + `. .. versionadded:: 0.10.0 """ @@ -546,7 +547,7 @@ async def fetch(self, query, *args, timeout=None) -> list: Pool performs this operation using one of its connections. Other than that, it behaves identically to - :meth:`Connection.fetch() `. + :meth:`Connection.fetch() `. .. versionadded:: 0.10.0 """ @@ -558,7 +559,8 @@ async def fetchval(self, query, *args, column=0, timeout=None): Pool performs this operation using one of its connections. Other than that, it behaves identically to - :meth:`Connection.fetchval() `. + :meth:`Connection.fetchval() + `. .. versionadded:: 0.10.0 """ @@ -571,13 +573,178 @@ async def fetchrow(self, query, *args, timeout=None): Pool performs this operation using one of its connections. Other than that, it behaves identically to - :meth:`Connection.fetchrow() `. + :meth:`Connection.fetchrow() `. .. 
versionadded:: 0.10.0 """ async with self.acquire() as con: return await con.fetchrow(query, *args, timeout=timeout) + async def copy_from_table( + self, + table_name, + *, + output, + columns=None, + schema_name=None, + timeout=None, + format=None, + oids=None, + delimiter=None, + null=None, + header=None, + quote=None, + escape=None, + force_quote=None, + encoding=None + ): + """Copy table contents to a file or file-like object. + + Pool performs this operation using one of its connections. Other than + that, it behaves identically to + :meth:`Connection.copy_from_table() + `. + + .. versionadded:: 0.24.0 + """ + async with self.acquire() as con: + return await con.copy_from_table( + table_name, + output=output, + columns=columns, + schema_name=schema_name, + timeout=timeout, + format=format, + oids=oids, + delimiter=delimiter, + null=null, + header=header, + quote=quote, + escape=escape, + force_quote=force_quote, + encoding=encoding + ) + + async def copy_from_query( + self, + query, + *args, + output, + timeout=None, + format=None, + oids=None, + delimiter=None, + null=None, + header=None, + quote=None, + escape=None, + force_quote=None, + encoding=None + ): + """Copy the results of a query to a file or file-like object. + + Pool performs this operation using one of its connections. Other than + that, it behaves identically to + :meth:`Connection.copy_from_query() + `. + + .. 
versionadded:: 0.24.0 + """ + async with self.acquire() as con: + return await con.copy_from_query( + query, + *args, + output=output, + timeout=timeout, + format=format, + oids=oids, + delimiter=delimiter, + null=null, + header=header, + quote=quote, + escape=escape, + force_quote=force_quote, + encoding=encoding + ) + + async def copy_to_table( + self, + table_name, + *, + source, + columns=None, + schema_name=None, + timeout=None, + format=None, + oids=None, + freeze=None, + delimiter=None, + null=None, + header=None, + quote=None, + escape=None, + force_quote=None, + force_not_null=None, + force_null=None, + encoding=None + ): + """Copy data to the specified table. + + Pool performs this operation using one of its connections. Other than + that, it behaves identically to + :meth:`Connection.copy_to_table() + `. + + .. versionadded:: 0.24.0 + """ + async with self.acquire() as con: + return await con.copy_to_table( + table_name, + source=source, + columns=columns, + schema_name=schema_name, + timeout=timeout, + format=format, + oids=oids, + freeze=freeze, + delimiter=delimiter, + null=null, + header=header, + quote=quote, + escape=escape, + force_quote=force_quote, + force_not_null=force_not_null, + force_null=force_null, + encoding=encoding + ) + + async def copy_records_to_table( + self, + table_name, + *, + records, + columns=None, + schema_name=None, + timeout=None + ): + """Copy a list of records to the specified table using binary COPY. + + Pool performs this operation using one of its connections. Other than + that, it behaves identically to + :meth:`Connection.copy_records_to_table() + `. + + .. versionadded:: 0.24.0 + """ + async with self.acquire() as con: + return await con.copy_records_to_table( + table_name, + records=records, + columns=columns, + schema_name=schema_name, + timeout=timeout + ) + def acquire(self, *, timeout=None): """Acquire a database connection from the pool. @@ -844,12 +1011,12 @@ def create_pool(dsn=None, *, .. 
warning:: Prepared statements and cursors returned by - :meth:`Connection.prepare() ` and - :meth:`Connection.cursor() ` become - invalid once the connection is released. Likewise, all notification - and log listeners are removed, and ``asyncpg`` will issue a warning - if there are any listener callbacks registered on a connection that - is being released to the pool. + :meth:`Connection.prepare() ` + and :meth:`Connection.cursor() ` + become invalid once the connection is released. Likewise, all + notification and log listeners are removed, and ``asyncpg`` will + issue a warning if there are any listener callbacks registered on a + connection that is being released to the pool. :param str dsn: Connection arguments specified using as a single string in @@ -915,10 +1082,11 @@ def create_pool(dsn=None, *, .. versionchanged:: 0.13.0 An :exc:`~asyncpg.exceptions.InterfaceWarning` will be produced if there are any active listeners (added via - :meth:`Connection.add_listener() ` + :meth:`Connection.add_listener() + ` or :meth:`Connection.add_log_listener() - `) present on the connection - at the moment of its release to the pool. + `) present on the + connection at the moment of its release to the pool. .. versionchanged:: 0.22.0 Added the *record_class* parameter. From 1d33ff625a455d8445d4adc03c0fdbbec53ec5df Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Mon, 9 Aug 2021 17:16:28 -0700 Subject: [PATCH 051/193] Add support for asynchronous iterables to copy_records_to_table() (#713) The `Connection.copy_records_to_table()` now allows the `records` argument to be an asynchronous iterable. Fixes: #689. 
--- asyncpg/connection.py | 31 +++++++++++++++---- asyncpg/protocol/protocol.pyx | 57 ++++++++++++++++++++++++----------- tests/test_copy.py | 23 ++++++++++++++ 3 files changed, 87 insertions(+), 24 deletions(-) diff --git a/asyncpg/connection.py b/asyncpg/connection.py index 4a656124..e01c6b65 100644 --- a/asyncpg/connection.py +++ b/asyncpg/connection.py @@ -872,6 +872,8 @@ async def copy_records_to_table(self, table_name, *, records, :param records: An iterable returning row tuples to copy into the table. + :term:`Asynchronous iterables ` + are also supported. :param list columns: An optional list of column names to copy. @@ -901,7 +903,28 @@ async def copy_records_to_table(self, table_name, *, records, >>> asyncio.get_event_loop().run_until_complete(run()) 'COPY 2' + Asynchronous record iterables are also supported: + + .. code-block:: pycon + + >>> import asyncpg + >>> import asyncio + >>> async def run(): + ... con = await asyncpg.connect(user='postgres') + ... async def record_gen(size): + ... for i in range(size): + ... yield (i,) + ... result = await con.copy_records_to_table( + ... 'mytable', records=record_gen(100)) + ... print(result) + ... + >>> asyncio.get_event_loop().run_until_complete(run()) + 'COPY 100' + .. versionadded:: 0.11.0 + + .. versionchanged:: 0.24.0 + The ``records`` argument may be an asynchronous iterable. 
""" tabname = utils._quote_ident(table_name) if schema_name: @@ -924,8 +947,8 @@ async def copy_records_to_table(self, table_name, *, records, copy_stmt = 'COPY {tab}{cols} FROM STDIN {opts}'.format( tab=tabname, cols=cols, opts=opts) - return await self._copy_in_records( - copy_stmt, records, intro_ps._state, timeout) + return await self._protocol.copy_in( + copy_stmt, None, None, records, intro_ps._state, timeout) def _format_copy_opts(self, *, format=None, oids=None, freeze=None, delimiter=None, null=None, header=None, quote=None, @@ -1047,10 +1070,6 @@ async def __anext__(self): if opened_by_us: await run_in_executor(None, f.close) - async def _copy_in_records(self, copy_stmt, records, intro_stmt, timeout): - return await self._protocol.copy_in( - copy_stmt, None, None, records, intro_stmt, timeout) - async def set_type_codec(self, typename, *, schema='public', encoder, decoder, format='text'): diff --git a/asyncpg/protocol/protocol.pyx b/asyncpg/protocol/protocol.pyx index 3a1594a5..dbe52e9e 100644 --- a/asyncpg/protocol/protocol.pyx +++ b/asyncpg/protocol/protocol.pyx @@ -13,7 +13,7 @@ cimport cpython import asyncio import builtins import codecs -import collections +import collections.abc import socket import time import weakref @@ -438,23 +438,44 @@ cdef class BaseProtocol(CoreProtocol): 'no binary format encoder for ' 'type {} (OID {})'.format(codec.name, codec.oid)) - for row in records: - # Tuple header - wbuf.write_int16(num_cols) - # Tuple data - for i in range(num_cols): - item = row[i] - if item is None: - wbuf.write_int32(-1) - else: - codec = cpython.PyTuple_GET_ITEM(codecs, i) - codec.encode(settings, wbuf, item) - - if wbuf.len() >= _COPY_BUFFER_SIZE: - with timer: - await self.writing_allowed.wait() - self._write_copy_data_msg(wbuf) - wbuf = WriteBuffer.new() + if isinstance(records, collections.abc.AsyncIterable): + async for row in records: + # Tuple header + wbuf.write_int16(num_cols) + # Tuple data + for i in range(num_cols): + item = row[i] 
+ if item is None: + wbuf.write_int32(-1) + else: + codec = cpython.PyTuple_GET_ITEM( + codecs, i) + codec.encode(settings, wbuf, item) + + if wbuf.len() >= _COPY_BUFFER_SIZE: + with timer: + await self.writing_allowed.wait() + self._write_copy_data_msg(wbuf) + wbuf = WriteBuffer.new() + else: + for row in records: + # Tuple header + wbuf.write_int16(num_cols) + # Tuple data + for i in range(num_cols): + item = row[i] + if item is None: + wbuf.write_int32(-1) + else: + codec = cpython.PyTuple_GET_ITEM( + codecs, i) + codec.encode(settings, wbuf, item) + + if wbuf.len() >= _COPY_BUFFER_SIZE: + with timer: + await self.writing_allowed.wait() + self._write_copy_data_msg(wbuf) + wbuf = WriteBuffer.new() # End of binary copy. wbuf.write_int16(-1) diff --git a/tests/test_copy.py b/tests/test_copy.py index dcac96ac..70c9388e 100644 --- a/tests/test_copy.py +++ b/tests/test_copy.py @@ -644,6 +644,29 @@ async def test_copy_records_to_table_1(self): finally: await self.con.execute('DROP TABLE copytab') + async def test_copy_records_to_table_async(self): + await self.con.execute(''' + CREATE TABLE copytab_async(a text, b int, c timestamptz); + ''') + + try: + date = datetime.datetime.now(tz=datetime.timezone.utc) + delta = datetime.timedelta(days=1) + + async def record_generator(): + for i in range(100): + yield ('a-{}'.format(i), i, date + delta) + + yield ('a-100', None, None) + + res = await self.con.copy_records_to_table( + 'copytab_async', records=record_generator()) + + self.assertEqual(res, 'COPY 101') + + finally: + await self.con.execute('DROP TABLE copytab_async') + async def test_copy_records_to_table_no_binary_codec(self): await self.con.execute(''' CREATE TABLE copytab(a uuid); From 41da093e9b8f502c59091362840a8612b6c32a31 Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Mon, 9 Aug 2021 17:16:51 -0700 Subject: [PATCH 052/193] Add support for coroutine functions as listener callbacks (#802) The `Connection.add_listener()`, `Connection.add_log_listener()` 
and `Connection.add_termination_listener()` now allow coroutine functions as callbacks. Fixes: #567. --- asyncpg/connection.py | 102 ++++++++++++++++++++++------------------ tests/test_listeners.py | 39 +++++++++++++++ 2 files changed, 96 insertions(+), 45 deletions(-) diff --git a/asyncpg/connection.py b/asyncpg/connection.py index e01c6b65..cc7878de 100644 --- a/asyncpg/connection.py +++ b/asyncpg/connection.py @@ -11,10 +11,12 @@ import collections.abc import functools import itertools +import inspect import os import sys import time import traceback +import typing import warnings import weakref @@ -133,17 +135,21 @@ async def add_listener(self, channel, callback): :param str channel: Channel to listen on. :param callable callback: - A callable receiving the following arguments: + A callable or a coroutine function receiving the following + arguments: **connection**: a Connection the callback is registered with; **pid**: PID of the Postgres server that sent the notification; **channel**: name of the channel the notification was sent to; **payload**: the payload. + + .. versionchanged:: 0.24.0 + The ``callback`` argument may be a coroutine function. 
""" self._check_open() if channel not in self._listeners: await self.fetch('LISTEN {}'.format(utils._quote_ident(channel))) self._listeners[channel] = set() - self._listeners[channel].add(callback) + self._listeners[channel].add(_Callback.from_callable(callback)) async def remove_listener(self, channel, callback): """Remove a listening callback on the specified channel.""" @@ -151,9 +157,10 @@ async def remove_listener(self, channel, callback): return if channel not in self._listeners: return - if callback not in self._listeners[channel]: + cb = _Callback.from_callable(callback) + if cb not in self._listeners[channel]: return - self._listeners[channel].remove(callback) + self._listeners[channel].remove(cb) if not self._listeners[channel]: del self._listeners[channel] await self.fetch('UNLISTEN {}'.format(utils._quote_ident(channel))) @@ -166,44 +173,51 @@ def add_log_listener(self, callback): DEBUG, INFO, or LOG. :param callable callback: - A callable receiving the following arguments: + A callable or a coroutine function receiving the following + arguments: **connection**: a Connection the callback is registered with; **message**: the `exceptions.PostgresLogMessage` message. .. versionadded:: 0.12.0 + + .. versionchanged:: 0.24.0 + The ``callback`` argument may be a coroutine function. """ if self.is_closed(): raise exceptions.InterfaceError('connection is closed') - self._log_listeners.add(callback) + self._log_listeners.add(_Callback.from_callable(callback)) def remove_log_listener(self, callback): """Remove a listening callback for log messages. .. versionadded:: 0.12.0 """ - self._log_listeners.discard(callback) + self._log_listeners.discard(_Callback.from_callable(callback)) def add_termination_listener(self, callback): """Add a listener that will be called when the connection is closed. 
:param callable callback: - A callable receiving one argument: + A callable or a coroutine function receiving one argument: **connection**: a Connection the callback is registered with. .. versionadded:: 0.21.0 + + .. versionchanged:: 0.24.0 + The ``callback`` argument may be a coroutine function. """ - self._termination_listeners.add(callback) + self._termination_listeners.add(_Callback.from_callable(callback)) def remove_termination_listener(self, callback): """Remove a listening callback for connection termination. :param callable callback: - The callable that was passed to + The callable or coroutine function that was passed to :meth:`Connection.add_termination_listener`. .. versionadded:: 0.21.0 """ - self._termination_listeners.discard(callback) + self._termination_listeners.discard(_Callback.from_callable(callback)) def get_server_pid(self): """Return the PID of the Postgres server the connection is bound to.""" @@ -1449,18 +1463,10 @@ def _process_log_message(self, fields, last_query): con_ref = self._unwrap() for cb in self._log_listeners: - self._loop.call_soon( - self._call_log_listener, cb, con_ref, message) - - def _call_log_listener(self, cb, con_ref, message): - try: - cb(con_ref, message) - except Exception as ex: - self._loop.call_exception_handler({ - 'message': 'Unhandled exception in asyncpg log message ' - 'listener callback {!r}'.format(cb), - 'exception': ex - }) + if cb.is_async: + self._loop.create_task(cb.cb(con_ref, message)) + else: + self._loop.call_soon(cb.cb, con_ref, message) def _call_termination_listeners(self): if not self._termination_listeners: @@ -1468,16 +1474,10 @@ def _call_termination_listeners(self): con_ref = self._unwrap() for cb in self._termination_listeners: - try: - cb(con_ref) - except Exception as ex: - self._loop.call_exception_handler({ - 'message': ( - 'Unhandled exception in asyncpg connection ' - 'termination listener callback {!r}'.format(cb) - ), - 'exception': ex - }) + if cb.is_async: + 
self._loop.create_task(cb.cb(con_ref)) + else: + self._loop.call_soon(cb.cb, con_ref) self._termination_listeners.clear() @@ -1487,18 +1487,10 @@ def _process_notification(self, pid, channel, payload): con_ref = self._unwrap() for cb in self._listeners[channel]: - self._loop.call_soon( - self._call_listener, cb, con_ref, pid, channel, payload) - - def _call_listener(self, cb, con_ref, pid, channel, payload): - try: - cb(con_ref, pid, channel, payload) - except Exception as ex: - self._loop.call_exception_handler({ - 'message': 'Unhandled exception in asyncpg notification ' - 'listener callback {!r}'.format(cb), - 'exception': ex - }) + if cb.is_async: + self._loop.create_task(cb.cb(con_ref, pid, channel, payload)) + else: + self._loop.call_soon(cb.cb, con_ref, pid, channel, payload) def _unwrap(self): if self._proxy is None: @@ -2173,6 +2165,26 @@ def _maybe_cleanup(self): self._on_remove(old_entry._statement) +class _Callback(typing.NamedTuple): + + cb: typing.Callable[..., None] + is_async: bool + + @classmethod + def from_callable(cls, cb: typing.Callable[..., None]) -> '_Callback': + if inspect.iscoroutinefunction(cb): + is_async = True + elif callable(cb): + is_async = False + else: + raise exceptions.InterfaceError( + 'expected a callable or an `async def` function,' + 'got {!r}'.format(cb) + ) + + return cls(cb, is_async) + + class _Atomic: __slots__ = ('_acquired',) diff --git a/tests/test_listeners.py b/tests/test_listeners.py index 1af9627c..7fdf0312 100644 --- a/tests/test_listeners.py +++ b/tests/test_listeners.py @@ -23,6 +23,7 @@ async def test_listen_01(self): q1 = asyncio.Queue() q2 = asyncio.Queue() + q3 = asyncio.Queue() def listener1(*args): q1.put_nowait(args) @@ -30,8 +31,12 @@ def listener1(*args): def listener2(*args): q2.put_nowait(args) + async def async_listener3(*args): + q3.put_nowait(args) + await con.add_listener('test', listener1) await con.add_listener('test', listener2) + await con.add_listener('test', async_listener3) await 
con.execute("NOTIFY test, 'aaaa'") @@ -41,8 +46,12 @@ def listener2(*args): self.assertEqual( await q2.get(), (con, con.get_server_pid(), 'test', 'aaaa')) + self.assertEqual( + await q3.get(), + (con, con.get_server_pid(), 'test', 'aaaa')) await con.remove_listener('test', listener2) + await con.remove_listener('test', async_listener3) await con.execute("NOTIFY test, 'aaaa'") @@ -117,6 +126,7 @@ class TestLogListeners(tb.ConnectedTestCase): }) async def test_log_listener_01(self): q1 = asyncio.Queue() + q2 = asyncio.Queue() def notice_callb(con, message): # Message fields depend on PG version, hide some values. @@ -124,6 +134,12 @@ def notice_callb(con, message): del dct['server_source_line'] q1.put_nowait((con, type(message), dct)) + async def async_notice_callb(con, message): + # Message fields depend on PG version, hide some values. + dct = message.as_dict() + del dct['server_source_line'] + q2.put_nowait((con, type(message), dct)) + async def raise_notice(): await self.con.execute( """DO $$ @@ -140,6 +156,7 @@ async def raise_warning(): con = self.con con.add_log_listener(notice_callb) + con.add_log_listener(async_notice_callb) expected_msg = { 'context': 'PL/pgSQL function inline_code_block line 2 at RAISE', @@ -182,7 +199,21 @@ async def raise_warning(): msg, (con, exceptions.PostgresWarning, expected_msg_warn)) + msg = await q2.get() + msg[2].pop('server_source_filename', None) + self.assertEqual( + msg, + (con, exceptions.PostgresLogMessage, expected_msg_notice)) + + msg = await q2.get() + msg[2].pop('server_source_filename', None) + self.assertEqual( + msg, + (con, exceptions.PostgresWarning, expected_msg_warn)) + con.remove_log_listener(notice_callb) + con.remove_log_listener(async_notice_callb) + await raise_notice() self.assertTrue(q1.empty()) @@ -291,19 +322,26 @@ class TestConnectionTerminationListener(tb.ProxiedClusterTestCase): async def test_connection_termination_callback_called_on_remote(self): called = False + async_called = False def 
close_cb(con): nonlocal called called = True + async def async_close_cb(con): + nonlocal async_called + async_called = True + con = await self.connect() con.add_termination_listener(close_cb) + con.add_termination_listener(async_close_cb) self.proxy.close_all_connections() try: await con.fetchval('SELECT 1') except Exception: pass self.assertTrue(called) + self.assertTrue(async_called) async def test_connection_termination_callback_called_on_local(self): @@ -316,4 +354,5 @@ def close_cb(con): con = await self.connect() con.add_termination_listener(close_cb) await con.close() + await asyncio.sleep(0) self.assertTrue(called) From c674e86a9245631350d855b2664fb3ee734cfe69 Mon Sep 17 00:00:00 2001 From: Jan Dobes Date: Tue, 10 Aug 2021 02:18:49 +0200 Subject: [PATCH 053/193] Add support for sslcert, sslkey and sslrootcert parameters to DSN (#768) Co-authored-by: Elvis Pranskevichus --- asyncpg/_testbase/__init__.py | 4 +- asyncpg/connect_utils.py | 48 +++++++++++++++++++- asyncpg/connection.py | 58 ++++++++++++++++++++++-- tests/certs/client.cert.pem | 24 ++++++++++ tests/certs/client.csr.pem | 18 ++++++++ tests/certs/client.key.pem | 27 ++++++++++++ tests/certs/client_ca.cert.pem | 25 +++++++++++ tests/certs/client_ca.cert.srl | 1 + tests/certs/client_ca.key.pem | 27 ++++++++++++ tests/test_connect.py | 81 ++++++++++++++++++++++++++++++++-- 10 files changed, 303 insertions(+), 10 deletions(-) create mode 100644 tests/certs/client.cert.pem create mode 100644 tests/certs/client.csr.pem create mode 100644 tests/certs/client.key.pem create mode 100644 tests/certs/client_ca.cert.pem create mode 100644 tests/certs/client_ca.cert.srl create mode 100644 tests/certs/client_ca.key.pem diff --git a/asyncpg/_testbase/__init__.py b/asyncpg/_testbase/__init__.py index ce7f827f..9944b20f 100644 --- a/asyncpg/_testbase/__init__.py +++ b/asyncpg/_testbase/__init__.py @@ -330,8 +330,10 @@ def tearDownClass(cls): @classmethod def get_connection_spec(cls, kwargs={}): conn_spec = 
cls.cluster.get_connection_spec() + if kwargs.get('dsn'): + conn_spec.pop('host') conn_spec.update(kwargs) - if not os.environ.get('PGHOST'): + if not os.environ.get('PGHOST') and not kwargs.get('dsn'): if 'database' not in conn_spec: conn_spec['database'] = 'postgres' if 'user' not in conn_spec: diff --git a/asyncpg/connect_utils.py b/asyncpg/connect_utils.py index 86259be3..cd94b834 100644 --- a/asyncpg/connect_utils.py +++ b/asyncpg/connect_utils.py @@ -226,6 +226,7 @@ def _parse_connect_dsn_and_args(*, dsn, host, port, user, # `auth_hosts` is the version of host information for the purposes # of reading the pgpass file. auth_hosts = None + sslcert = sslkey = sslrootcert = sslcrl = None if dsn: parsed = urllib.parse.urlparse(dsn) @@ -310,6 +311,26 @@ def _parse_connect_dsn_and_args(*, dsn, host, port, user, if ssl is None: ssl = val + if 'sslcert' in query: + val = query.pop('sslcert') + if sslcert is None: + sslcert = val + + if 'sslkey' in query: + val = query.pop('sslkey') + if sslkey is None: + sslkey = val + + if 'sslrootcert' in query: + val = query.pop('sslrootcert') + if sslrootcert is None: + sslrootcert = val + + if 'sslcrl' in query: + val = query.pop('sslcrl') + if sslcrl is None: + sslcrl = val + if query: if server_settings is None: server_settings = query @@ -427,15 +448,38 @@ def _parse_connect_dsn_and_args(*, dsn, host, port, user, '`sslmode` parameter must be one of: {}'.format(modes)) # docs at https://www.postgresql.org/docs/10/static/libpq-connect.html - # Not implemented: sslcert & sslkey & sslrootcert & sslcrl params. 
if sslmode < SSLMode.allow: ssl = False else: - ssl = ssl_module.create_default_context() + ssl = ssl_module.create_default_context( + ssl_module.Purpose.SERVER_AUTH) ssl.check_hostname = sslmode >= SSLMode.verify_full ssl.verify_mode = ssl_module.CERT_REQUIRED if sslmode <= SSLMode.require: ssl.verify_mode = ssl_module.CERT_NONE + + if sslcert is None: + sslcert = os.getenv('PGSSLCERT') + + if sslkey is None: + sslkey = os.getenv('PGSSLKEY') + + if sslrootcert is None: + sslrootcert = os.getenv('PGSSLROOTCERT') + + if sslcrl is None: + sslcrl = os.getenv('PGSSLCRL') + + if sslcert: + ssl.load_cert_chain(sslcert, keyfile=sslkey) + + if sslrootcert: + ssl.load_verify_locations(cafile=sslrootcert) + + if sslcrl: + ssl.load_verify_locations(cafile=sslcrl) + ssl.verify_flags |= ssl_module.VERIFY_CRL_CHECK_CHAIN + elif ssl is True: ssl = ssl_module.create_default_context() sslmode = SSLMode.verify_full diff --git a/asyncpg/connection.py b/asyncpg/connection.py index cc7878de..26249679 100644 --- a/asyncpg/connection.py +++ b/asyncpg/connection.py @@ -1787,10 +1787,11 @@ async def connect(dsn=None, *, Connection arguments specified using as a single string in the `libpq connection URI format`_: ``postgres://user:password@host:port/database?option=value``. - The following options are recognized by asyncpg: host, port, - user, database (or dbname), password, passfile, sslmode. - Unlike libpq, asyncpg will treat unrecognized options - as `server settings`_ to be used for the connection. + The following options are recognized by asyncpg: ``host``, + ``port``, ``user``, ``database`` (or ``dbname``), ``password``, + ``passfile``, ``sslmode``, ``sslcert``, ``sslkey``, ``sslrootcert``, + and ``sslcrl``. Unlike libpq, asyncpg will treat unrecognized + options as `server settings`_ to be used for the connection. .. note:: @@ -1912,6 +1913,51 @@ async def connect(dsn=None, *, *ssl* is ignored for Unix domain socket communication. 
+ Example of programmatic SSL context configuration that is equivalent + to ``sslmode=verify-full&sslcert=..&sslkey=..&sslrootcert=..``: + + .. code-block:: pycon + + >>> import asyncpg + >>> import asyncio + >>> import ssl + >>> async def main(): + ... # Load CA bundle for server certificate verification, + ... # equivalent to sslrootcert= in DSN. + ... sslctx = ssl.create_default_context( + ... ssl.Purpose.SERVER_AUTH, + ... cafile="path/to/ca_bundle.pem") + ... # If True, equivalent to sslmode=verify-full, if False: + ... # sslmode=verify-ca. + ... sslctx.check_hostname = True + ... # Load client certificate and private key for client + ... # authentication, equivalent to sslcert= and sslkey= in + ... # DSN. + ... sslctx.load_cert_chain( + ... "path/to/client.cert", + ... keyfile="path/to/client.key", + ... ) + ... con = await asyncpg.connect(user='postgres', ssl=sslctx) + ... await con.close() + >>> asyncio.run(run()) + + Example of programmatic SSL context configuration that is equivalent + to ``sslmode=require`` (no server certificate or host verification): + + .. code-block:: pycon + + >>> import asyncpg + >>> import asyncio + >>> import ssl + >>> async def main(): + ... sslctx = ssl.create_default_context( + ... ssl.Purpose.SERVER_AUTH) + ... sslctx.check_hostname = False + ... sslctx.verify_mode = ssl.CERT_NONE + ... con = await asyncpg.connect(user='postgres', ssl=sslctx) + ... await con.close() + >>> asyncio.run(run()) + :param dict server_settings: An optional dict of server runtime parameters. Refer to PostgreSQL documentation for @@ -1970,6 +2016,10 @@ async def connect(dsn=None, *, .. versionchanged:: 0.22.0 The *ssl* argument now defaults to ``'prefer'``. + .. versionchanged:: 0.24.0 + The ``sslcert``, ``sslkey``, ``sslrootcert``, and ``sslcrl`` options + are supported in the *dsn* argument. + .. _SSLContext: https://docs.python.org/3/library/ssl.html#ssl.SSLContext .. 
_create_default_context: https://docs.python.org/3/library/ssl.html#ssl.create_default_context diff --git a/tests/certs/client.cert.pem b/tests/certs/client.cert.pem new file mode 100644 index 00000000..b6d9a91a --- /dev/null +++ b/tests/certs/client.cert.pem @@ -0,0 +1,24 @@ +-----BEGIN CERTIFICATE----- +MIIEAzCCAuugAwIBAgIUPfej8IQ/5bCrihqWImrq2vKPOq0wDQYJKoZIhvcNAQEL +BQAwgaMxCzAJBgNVBAYTAkNBMRAwDgYDVQQIDAdPbnRhcmlvMRAwDgYDVQQHDAdU +b3JvbnRvMRgwFgYDVQQKDA9NYWdpY1N0YWNrIEluYy4xFjAUBgNVBAsMDWFzeW5j +cGcgdGVzdHMxHzAdBgNVBAMMFmFzeW5jcGcgdGVzdCBjbGllbnQgQ0ExHTAbBgkq +hkiG9w0BCQEWDmhlbGxvQG1hZ2ljLmlvMB4XDTIxMDgwOTIxNTA1MloXDTMyMDEw +NDIxNTA1MlowgZUxCzAJBgNVBAYTAkNBMRAwDgYDVQQIDAdPbnRhcmlvMRAwDgYD +VQQHDAdUb3JvbnRvMRgwFgYDVQQKDA9NYWdpY1N0YWNrIEluYy4xFjAUBgNVBAsM +DWFzeW5jcGcgdGVzdHMxETAPBgNVBAMMCHNzbF91c2VyMR0wGwYJKoZIhvcNAQkB +Fg5oZWxsb0BtYWdpYy5pbzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEB +AJjiP9Ik/KRRLK9GMvoH8m1LO+Gyrr8Gz36LpmKJMR/PpwTL+1pOkYSGhOyT3Cw9 +/kWWLJRCvYqKgFtYtbr4S6ReGm3GdSVW+sfVRYDrRQZLPgQSPeq25g2v8UZ63Ota +lPAyUPUZKpxyWz8PL77lV8psb9yv14yBH2kv9BbxKPksWOU8p8OCn1Z3WFFl0ItO +nzMvCp5os+xFrt4SpoRGTx9x4QleY+zrEsYZtmnV4wC+JuJkNw4fuCdrX5k7dghs +uZkcsAZof1nMdYsYiazeDfQKZtJqh5kO7mpwvCudKUWaLJJUwiQA87BwSlnCd/Hh +TZDbC+zeFNjTS49/4Q72xVECAwEAAaM7MDkwHwYDVR0jBBgwFoAUi1jMmAisuOib +mHIE2n0W2WnnaL0wCQYDVR0TBAIwADALBgNVHQ8EBAMCBPAwDQYJKoZIhvcNAQEL +BQADggEBACbnp5oOp639ko4jn8axF+so91k0vIcgwDg+NqgtSRsuAENGumHAa8ec +YOks0TCTvNN5E6AfNSxRat5CyguIlJ/Vy3KbkkFNXcCIcI/duAJvNphg7JeqYlQM +VIJhrO/5oNQMzzTw8XzTHnciGbrbiZ04hjwrruEkvmIAwgQPhIgq4H6umTZauTvk +DEo7uLm7RuG9hnDyWCdJxLLljefNL/EAuDYpPzgTeEN6JAnOu0ULIbpxpJKiYEId +8I0U2n0I2NTDOHmsAJiXf8BiHHmpK5SXFyY9s2ZuGkCzvmeZlR81tTXmHZ3v1X2z +8NajoAZfJ+QD50DrbF5E00yovZbyIB4= +-----END CERTIFICATE----- diff --git a/tests/certs/client.csr.pem b/tests/certs/client.csr.pem new file mode 100644 index 00000000..c6a87c65 --- /dev/null +++ b/tests/certs/client.csr.pem @@ -0,0 +1,18 @@ +-----BEGIN CERTIFICATE REQUEST----- 
+MIIC2zCCAcMCAQAwgZUxCzAJBgNVBAYTAkNBMRAwDgYDVQQIDAdPbnRhcmlvMRAw +DgYDVQQHDAdUb3JvbnRvMRgwFgYDVQQKDA9NYWdpY1N0YWNrIEluYy4xFjAUBgNV +BAsMDWFzeW5jcGcgdGVzdHMxETAPBgNVBAMMCHNzbF91c2VyMR0wGwYJKoZIhvcN +AQkBFg5oZWxsb0BtYWdpYy5pbzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC +ggEBAJjiP9Ik/KRRLK9GMvoH8m1LO+Gyrr8Gz36LpmKJMR/PpwTL+1pOkYSGhOyT +3Cw9/kWWLJRCvYqKgFtYtbr4S6ReGm3GdSVW+sfVRYDrRQZLPgQSPeq25g2v8UZ6 +3OtalPAyUPUZKpxyWz8PL77lV8psb9yv14yBH2kv9BbxKPksWOU8p8OCn1Z3WFFl +0ItOnzMvCp5os+xFrt4SpoRGTx9x4QleY+zrEsYZtmnV4wC+JuJkNw4fuCdrX5k7 +dghsuZkcsAZof1nMdYsYiazeDfQKZtJqh5kO7mpwvCudKUWaLJJUwiQA87BwSlnC +d/HhTZDbC+zeFNjTS49/4Q72xVECAwEAAaAAMA0GCSqGSIb3DQEBCwUAA4IBAQCG +irI2ph09V/4BMe6QMhjBFUatwmTa/05PYGjvT3LAhRzEb3/o/gca0XFSAFrE6zIY +DsgMk1c8aLr9DQsn9cf22oMFImKdnIZ3WLE9MXjN+s1Bjkiqt7uxDpxPo/DdfUTQ +RQC5i/Z2tn29y9K09lEjp35ZhPp3tOA0V4CH0FThAjRR+amwaBjxQ7TTSNfoMUd7 +i/DrylwnNg1iEQmYUwJYopqgxtwseiBUSDXzEvjFPY4AvZKmEQmE5QkybpWIfivt +1kmKhvKKpn5Cb6c0D3XoYqyPN3TxqjH9L8R+tWUCwhYJeDZj5DumFr3Hw/sx8tOL +EctyS6XfO3S2KbmDiyv8 +-----END CERTIFICATE REQUEST----- diff --git a/tests/certs/client.key.pem b/tests/certs/client.key.pem new file mode 100644 index 00000000..90389cab --- /dev/null +++ b/tests/certs/client.key.pem @@ -0,0 +1,27 @@ +-----BEGIN RSA PRIVATE KEY----- +MIIEowIBAAKCAQEAmOI/0iT8pFEsr0Yy+gfybUs74bKuvwbPfoumYokxH8+nBMv7 +Wk6RhIaE7JPcLD3+RZYslEK9ioqAW1i1uvhLpF4abcZ1JVb6x9VFgOtFBks+BBI9 +6rbmDa/xRnrc61qU8DJQ9RkqnHJbPw8vvuVXymxv3K/XjIEfaS/0FvEo+SxY5Tyn +w4KfVndYUWXQi06fMy8Knmiz7EWu3hKmhEZPH3HhCV5j7OsSxhm2adXjAL4m4mQ3 +Dh+4J2tfmTt2CGy5mRywBmh/Wcx1ixiJrN4N9Apm0mqHmQ7uanC8K50pRZosklTC +JADzsHBKWcJ38eFNkNsL7N4U2NNLj3/hDvbFUQIDAQABAoIBAAIMVeqM0E2rQLwA +ZsJuxNKuBVlauXiZsMHzQQFk8SGJ+KTZzr5A+zYZT0KUIIj/M57fCi3aTwvCG0Ie +CCE/HlRPZm8+D2e2qJlwxAOcI0qYS3ZmgCna1W4tgz/8eWU1y3UEV41RDv8VkR9h +JrSaAfkWRtFgEbUyLaeNGuoLxQ7Bggo9zi1/xDJz/aZ/y4L4y8l1xs2eNVmbRGnj +mPr1daeYhsWgaNiT/Wm3CAxvykptHavyWSsrXzCp0bEw6fAXxBqkeDFGIMVC9q3t +ZRFtqMHi9i7SJtH1XauOC6QxLYgSEmNEie1JYbNx2Zf4h2KvSwDxpTqWhOjJ/m5j 
+/NSkASECgYEAyHQAqG90yz5QaYnC9lgUhGIMokg9O3LcEbeK7IKIPtC9xINOrnj6 +ecCfhfc1aP3wQI+VKC3kiYerfTJvVsU5CEawBQSRiBY/TZZ7hTR7Rkm3s4xeM+o6 +2zADdVUwmTVYwu0gUKCeDKO4iD8Uhh8J54JrKUejuG50VWZQWGVgqo0CgYEAwz+2 +VdYcfuQykMA3jQBnXmMMK92/Toq6FPDgsa45guEFD6Zfdi9347/0Ipt+cTNg0sUZ +YBLOnNPwLn+yInfFa88Myf0UxCAOoZKfpJg/J27soUJzpd/CGx+vaAHrxMP6t/qo +JAGMBIyOoqquId7jvErlC/sGBk/duya7IdiT1tUCgYBuvM8EPhaKlVE9DJL9Hmmv +PK94E2poZiq3SutffzkfYpgDcPrNnh3ZlxVJn+kMqITKVcfz226On7mYP32MtQWt +0cc57m0rfgbYqRJx4y1bBiyK7ze3fGWpYxv1/OsNKJBxlygsAp9toiC2fAqtkYYa +NE1ZD6+dmr9/0jb+rnq5nQKBgQCtZvwsp4ePOmOeItgzJdSoAxdgLgQlYRd6WaN0 +qeLx1Z6FE6FceTPk1SmhQq+9IYAwMFQk+w78QU3iPg6ahfyTjsMw8M9sj3vvCyU1 +LPGJt/34CehjvKHLLQy/NlWJ3vPgSYDi2Wzc7WgQF72m3ykqpOlfBoWHPY8TE4bG +vG4wMQKBgFSq2GDAJ1ovBl7yWYW7w4SM8X96YPOff+OmI4G/8+U7u3dDM1dYeQxD +7BHLuvr4AXg27LC97u8/eFIBXC1elbco/nAKE1YHj2xcIb/4TsgAqkcysGV08ngi +dULh3q0GpTYyuELZV4bfWE8MjSiGAH+nuMdXYDGuY2QnBq8MdSOH +-----END RSA PRIVATE KEY----- diff --git a/tests/certs/client_ca.cert.pem b/tests/certs/client_ca.cert.pem new file mode 100644 index 00000000..17d3f357 --- /dev/null +++ b/tests/certs/client_ca.cert.pem @@ -0,0 +1,25 @@ +-----BEGIN CERTIFICATE----- +MIIEKTCCAxGgAwIBAgIUKmL8tfNS9LIB6GLB9RpZpTyk3uIwDQYJKoZIhvcNAQEL +BQAwgaMxCzAJBgNVBAYTAkNBMRAwDgYDVQQIDAdPbnRhcmlvMRAwDgYDVQQHDAdU +b3JvbnRvMRgwFgYDVQQKDA9NYWdpY1N0YWNrIEluYy4xFjAUBgNVBAsMDWFzeW5j +cGcgdGVzdHMxHzAdBgNVBAMMFmFzeW5jcGcgdGVzdCBjbGllbnQgQ0ExHTAbBgkq +hkiG9w0BCQEWDmhlbGxvQG1hZ2ljLmlvMB4XDTIxMDgwOTIxNDQxM1oXDTQxMDgw +NDIxNDQxM1owgaMxCzAJBgNVBAYTAkNBMRAwDgYDVQQIDAdPbnRhcmlvMRAwDgYD +VQQHDAdUb3JvbnRvMRgwFgYDVQQKDA9NYWdpY1N0YWNrIEluYy4xFjAUBgNVBAsM +DWFzeW5jcGcgdGVzdHMxHzAdBgNVBAMMFmFzeW5jcGcgdGVzdCBjbGllbnQgQ0Ex +HTAbBgkqhkiG9w0BCQEWDmhlbGxvQG1hZ2ljLmlvMIIBIjANBgkqhkiG9w0BAQEF +AAOCAQ8AMIIBCgKCAQEAptRYfxKiWExfZguQDva53bIqYa4lJwZA86Qu0peBUcsd +E6zyHNgVv4XSMim1FH12KQ4KPKuQAcVqRMCRAHqB96kUfWQqF//fLajr0umdzcbx ++UTgNux8TkScTl9KNAxhiR/oOGbKFcNSs4raaG8puwwEN66uMhoKk2pN2NwDVfHa 
+bTekJ3jouTcTCnqCynx4qwI4WStJkuW4IPCmDRVXxOOauT7YalElYLWYtAOqGEvf +noDK2Imhc0h6B5XW8nI54rVCXWwhW1v3RLAJGP+LwSy++bf08xmpHXdKkAj5BmUO +QwJRiJ33Xa17rmi385egx8KpqV04YEAPdV1Z4QM6PQIDAQABo1MwUTAdBgNVHQ4E +FgQUi1jMmAisuOibmHIE2n0W2WnnaL0wHwYDVR0jBBgwFoAUi1jMmAisuOibmHIE +2n0W2WnnaL0wDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0BAQsFAAOCAQEAifNE +ZLZXxECp2Sl6jCViZxgFf2+OHDvRORgI6J0heckYyYF/JHvLaDphh6TkSJAdT6Y3 +hAb7jueTMI+6RIdRzIjTKCGdJqUetiSfAbnQyIp2qmVqdjeFoXTvQL7BdkIE+kOW +0iomMqDB3czTl//LrgVQCYqKM0D/Ytecpg2mbshLfpPxdHyliCJcb4SqfdrDnKoV +HUduBjOVot+6bkB5SEGCrrB4KMFTzbAu+zriKWWz+uycIyeVMLEyhDs59vptOK6e +gWkraG43LZY3cHPiVeN3tA/dWdyJf9rgK21zQDSMB8OSH4yQjdQmkkvRQBjp3Fcy +w2SZIP4o9l1Y7+hMMw== +-----END CERTIFICATE----- diff --git a/tests/certs/client_ca.cert.srl b/tests/certs/client_ca.cert.srl new file mode 100644 index 00000000..0eae4d30 --- /dev/null +++ b/tests/certs/client_ca.cert.srl @@ -0,0 +1 @@ +3DF7A3F0843FE5B0AB8A1A96226AEADAF28F3AAD diff --git a/tests/certs/client_ca.key.pem b/tests/certs/client_ca.key.pem new file mode 100644 index 00000000..519c5a09 --- /dev/null +++ b/tests/certs/client_ca.key.pem @@ -0,0 +1,27 @@ +-----BEGIN RSA PRIVATE KEY----- +MIIEpAIBAAKCAQEAptRYfxKiWExfZguQDva53bIqYa4lJwZA86Qu0peBUcsdE6zy +HNgVv4XSMim1FH12KQ4KPKuQAcVqRMCRAHqB96kUfWQqF//fLajr0umdzcbx+UTg +Nux8TkScTl9KNAxhiR/oOGbKFcNSs4raaG8puwwEN66uMhoKk2pN2NwDVfHabTek +J3jouTcTCnqCynx4qwI4WStJkuW4IPCmDRVXxOOauT7YalElYLWYtAOqGEvfnoDK +2Imhc0h6B5XW8nI54rVCXWwhW1v3RLAJGP+LwSy++bf08xmpHXdKkAj5BmUOQwJR +iJ33Xa17rmi385egx8KpqV04YEAPdV1Z4QM6PQIDAQABAoIBABQrKcO7CftoyEO6 +9CCK/W9q4arLddxg6itKVwrInC66QnqlduO7z+1GjWHZHvYqMMXH17778r30EuPa +7+zB4sKBI2QBXwFlwqJvgIsQCS7edVRwWjbpoiGIM+lZpcvjD0uXmuhurNGyumXQ +TJVBkyb0zfG5YX/XHB40RNMJzjFuiMPDLVQmmDE//FOuWqBG88MgJP9Ghk3J7wA2 +JfDPavb49EzOCSh74zJWP7/QyybzF3ABCMu4OFkaOdqso8FS659XI55QReBbUppu +FRkOgao1BclJhbBdrdtLNjlETM82tfVgW56vaIrrU2z7HskihEyMdB4c+CYbBnPx +QqIhkhUCgYEA0SLVExtNy5Gmi6/ZY9tcd3QIuxcN6Xiup+LgIhWK3+GIoVOPsOjN 
+27dlVRINPKhrCfVbrLxUtDN5PzphwSA2Qddm4jg3d5FzX+FgKHQpoaU1WjtRPP+w +K+t6W/NbZ8Rn4JyhZQ3Yqj264NA2l3QmuTfZSUQ5m4x7EUakfGU7G1sCgYEAzDaU +jHsovn0FedOUaaYl6pgzjFV8ByPeT9usN54PZyuzyc+WunjJkxCQqD88J9jyG8XB +3V3tQj/CNbMczrS2ZaJ29aI4b/8NwBNR9e6t01bY3B90GJi8S4B4Hf8tYyIlVdeL +tCC4FCZhvl4peaK3AWBj4NhjvdB32ThDXSGxLEcCgYEAiA5tKHz+44ziGMZSW1B+ +m4f1liGtf1Jv7fD/d60kJ/qF9M50ENej9Wkel3Wi/u9ik5v4BCyRvpouKyBEMGxQ +YA1OdaW1ECikMqBg+nB4FR1x1D364ABIEIqlk+SCdsOkANBlf2S+rCJ0zYUnvuhl +uOHIjo3AHJ4MAnU+1V7WUTkCgYBkMedioc7U34x/QJNR3sY9ux2Xnh2zdyLNdc+i +njeafDPDMcoXhcoJERiYpCYEuwnXHIlI7pvJZHUKWe4pcTsI1NSfIk+ki7SYaCJP +kyLQTY0rO3d/1fiU5tyIgzomqIs++fm+kEsg/8/3UkXxOyelUkDPAfy2FgGnn1ZV +7ID8YwKBgQCeZCapdGJ6Iu5oYB17TyE5pLwb+QzaofR5uO8H4pXGVQyilKVCG9Dp +GMnlXD7bwXPVKa8Icow2OIbmgrZ2mzOo9BSY3BlkKbpJDy7UNtAhzsHHN5/AEk8z +YycWQtMiXI+cRsYO0eyHhJeSS2hX+JTe++iZX65twV53agzCHWRIbg== +-----END RSA PRIVATE KEY----- diff --git a/tests/test_connect.py b/tests/test_connect.py index 84eac202..be694d67 100644 --- a/tests/test_connect.py +++ b/tests/test_connect.py @@ -15,6 +15,7 @@ import tempfile import textwrap import unittest +import urllib.parse import weakref import asyncpg @@ -33,6 +34,9 @@ SSL_CA_CERT_FILE = os.path.join(CERTS, 'ca.cert.pem') SSL_CERT_FILE = os.path.join(CERTS, 'server.cert.pem') SSL_KEY_FILE = os.path.join(CERTS, 'server.key.pem') +CLIENT_CA_CERT_FILE = os.path.join(CERTS, 'client_ca.cert.pem') +CLIENT_SSL_CERT_FILE = os.path.join(CERTS, 'client.cert.pem') +CLIENT_SSL_KEY_FILE = os.path.join(CERTS, 'client.key.pem') class TestSettings(tb.ConnectedTestCase): @@ -1124,6 +1128,8 @@ async def verify_works(sslmode): try: con = await self.connect( dsn='postgresql://foo/?sslmode=' + sslmode, + user='postgres', + database='postgres', host='localhost') self.assertEqual(await con.fetchval('SELECT 42'), 42) self.assertFalse(con._protocol.is_ssl) @@ -1137,6 +1143,8 @@ async def verify_fails(sslmode): with self.assertRaises(ConnectionError): con = await self.connect( dsn='postgresql://foo/?sslmode=' + 
sslmode, + user='postgres', + database='postgres', host='localhost') await con.fetchval('SELECT 42') finally: @@ -1167,6 +1175,7 @@ def get_server_settings(cls): 'ssl': 'on', 'ssl_cert_file': SSL_CERT_FILE, 'ssl_key_file': SSL_KEY_FILE, + 'ssl_ca_file': CLIENT_CA_CERT_FILE, }) return conf @@ -1245,7 +1254,7 @@ async def verify_works(sslmode, *, host='localhost'): con = None try: con = await self.connect( - dsn='postgresql://foo/?sslmode=' + sslmode, + dsn='postgresql://foo/postgres?sslmode=' + sslmode, host=host, user='ssl_user') self.assertEqual(await con.fetchval('SELECT 42'), 42) @@ -1358,6 +1367,72 @@ async def test_executemany_uvloop_ssl_issue_700(self): await con.close() +@unittest.skipIf(os.environ.get('PGHOST'), 'unmanaged cluster') +class TestClientSSLConnection(BaseTestSSLConnection): + def _add_hba_entry(self): + self.cluster.add_hba_entry( + type='hostssl', address=ipaddress.ip_network('127.0.0.0/24'), + database='postgres', user='ssl_user', + auth_method='cert') + + self.cluster.add_hba_entry( + type='hostssl', address=ipaddress.ip_network('::1/128'), + database='postgres', user='ssl_user', + auth_method='cert') + + async def test_ssl_connection_client_auth_fails_with_wrong_setup(self): + ssl_context = ssl.create_default_context( + ssl.Purpose.SERVER_AUTH, + cafile=SSL_CA_CERT_FILE, + ) + + with self.assertRaisesRegex( + exceptions.InvalidAuthorizationSpecificationError, + "requires a valid client certificate", + ): + await self.connect( + host='localhost', + user='ssl_user', + ssl=ssl_context, + ) + + async def test_ssl_connection_client_auth_custom_context(self): + ssl_context = ssl.create_default_context( + ssl.Purpose.SERVER_AUTH, + cafile=SSL_CA_CERT_FILE, + ) + ssl_context.load_cert_chain( + CLIENT_SSL_CERT_FILE, + keyfile=CLIENT_SSL_KEY_FILE, + ) + + con = await self.connect( + host='localhost', + user='ssl_user', + ssl=ssl_context, + ) + + try: + self.assertEqual(await con.fetchval('SELECT 42'), 42) + finally: + await con.close() + + async def 
test_ssl_connection_client_auth_dsn(self): + params = urllib.parse.urlencode({ + 'sslrootcert': SSL_CA_CERT_FILE, + 'sslcert': CLIENT_SSL_CERT_FILE, + 'sslkey': CLIENT_SSL_KEY_FILE, + 'sslmode': 'verify-full', + }) + dsn = 'postgres://ssl_user@localhost/postgres?' + params + con = await self.connect(dsn=dsn) + + try: + self.assertEqual(await con.fetchval('SELECT 42'), 42) + finally: + await con.close() + + @unittest.skipIf(os.environ.get('PGHOST'), 'unmanaged cluster') class TestNoSSLConnection(BaseTestSSLConnection): def _add_hba_entry(self): @@ -1376,7 +1451,7 @@ async def verify_works(sslmode, *, host='localhost'): con = None try: con = await self.connect( - dsn='postgresql://foo/?sslmode=' + sslmode, + dsn='postgresql://foo/postgres?sslmode=' + sslmode, host=host, user='ssl_user') self.assertEqual(await con.fetchval('SELECT 42'), 42) @@ -1413,7 +1488,7 @@ async def verify_fails(sslmode, *, host='localhost', async def test_nossl_connection_prefer_cancel(self): con = await self.connect( - dsn='postgresql://foo/?sslmode=prefer', + dsn='postgresql://foo/postgres?sslmode=prefer', host='localhost', user='ssl_user') self.assertFalse(con._protocol.is_ssl) From fe7eb022c3837e1cc12a1e47b46e83e0ff5c8844 Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Mon, 9 Aug 2021 17:29:45 -0700 Subject: [PATCH 054/193] Fix lint error --- tests/test_pool.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_pool.py b/tests/test_pool.py index 8a237323..4bf6a0c9 100644 --- a/tests/test_pool.py +++ b/tests/test_pool.py @@ -239,7 +239,7 @@ async def test_pool_11(self): with self.assertRaisesRegex( asyncpg.InterfaceError, r'cannot call Cursor\.forward.*released ' - r'back to the pool'.format(meth=meth)): + r'back to the pool'): c.forward(1) From 26e0660e0832a0673da94a3bdc230d7a3105408e Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Mon, 9 Aug 2021 17:34:51 -0700 Subject: [PATCH 055/193] Bump development dependencies --- pytest.ini | 2 +- setup.py 
| 12 ++++++------ 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/pytest.ini b/pytest.ini index 4603e347..b8a25b5f 100644 --- a/pytest.ini +++ b/pytest.ini @@ -1,4 +1,4 @@ [pytest] -addopts = --capture=no --assert=plain --strict --tb native +addopts = --capture=no --assert=plain --strict-markers --tb=native --import-mode=importlib testpaths = tests filterwarnings = default diff --git a/setup.py b/setup.py index 18c34d41..4da3fb25 100644 --- a/setup.py +++ b/setup.py @@ -32,16 +32,16 @@ # pycodestyle is a dependency of flake8, but it must be frozen because # their combination breaks too often # (example breakage: https://gitlab.com/pycqa/flake8/issues/427) - 'pycodestyle~=2.5.0', - 'flake8~=3.7.9', + 'pycodestyle~=2.7.0', + 'flake8~=3.9.2', 'uvloop>=0.15.3; platform_system != "Windows" and python_version >= "3.7"', ] # Dependencies required to build documentation. DOC_DEPENDENCIES = [ - 'Sphinx~=1.7.3', - 'sphinxcontrib-asyncio~=0.2.0', - 'sphinx_rtd_theme~=0.2.4', + 'Sphinx~=4.1.2', + 'sphinxcontrib-asyncio~=0.3.0', + 'sphinx_rtd_theme~=0.5.2', ] EXTRA_DEPENDENCIES = { @@ -50,7 +50,7 @@ # Dependencies required to develop asyncpg. 'dev': [ CYTHON_DEPENDENCY, - 'pytest>=3.6.0', + 'pytest>=6.0', ] + DOC_DEPENDENCIES + TEST_DEPENDENCIES } From 57979369b26b415ce8ed2b44965ccef0bdfff6f0 Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Mon, 9 Aug 2021 18:05:54 -0700 Subject: [PATCH 056/193] Github workflows touchups (#804) Bump versions of various actions used in the workflow, add Python 3.10 to the release list and silence pointless pip warnings which get surfaced as error annotations. 
--- .github/workflows/release.yml | 35 +++++++++++++++++++++-------------- .github/workflows/tests.yml | 10 ++++++++-- 2 files changed, 29 insertions(+), 16 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 7ed44af1..55d2032f 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -37,7 +37,7 @@ jobs: mkdir -p dist/ echo "${VERSION}" > dist/VERSION - - uses: actions/upload-artifact@v1 + - uses: actions/upload-artifact@v2 with: name: dist path: dist/ @@ -46,23 +46,24 @@ jobs: needs: validate-release-request runs-on: ubuntu-latest + env: + PIP_DISABLE_PIP_VERSION_CHECK: 1 + steps: - - uses: actions/checkout@v1 + - uses: actions/checkout@v2 with: fetch-depth: 50 submodules: true - - name: Set up Python 3.7 - uses: actions/setup-python@v1 - with: - python-version: 3.7 + - name: Set up Python + uses: actions/setup-python@v2 - name: Build source distribution run: | pip install -U setuptools wheel pip python setup.py sdist - - uses: actions/upload-artifact@v1 + - uses: actions/upload-artifact@v2 with: name: dist path: dist/ @@ -72,8 +73,8 @@ jobs: runs-on: ${{ matrix.os }} strategy: matrix: - python-version: [3.6, 3.7, 3.8, 3.9] - os: [ubuntu-20.04, macos-latest, windows-latest] + python-version: [3.6, 3.7, 3.8, 3.9, 3.10.0.rc.1] + os: [ubuntu-latest, macos-latest, windows-latest] arch: [x86_64] exclude: - os: windows-latest @@ -85,8 +86,11 @@ jobs: run: shell: bash + env: + PIP_DISABLE_PIP_VERSION_CHECK: 1 + steps: - - uses: actions/checkout@v1 + - uses: actions/checkout@v2 with: fetch-depth: 50 submodules: true @@ -96,7 +100,7 @@ jobs: uses: docker/setup-qemu-action@v1 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v2 with: python-version: ${{ matrix.python-version }} @@ -202,7 +206,7 @@ jobs: && su -l test /github/workspace/.github/workflows/test-wheels.sh \ ' - - uses: actions/upload-artifact@v1 + - uses: actions/upload-artifact@v2 
with: name: dist path: dist/ @@ -211,6 +215,9 @@ jobs: needs: validate-release-request runs-on: ubuntu-latest + env: + PIP_DISABLE_PIP_VERSION_CHECK: 1 + steps: - name: Checkout source uses: actions/checkout@v2 @@ -255,12 +262,12 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v1 + - uses: actions/checkout@v2 with: fetch-depth: 5 submodules: false - - uses: actions/download-artifact@v1 + - uses: actions/download-artifact@v2 with: name: dist path: dist/ diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 9d5a3216..496d4c47 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -17,7 +17,7 @@ jobs: # job. strategy: matrix: - python-version: [3.6, 3.7, 3.8, 3.9, 3.10.0-beta.4] + python-version: [3.6, 3.7, 3.8, 3.9, 3.10.0-rc.1] os: [ubuntu-latest, macos-latest, windows-latest] loop: [asyncio, uvloop] exclude: @@ -34,6 +34,9 @@ jobs: run: shell: bash + env: + PIP_DISABLE_PIP_VERSION_CHECK: 1 + steps: - uses: actions/checkout@v2 with: @@ -85,6 +88,9 @@ jobs: runs-on: ubuntu-latest + env: + PIP_DISABLE_PIP_VERSION_CHECK: 1 + steps: - uses: actions/checkout@v2 with: @@ -93,10 +99,10 @@ jobs: - name: Check if release PR. 
uses: edgedb/action-release/validate-pr@master - continue-on-error: true id: release with: github_token: ${{ secrets.RELEASE_BOT_GITHUB_TOKEN }} + missing_version_ok: yes version_file: asyncpg/_version.py version_line_pattern: | __version__\s*=\s*(?:['"])([[:PEP440:]])(?:['"]) From b694c3a477bbab6157bf402320f9f52586e33bff Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Mon, 9 Aug 2021 20:50:55 -0700 Subject: [PATCH 057/193] Use cibuildwheel to build release wheels --- .github/workflows/build-manylinux-wheels.sh | 25 ---- .github/workflows/install-postgres.sh | 48 +++++-- .github/workflows/release.yml | 139 +++----------------- .github/workflows/test-wheels.sh | 7 - .github/workflows/tests.yml | 5 - asyncpg/cluster.py | 10 +- 6 files changed, 67 insertions(+), 167 deletions(-) delete mode 100755 .github/workflows/build-manylinux-wheels.sh delete mode 100755 .github/workflows/test-wheels.sh diff --git a/.github/workflows/build-manylinux-wheels.sh b/.github/workflows/build-manylinux-wheels.sh deleted file mode 100755 index 53ed6d33..00000000 --- a/.github/workflows/build-manylinux-wheels.sh +++ /dev/null @@ -1,25 +0,0 @@ -#!/bin/bash - -set -e -x - -PY_MAJOR=${PYTHON_VERSION%%.*} -PY_MINOR=${PYTHON_VERSION#*.} - -ML_PYTHON_VERSION="cp${PY_MAJOR}${PY_MINOR}-cp${PY_MAJOR}${PY_MINOR}" -if [ "${PY_MAJOR}" -lt "4" -a "${PY_MINOR}" -lt "8" ]; then - ML_PYTHON_VERSION+="m" -fi - -# Compile wheels -PYTHON="/opt/python/${ML_PYTHON_VERSION}/bin/python" -PIP="/opt/python/${ML_PYTHON_VERSION}/bin/pip" -"${PIP}" install --upgrade setuptools pip wheel -cd "${GITHUB_WORKSPACE}" -make clean -"${PYTHON}" setup.py bdist_wheel - -# Bundle external shared libraries into the wheels. 
-for whl in "${GITHUB_WORKSPACE}"/dist/*.whl; do - auditwheel repair $whl -w "${GITHUB_WORKSPACE}"/dist/ - rm "${GITHUB_WORKSPACE}"/dist/*-linux_*.whl -done diff --git a/.github/workflows/install-postgres.sh b/.github/workflows/install-postgres.sh index 5c6f38bb..70d42f60 100755 --- a/.github/workflows/install-postgres.sh +++ b/.github/workflows/install-postgres.sh @@ -3,14 +3,42 @@ set -Eexuo pipefail shopt -s nullglob -export DEBIAN_FRONTEND=noninteractive +PGVERSION=${PGVERSION:-12} -apt-get install -y --no-install-recommends curl -curl https://www.postgresql.org/media/keys/ACCC4CF8.asc | apt-key add - -mkdir -p /etc/apt/sources.list.d/ -echo "deb https://apt.postgresql.org/pub/repos/apt/ ${DISTRO_NAME}-pgdg main" \ - >> /etc/apt/sources.list.d/pgdg.list -apt-get update -apt-get install -y --no-install-recommends \ - postgresql-${PGVERSION} \ - postgresql-contrib-${PGVERSION} +if [ -e /etc/os-release ]; then + source /etc/os-release +elif [ -e /etc/centos-release ]; then + ID="centos" + VERSION_ID=$(cat /etc/centos-release | cut -f3 -d' ' | cut -f1 -d.) 
+else + echo "install-postgres.sh: cannot determine which Linux distro this is" >&2 + exit 1 +fi + +if [ "${ID}" = "debian" -o "${ID}" = "ubuntu" ]; then + export DEBIAN_FRONTEND=noninteractive + + apt-get install -y --no-install-recommends curl gnupg ca-certificates + curl https://www.postgresql.org/media/keys/ACCC4CF8.asc | apt-key add - + mkdir -p /etc/apt/sources.list.d/ + echo "deb https://apt.postgresql.org/pub/repos/apt/ ${VERSION_CODENAME}-pgdg main" \ + >> /etc/apt/sources.list.d/pgdg.list + apt-get update + apt-get install -y --no-install-recommends \ + "postgresql-${PGVERSION}" \ + "postgresql-contrib-${PGVERSION}" +elif [ "${ID}" = "centos" ]; then + el="EL-${VERSION_ID}-$(arch)" + baseurl="https://download.postgresql.org/pub/repos/yum/reporpms" + yum install -y "${baseurl}/${el}/pgdg-redhat-repo-latest.noarch.rpm" + if [ ${VERSION_ID} -ge 8 ]; then + dnf -qy module disable postgresql + fi + yum install -y \ + "postgresql${PGVERSION}-server" \ + "postgresql${PGVERSION}-contrib" + ln -s "/usr/pgsql-${PGVERSION}/bin/pg_config" "/usr/local/bin/pg_config" +else + echo "install-postgres.sh: Unsupported distro: ${distro}" >&2 + exit 1 +fi diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 55d2032f..e388e7bb 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -66,21 +66,16 @@ jobs: - uses: actions/upload-artifact@v2 with: name: dist - path: dist/ + path: dist/*.tar.* build-wheels: needs: validate-release-request runs-on: ${{ matrix.os }} strategy: matrix: - python-version: [3.6, 3.7, 3.8, 3.9, 3.10.0.rc.1] os: [ubuntu-latest, macos-latest, windows-latest] - arch: [x86_64] - exclude: - - os: windows-latest - arch: aarch64 - - os: macos-latest - arch: aarch64 + cibw_python: ["cp37-*", "cp38-*", "cp39-*", "cp310-*"] + cibw_arch: ["auto64"] defaults: run: @@ -95,121 +90,29 @@ jobs: fetch-depth: 50 submodules: true - - name: Set up QEMU - if: matrix.arch == 'aarch64' - uses: docker/setup-qemu-action@v1 - - 
- name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v2 - with: - python-version: ${{ matrix.python-version }} - - - name: Install Python Deps - run: | - python -m pip install --upgrade setuptools pip wheel - - - name: Build Wheels (linux) - if: startsWith(matrix.os, 'ubuntu') + - uses: pypa/cibuildwheel@v2.1.1 env: - PYTHON_VERSION: ${{ matrix.python-version }} - ARCH: ${{ matrix.arch }} - run: | - case "${ARCH}" in - x86_64) - mlimg=manylinux1_x86_64 - ;; - aarch64) - mlimg=manylinux2014_aarch64 - ;; - *) - echo "Unsupported wheel arch: ${ARCH}" >&2 - exit 1 - ;; - esac - - docker run --rm \ - -v "${GITHUB_WORKSPACE}":/github/workspace:rw \ - --workdir=/github/workspace \ - -e GITHUB_WORKSPACE=/github/workspace \ - -e PYTHON_VERSION="${PYTHON_VERSION}" \ - --entrypoint=/github/workspace/.github/workflows/build-manylinux-wheels.sh \ - quay.io/pypa/${mlimg} - - - name: Build Wheels (non-linux) - if: "!startsWith(matrix.os, 'ubuntu')" - run: | - make clean - python setup.py bdist_wheel - - - name: Test Wheels (native) - if: | - !contains(github.event.pull_request.labels.*.name, 'skip wheel tests') - && matrix.arch == 'x86_64' - env: - OS: ${{ matrix.os }} - run: | - if [ "${OS}" = "windows-latest" ]; then - export PGINSTALLATION="${PGBIN}" - fi - "${GITHUB_WORKSPACE}/.github/workflows/test-wheels.sh" - - - name: Test Wheels (emulated) - if: | - !contains(github.event.pull_request.labels.*.name, 'skip wheel tests') - && matrix.arch != 'x86_64' - env: - PYTHON_VERSION: ${{ matrix.python-version }} - PGVERSION: 13 - DISTRO_NAME: focal - ARCH: ${{ matrix.arch }} - run: | - sudo env DISTRO_NAME="${DISTRO_NAME}" PGVERSION="${PGVERSION}" \ - .github/workflows/install-postgres.sh - # Allow docker guest to connect to the database - echo "port = 5433" | \ - sudo tee --append /etc/postgresql/${PGVERSION}/main/postgresql.conf - echo "listen_addresses = '*'" | \ - sudo tee --append /etc/postgresql/${PGVERSION}/main/postgresql.conf - echo "host all all 
172.17.0.0/16 trust" | \ - sudo tee --append /etc/postgresql/${PGVERSION}/main/pg_hba.conf - if [ "${PGVERSION}" -ge "11" ]; then - # Disable JIT to avoid unpredictable timings in tests. - echo "jit = off" | \ - sudo tee --append /etc/postgresql/${PGVERSION}/main/postgresql.conf - fi - sudo pg_ctlcluster ${PGVERSION} main restart - - case "${ARCH}" in - aarch64) - img="docker.io/arm64v8/python:${PYTHON_VERSION}-buster" - ;; - *) - echo "Unsupported wheel arch: ${ARCH}" >&2 - exit 1 - ;; - esac - - docker run --rm \ - -v "${GITHUB_WORKSPACE}":/github/workspace:rw \ - -e GITHUB_WORKSPACE=/github/workspace \ - -e PYTHON_VERSION="${PYTHON_VERSION}" \ - --workdir=/github/workspace/ \ - ${img} \ - /bin/bash -ex -c ' \ - echo GITHUB_WORKSPACE=${GITHUB_WORKSPACE} >> /etc/environment \ - && echo PGHOST=$(ip route | grep default | cut -f3 -d" " | uniq) \ - >> /etc/environment \ - && echo PGPORT=5433 >> /etc/environment \ - && echo PGUSER=postgres >> /etc/environment \ - && echo ENVIRON_FILE /etc/environment >> /etc/login.defs \ - && useradd -m -s /bin/bash test \ - && su -l test /github/workspace/.github/workflows/test-wheels.sh \ - ' + CIBW_BUILD_VERBOSITY: 1 + CIBW_BUILD: ${{ matrix.cibw_python }} + CIBW_ARCHS: ${{ matrix.cibw_arch }} + CIBW_BEFORE_ALL_LINUX: > + yum -y install libffi-devel + && env PGVERSION=12 .github/workflows/install-postgres.sh + && useradd -m -s /bin/bash apgtest + CIBW_TEST_EXTRAS: "test" + CIBW_TEST_COMMAND: > + python {project}/tests/__init__.py + CIBW_TEST_COMMAND_WINDOWS: > + python {project}\tests\__init__.py + CIBW_TEST_COMMAND_LINUX: > + PY=`which python` + && chmod -R go+rX "$(dirname $(dirname $(dirname $PY)))" + && su -p -l apgtest -c "$PY {project}/tests/__init__.py" - uses: actions/upload-artifact@v2 with: name: dist - path: dist/ + path: wheelhouse/*.whl publish-docs: needs: validate-release-request diff --git a/.github/workflows/test-wheels.sh b/.github/workflows/test-wheels.sh deleted file mode 100755 index a65828ce..00000000 --- 
a/.github/workflows/test-wheels.sh +++ /dev/null @@ -1,7 +0,0 @@ -#!/bin/bash - -set -Eexuo pipefail -shopt -s nullglob - -pip install --pre -f "file:///${GITHUB_WORKSPACE}/dist" asyncpg -make -C "${GITHUB_WORKSPACE}" testinstalled diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 496d4c47..defe9d7a 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -59,11 +59,6 @@ jobs: with: python-version: ${{ matrix.python-version }} - - name: Configure PostgreSQL Environment - if: matrix.os == 'windows-latest' - run: | - echo PGINSTALLATION="${PGBIN}" >> "${GITHUB_ENV}" - - name: Install Python Deps if: steps.release.outputs.version == 0 run: | diff --git a/asyncpg/cluster.py b/asyncpg/cluster.py index 74d303ce..0999e41c 100644 --- a/asyncpg/cluster.py +++ b/asyncpg/cluster.py @@ -53,7 +53,10 @@ class Cluster: def __init__(self, data_dir, *, pg_config_path=None): self._data_dir = data_dir self._pg_config_path = pg_config_path - self._pg_bin_dir = os.environ.get('PGINSTALLATION') + self._pg_bin_dir = ( + os.environ.get('PGINSTALLATION') + or os.environ.get('PGBIN') + ) self._pg_ctl = None self._daemon_pid = None self._daemon_process = None @@ -518,7 +521,10 @@ def _run_pg_config(self, pg_config_path): def _find_pg_config(self, pg_config_path): if pg_config_path is None: - pg_install = os.environ.get('PGINSTALLATION') + pg_install = ( + os.environ.get('PGINSTALLATION') + or os.environ.get('PGBIN') + ) if pg_install: pg_config_path = platform_exe( os.path.join(pg_install, 'pg_config')) From 36658fa9cff0c4910ad8c042e754ef37b23919e7 Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Mon, 9 Aug 2021 18:06:59 -0700 Subject: [PATCH 058/193] asyncpg v0.24.0 Changes ------- * Drop support for Python 3.5 (#777) (by @and-semakin in da58cd26 for #777) * Add support for Python 3.10 (#795) (by @elprans in abf55699 for #795) * Add support for asynchronous iterables to copy_records_to_table() (#713) (by @elprans in 1d33ff62 for #713) 
* Add support for coroutine functions as listener callbacks (#802) (by @elprans in 41da093e for #802) * Add support for sslcert, sslkey and sslrootcert parameters to DSN (#768) (by @jdobes and @elprans in c674e86a for #768) * Add copy_ wrappers to Pool (#661) (by @elprans in a6b0f283 for #661) * Add issubset and issuperset methods to the Range type (#563) (by @kdorsel in de07d0ab for #563) Fixes ----- * Break connection internal circular reference (#774) (by @fantix in d08a9b8b for #774) * Make Server Version Extraction More Flexible (#778) (by @Natrinicle in d0761694 for #778) --- asyncpg/_version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/asyncpg/_version.py b/asyncpg/_version.py index eab825c7..2f106a98 100644 --- a/asyncpg/_version.py +++ b/asyncpg/_version.py @@ -10,4 +10,4 @@ # supported platforms, publish the packages on PyPI, merge the PR # to the target branch, create a Git tag pointing to the commit. -__version__ = '0.24.0.dev0' +__version__ = '0.24.0' From 383c711eb68bc6a042c121e1fddfde0cdefb8068 Mon Sep 17 00:00:00 2001 From: Fantix King Date: Wed, 15 Sep 2021 15:10:47 -0400 Subject: [PATCH 059/193] Fix SSL compatibility of libpq (#827) Co-authored-by: Elvis Pranskevichus --- asyncpg/connect_utils.py | 159 ++++++++++++++++----- asyncpg/connection.py | 14 ++ tests/certs/ca.cert.pem | 66 ++++----- tests/certs/ca.crl.pem | 19 +++ tests/certs/ca.key.pem | 51 +++++++ tests/certs/client.key.protected.pem | 30 ++++ tests/certs/gen.py | 202 ++++++++++++++++++++++++++ tests/certs/server.cert.pem | 63 ++++---- tests/certs/server.key.pem | 98 ++++++------- tests/test_connect.py | 205 +++++++++++++++++++++------ 10 files changed, 713 insertions(+), 194 deletions(-) create mode 100644 tests/certs/ca.crl.pem create mode 100644 tests/certs/ca.key.pem create mode 100644 tests/certs/client.key.protected.pem create mode 100644 tests/certs/gen.py diff --git a/asyncpg/connect_utils.py b/asyncpg/connect_utils.py index cd94b834..f6f9d651 100644 --- 
a/asyncpg/connect_utils.py +++ b/asyncpg/connect_utils.py @@ -18,6 +18,7 @@ import ssl as ssl_module import stat import struct +import sys import time import typing import urllib.parse @@ -220,13 +221,35 @@ def _parse_hostlist(hostlist, port, *, unquote=False): return hosts, port +def _parse_tls_version(tls_version): + if not hasattr(ssl_module, 'TLSVersion'): + raise ValueError( + "TLSVersion is not supported in this version of Python" + ) + if tls_version.startswith('SSL'): + raise ValueError( + f"Unsupported TLS version: {tls_version}" + ) + try: + return ssl_module.TLSVersion[tls_version.replace('.', '_')] + except KeyError: + raise ValueError( + f"No such TLS version: {tls_version}" + ) + + +def _dot_postgresql_path(filename) -> pathlib.Path: + return (pathlib.Path.home() / '.postgresql' / filename).resolve() + + def _parse_connect_dsn_and_args(*, dsn, host, port, user, password, passfile, database, ssl, connect_timeout, server_settings): # `auth_hosts` is the version of host information for the purposes # of reading the pgpass file. 
auth_hosts = None - sslcert = sslkey = sslrootcert = sslcrl = None + sslcert = sslkey = sslrootcert = sslcrl = sslpassword = None + ssl_min_protocol_version = ssl_max_protocol_version = None if dsn: parsed = urllib.parse.urlparse(dsn) @@ -312,24 +335,29 @@ def _parse_connect_dsn_and_args(*, dsn, host, port, user, ssl = val if 'sslcert' in query: - val = query.pop('sslcert') - if sslcert is None: - sslcert = val + sslcert = query.pop('sslcert') if 'sslkey' in query: - val = query.pop('sslkey') - if sslkey is None: - sslkey = val + sslkey = query.pop('sslkey') if 'sslrootcert' in query: - val = query.pop('sslrootcert') - if sslrootcert is None: - sslrootcert = val + sslrootcert = query.pop('sslrootcert') if 'sslcrl' in query: - val = query.pop('sslcrl') - if sslcrl is None: - sslcrl = val + sslcrl = query.pop('sslcrl') + + if 'sslpassword' in query: + sslpassword = query.pop('sslpassword') + + if 'ssl_min_protocol_version' in query: + ssl_min_protocol_version = query.pop( + 'ssl_min_protocol_version' + ) + + if 'ssl_max_protocol_version' in query: + ssl_max_protocol_version = query.pop( + 'ssl_max_protocol_version' + ) if query: if server_settings is None: @@ -451,34 +479,97 @@ def _parse_connect_dsn_and_args(*, dsn, host, port, user, if sslmode < SSLMode.allow: ssl = False else: - ssl = ssl_module.create_default_context( - ssl_module.Purpose.SERVER_AUTH) + ssl = ssl_module.SSLContext(ssl_module.PROTOCOL_TLS_CLIENT) ssl.check_hostname = sslmode >= SSLMode.verify_full - ssl.verify_mode = ssl_module.CERT_REQUIRED - if sslmode <= SSLMode.require: + if sslmode < SSLMode.require: ssl.verify_mode = ssl_module.CERT_NONE + else: + if sslrootcert is None: + sslrootcert = os.getenv('PGSSLROOTCERT') + if sslrootcert: + ssl.load_verify_locations(cafile=sslrootcert) + ssl.verify_mode = ssl_module.CERT_REQUIRED + else: + sslrootcert = _dot_postgresql_path('root.crt') + try: + ssl.load_verify_locations(cafile=sslrootcert) + except FileNotFoundError: + if sslmode > SSLMode.require: 
+ raise ValueError( + f'root certificate file "{sslrootcert}" does ' + f'not exist\nEither provide the file or ' + f'change sslmode to disable server ' + f'certificate verification.' + ) + elif sslmode == SSLMode.require: + ssl.verify_mode = ssl_module.CERT_NONE + else: + assert False, 'unreachable' + else: + ssl.verify_mode = ssl_module.CERT_REQUIRED - if sslcert is None: - sslcert = os.getenv('PGSSLCERT') + if sslcrl is None: + sslcrl = os.getenv('PGSSLCRL') + if sslcrl: + ssl.load_verify_locations(cafile=sslcrl) + ssl.verify_flags |= ssl_module.VERIFY_CRL_CHECK_CHAIN + else: + sslcrl = _dot_postgresql_path('root.crl') + try: + ssl.load_verify_locations(cafile=sslcrl) + except FileNotFoundError: + pass + else: + ssl.verify_flags |= ssl_module.VERIFY_CRL_CHECK_CHAIN if sslkey is None: sslkey = os.getenv('PGSSLKEY') - - if sslrootcert is None: - sslrootcert = os.getenv('PGSSLROOTCERT') - - if sslcrl is None: - sslcrl = os.getenv('PGSSLCRL') - + if not sslkey: + sslkey = _dot_postgresql_path('postgresql.key') + if not sslkey.exists(): + sslkey = None + if not sslpassword: + sslpassword = '' + if sslcert is None: + sslcert = os.getenv('PGSSLCERT') if sslcert: - ssl.load_cert_chain(sslcert, keyfile=sslkey) - - if sslrootcert: - ssl.load_verify_locations(cafile=sslrootcert) - - if sslcrl: - ssl.load_verify_locations(cafile=sslcrl) - ssl.verify_flags |= ssl_module.VERIFY_CRL_CHECK_CHAIN + ssl.load_cert_chain( + sslcert, keyfile=sslkey, password=lambda: sslpassword + ) + else: + sslcert = _dot_postgresql_path('postgresql.crt') + try: + ssl.load_cert_chain( + sslcert, keyfile=sslkey, password=lambda: sslpassword + ) + except FileNotFoundError: + pass + + # OpenSSL 1.1.1 keylog file, copied from create_default_context() + if hasattr(ssl, 'keylog_filename'): + keylogfile = os.environ.get('SSLKEYLOGFILE') + if keylogfile and not sys.flags.ignore_environment: + ssl.keylog_filename = keylogfile + + if ssl_min_protocol_version is None: + ssl_min_protocol_version = 
os.getenv('PGSSLMINPROTOCOLVERSION') + if ssl_min_protocol_version: + ssl.minimum_version = _parse_tls_version( + ssl_min_protocol_version + ) + else: + try: + ssl.minimum_version = _parse_tls_version('TLSv1.2') + except ValueError: + # Python 3.6 does not have ssl.TLSVersion + pass + + if ssl_max_protocol_version is None: + ssl_max_protocol_version = os.getenv('PGSSLMAXPROTOCOLVERSION') + if ssl_max_protocol_version: + ssl.maximum_version = _parse_tls_version( + ssl_max_protocol_version + ) elif ssl is True: ssl = ssl_module.create_default_context() diff --git a/asyncpg/connection.py b/asyncpg/connection.py index 26249679..a7ec7719 100644 --- a/asyncpg/connection.py +++ b/asyncpg/connection.py @@ -2020,6 +2020,20 @@ async def connect(dsn=None, *, The ``sslcert``, ``sslkey``, ``sslrootcert``, and ``sslcrl`` options are supported in the *dsn* argument. + .. versionchanged:: 0.25.0 + The ``sslpassword``, ``ssl_min_protocol_version``, + and ``ssl_max_protocol_version`` options are supported in the *dsn* + argument. + + .. versionchanged:: 0.25.0 + Default system root CA certificates won't be loaded when specifying a + particular sslmode, following the same behavior in libpq. + + .. versionchanged:: 0.25.0 + The ``sslcert``, ``sslkey``, ``sslrootcert``, and ``sslcrl`` options + in the *dsn* argument now have consistent default values of files under + ``~/.postgresql/`` as libpq. + .. _SSLContext: https://docs.python.org/3/library/ssl.html#ssl.SSLContext .. 
_create_default_context: https://docs.python.org/3/library/ssl.html#ssl.create_default_context diff --git a/tests/certs/ca.cert.pem b/tests/certs/ca.cert.pem index 0329883d..4a8a7016 100644 --- a/tests/certs/ca.cert.pem +++ b/tests/certs/ca.cert.pem @@ -1,35 +1,35 @@ -----BEGIN CERTIFICATE----- -MIIGFzCCA/+gAwIBAgIJAPTCST3Z/WinMA0GCSqGSIb3DQEBCwUAMIGhMQswCQYD -VQQGEwJDQTEQMA4GA1UECAwHT250YXJpbzEQMA4GA1UEBwwHVG9yb250bzEYMBYG -A1UECgwPTWFnaWNTdGFjayBJbmMuMRYwFAYDVQQLDA1hc3luY3BnIHRlc3RzMR0w -GwYDVQQDDBRhc3luY3BnIHRlc3Qgcm9vdCBjYTEdMBsGCSqGSIb3DQEJARYOaGVs -bG9AbWFnaWMuaW8wHhcNMTcwNDAzMTYxMzMwWhcNMzcwMzI5MTYxMzMwWjCBoTEL -MAkGA1UEBhMCQ0ExEDAOBgNVBAgMB09udGFyaW8xEDAOBgNVBAcMB1Rvcm9udG8x -GDAWBgNVBAoMD01hZ2ljU3RhY2sgSW5jLjEWMBQGA1UECwwNYXN5bmNwZyB0ZXN0 -czEdMBsGA1UEAwwUYXN5bmNwZyB0ZXN0IHJvb3QgY2ExHTAbBgkqhkiG9w0BCQEW -DmhlbGxvQG1hZ2ljLmlvMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA -zxreg1IEqX/g1IFwpNCc9hKa7YYMPk8mo4l+pE4CKXA9cQreaIiDg+l7+pJL3FMa -a/7cuUsBlVOq/T+9gmjzdWDTHTdq55PQx6co4OlRyPGad2kMwYlAERB6s2jGfuwM -sS0JJ3VPxUBXwB5ljq18L+HPsZXZhZOl6pBW74dfQE5SJZLTGIX6mbtwR+uQgaow -1RsMwFAGvwDu8c8+3lmUinGhlHXRJAhbncnlOWmAqa3Yf8rny0JeX7wz5x3vbxnX -9p9XMaXtV+hQWFHn21nAYjsCnDin6oyC2zUi9ahN5njKu+tUYA+K0ImliTAQNQ39 -m9SZvGNS2uIj/ryYVsI9FjgyJgV6JGcb0q1j2BPUmpPKwHN+sPkdKZy+Z4mVBiel -mc7X6J9aEXxrvFIjhZOwhYn3RwpwguDFU5qY1Y9wzTg1HMLfQfzWdyInNEi4s96z -biicisVMnR84syClg2RN56U+0hTJeYKTnYh/xV959EqoFfpUI2GZIxNmHr5p8S3M -7uSeBxoovmUYadhF9SlKx+dABd/K1HBKfMC4z2iw9z6r4QGOnKoMy0eAn5wzL7wL -+h6znRPm28Qr9NEg8qJ9r1pfF3uhwgZw8hL8iytNfdUIneQVqoHApd33SxHFaO29 -2Nuc19ucySNsMFBIVSg1D5LGjcJYz3NZpleQsIwLhvMCAwEAAaNQME4wHQYDVR0O -BBYEFOcVk1n/NisD3qXqtpSsWm+pXd0XMB8GA1UdIwQYMBaAFOcVk1n/NisD3qXq -tpSsWm+pXd0XMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQELBQADggIBAEFyCFmn -vc6EjKRld+G8Q1UBRCviNwAvTUyn6LfGFKeimCGlrXEIj08e15oSMVtbWYrs1vWk -x9JJIJYSbaWJM6eaWmbPYgYzQaiDdWnZb/fXg20gDaFtTamDrqws44yPHgkF8B+k -fBdkG6w59lGuwz2n8shag4ATDRambJBW1TV+6WAOH2FRQ6Mn/yz4qFGlI/r7yeCJ 
-CcQ3KWcrmbqA+GeNCNFyP1CHh+1DXYydVJULZ8hO7TcAkHgKZuHA37N5WGr2Yb+1 -wVH8v2vXpka1wosENU5dMPgtJQ9raEVZEh6HQY81G5/rtUIEuLuHFGkMv9LiuV2/ -FhXGjwyfmDaRADIEH0j0e2NeKk3tLlHb+2cZgKRvwL0a/RkovgUtKN3/ZGHsuPFe -YTk7RXn3DFpnhVltrg1vRPgR3euKKSVyw/DTPo1sQN205Lgcot+zshUIER/ELZBu -77AeDK9wbjxG34vdPaNz+bpVpJxZWHyO0CSKpXYwUcdr5iU2VrWJrj4Mnvat9Elo -BV6lkgdM47ngJ+bS4QpbvZG0YBzaN6mnXEQf3Zw1TkR+31m7vhRKilnObhG+Ylzq -H6E/a1MVtTRu1FkhTHdHJmolMVSHAytZZnee5PC/1AlMcKdWEv8A5up9sTjGesFM -ztcZLWC9GiyC/TFSJ1hDylkvvwcCX6PD7fLu +MIIGFjCCA/6gAwIBAgIIDAM+rFY5KqgwDQYJKoZIhvcNAQELBQAwgaExCzAJBgNV +BAYTAkNBMRAwDgYDVQQIDAdPbnRhcmlvMRAwDgYDVQQHDAdUb3JvbnRvMRgwFgYD +VQQKDA9NYWdpY1N0YWNrIEluYy4xFjAUBgNVBAsMDWFzeW5jcGcgdGVzdHMxHTAb +BgNVBAMMFGFzeW5jcGcgdGVzdCByb290IGNhMR0wGwYJKoZIhvcNAQkBFg5oZWxs +b0BtYWdpYy5pbzAeFw0yMTA5MTMxNjA2MDFaFw00MDExMTMxNjA2MDFaMIGhMQsw +CQYDVQQGEwJDQTEQMA4GA1UECAwHT250YXJpbzEQMA4GA1UEBwwHVG9yb250bzEY +MBYGA1UECgwPTWFnaWNTdGFjayBJbmMuMRYwFAYDVQQLDA1hc3luY3BnIHRlc3Rz +MR0wGwYDVQQDDBRhc3luY3BnIHRlc3Qgcm9vdCBjYTEdMBsGCSqGSIb3DQEJARYO +aGVsbG9AbWFnaWMuaW8wggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDK +mu24288Os23VtRf8kp57sj7+s+PSD/8+KiZiJ4sy5KrUUVijVQgfCpxPzpWWtQ/7 +JbjQMt+kZqJwKqdzXAY8osnljpYYvbNWnc0GZY09F6z95GqVgX/81Fe8W3Jz6I9w +S2CXVneKGtux+6fztKbrA2b1kn69b3xClEHRLFZl9hKG8ck2H+gI5AEDgQmhTIXa +pl85bPuh54uKiUGnedPk07biCw3ZE5GTGWzEq5qMqFEfb19/L1vOvgx/Q4aqmjJw +lONB9DzMftetdKaR5SS+vH0QUhiWXwy7j1TjYtJP4M6fLinwguMYG8Qbg7NkL4QC +9T7zR5CZPJ0Q/Npiwv7qdMzyL7QklZ9y3YeA5wceyc2/zh0INN5bf4J1mDZjhYH9 +CIgVHSj6z44rWq9L+OzYT0EMDhZO0OeakTWgqXNICfeEXZ5hy3QVCUvKrgmnqs0f +imdH6dZQIGQIQ8Vcg/psk2hEP1hRWROn/cgCdadcEqbMdbtOUuMcnr0K6B/bVbXx +jAV4eVcCcS3w3wIG4Ki2aIXnXrHyEJmZJb03Ko7VXP0NTGuGfPYQj2ox4a4wViOG +pxxbnGGAFqV+BIVlhUMfL9PlatqsI6kUzJIsJUiyk6oPb3KeNQ5+MtS0S1DV0jA5 +wxDQZyEFiUsl6GLYSm4RajxoHdLR7Xqj3D7EWKGt/wIDAQABo1AwTjAMBgNVHRME +BTADAQH/MB0GA1UdDgQWBBRvLFXv6sI+ePP5aegYUWoVHAfRzTAfBgNVHSMEGDAW +gBRvLFXv6sI+ePP5aegYUWoVHAfRzTANBgkqhkiG9w0BAQsFAAOCAgEAK+QAtzhk 
+ih8Tng9cOheswrbWf9pclMyfl38+NsJxsZnpa2SlBp3qJl0fymyNLLBfyeRUFr++ +x1cRAEwVv6R6Iepj252+U+Cmz48xIthF29JxoC+x2P2YDGyqVBm4uuw54EIF0r0H +AvjTPSNa54gA3+KiK64ypFdlHZrwx3W9b5tUsfycpj2Jrn2HgTbWQD2gaYeIIdq6 +DNmPCJg6NQE9jlvNmVqlBavjc7MJqqd+0+XtCIWhaoqeu/T6g2Epth25cuqPKc0E +rltKiXNiZHcDfFnu7B6kw2LVA6EQdf5GO9JtAaiwhRugp1dJ5rdQqdaYpJngZtvd +8+PSdDZrXow0a1jW2w+3lM5XW3qtzIKJz4Q8CXL540s+SeRjLRwY02OZCvG4fC8c +D57MIFKoReYy5LgBHdPGmx8Kexo7vk2ib9taQCSd6fh0Ol070pNiOnLP9lE9iEqq +EvU1A+0dtPHbfyXqw9tdY18nxXbooypQZSqfxPSq3Bpv8KTsr9SSG+DV2LcJRfvi +OfVTPeIWW8C8SkbEXaTCUVgaNeYqvFsfsvkTmfhO8GHglDgnsveXHfnAwlC2Uxdq +T64oKToV7N1L2RA0JR9gJ4RQwPfyaFOHOPjd+3t4DFVl54GNbNfvELHRReoyJPse +SZeL4h6T3L17FWzugHMjxFi4f1/nPNk7d5Y= -----END CERTIFICATE----- diff --git a/tests/certs/ca.crl.pem b/tests/certs/ca.crl.pem new file mode 100644 index 00000000..b5eb1d2e --- /dev/null +++ b/tests/certs/ca.crl.pem @@ -0,0 +1,19 @@ +-----BEGIN X509 CRL----- +MIIDAjCB6wIBATANBgkqhkiG9w0BAQsFADCBoTELMAkGA1UEBhMCQ0ExEDAOBgNV +BAgMB09udGFyaW8xEDAOBgNVBAcMB1Rvcm9udG8xGDAWBgNVBAoMD01hZ2ljU3Rh +Y2sgSW5jLjEWMBQGA1UECwwNYXN5bmNwZyB0ZXN0czEdMBsGA1UEAwwUYXN5bmNw +ZyB0ZXN0IHJvb3QgY2ExHTAbBgkqhkiG9w0BCQEWDmhlbGxvQG1hZ2ljLmlvFw0y +MTA5MTQxNjA2MDFaFw0yMTA5MTUxNjA2MDFaMBUwEwICEAAXDTIxMDkxNDE2MDYw +MVowDQYJKoZIhvcNAQELBQADggIBAL4yfNmvGS8SkIVbRzdAC9+XJPw/dBJOUJwr +EgERICAz7OTqG1PkmMhPL00Dm9fe52+KnSwHgL749W0S/X5rTNMSwLyGiiJ5HYbH +GFRKQ/cvXLi4jYpSI1Ac94kk0japf3SfwEw3+122oba8SiAVP0nY3bHpHvNfOaDV +fhbFTwb5bFm6ThqlKLZxGCKP0fGeQ4homuwgRiLE/UOiue5ted1ph0PkKVui208k +FnhNYXSllakTGT8ZZZZVid/4tSHqJEY9vbdMXNv1GX8mhjoU1Gv9dOuyFGgUc9Vx +e7gzf/Wf36vKI29o8QGkkTslRZpMG59z3sG4Y0vJEoqXMB6eQLOr5iUCyj2CyDha +66pwrdc1fRt3EvNXUWkdHfY3EHb7DxueedDEgtmfSNbEaZTXa5RaZRavNGNTaPDf +UcrDU4w1N0wkYLQxPqd+VPcf1iKyfkAydpeOq9CChqRD0Tx58eTn6N/lLGFPPRfs +x47BA4FmefBeXZzd5HiXCUouk3qHIHs2yCzFs+TEBkx5eV42cP++HxjirPydLf6Y +G/o/TKRnc/2Lw+dCzvUV/p3geuw4+vq1BIFanwB9jp4tGaBrffIAyle8vPQLw6bp +1o1O39pdxniz+c9r0Kw/ETxTqRLbasSib5FHq5G/G9a+QxPsLAzKgwLWhR4fXvbu +YPbhYhRP +-----END X509 
CRL----- diff --git a/tests/certs/ca.key.pem b/tests/certs/ca.key.pem new file mode 100644 index 00000000..2d73448f --- /dev/null +++ b/tests/certs/ca.key.pem @@ -0,0 +1,51 @@ +-----BEGIN RSA PRIVATE KEY----- +MIIJKQIBAAKCAgEAyprtuNvPDrNt1bUX/JKee7I+/rPj0g//PiomYieLMuSq1FFY +o1UIHwqcT86VlrUP+yW40DLfpGaicCqnc1wGPKLJ5Y6WGL2zVp3NBmWNPRes/eRq +lYF//NRXvFtyc+iPcEtgl1Z3ihrbsfun87Sm6wNm9ZJ+vW98QpRB0SxWZfYShvHJ +Nh/oCOQBA4EJoUyF2qZfOWz7oeeLiolBp3nT5NO24gsN2RORkxlsxKuajKhRH29f +fy9bzr4Mf0OGqpoycJTjQfQ8zH7XrXSmkeUkvrx9EFIYll8Mu49U42LST+DOny4p +8ILjGBvEG4OzZC+EAvU+80eQmTydEPzaYsL+6nTM8i+0JJWfct2HgOcHHsnNv84d +CDTeW3+CdZg2Y4WB/QiIFR0o+s+OK1qvS/js2E9BDA4WTtDnmpE1oKlzSAn3hF2e +Yct0FQlLyq4Jp6rNH4pnR+nWUCBkCEPFXIP6bJNoRD9YUVkTp/3IAnWnXBKmzHW7 +TlLjHJ69Cugf21W18YwFeHlXAnEt8N8CBuCotmiF516x8hCZmSW9NyqO1Vz9DUxr +hnz2EI9qMeGuMFYjhqccW5xhgBalfgSFZYVDHy/T5WrarCOpFMySLCVIspOqD29y +njUOfjLUtEtQ1dIwOcMQ0GchBYlLJehi2EpuEWo8aB3S0e16o9w+xFihrf8CAwEA +AQKCAgEApJFdgOdCc415LLpxJl4tzwnEs3yJE8qcp/Dyxo2aOpeUzurYVasu8o/a +0dRam1StC3HjgXGhSNd5ICT1aPWZt0z/M7Ay6RvFfRimPYjlRXdis8QCczgCLuqH +7V5WRCHlyO/hIGxCovIX+6UPEhxt7L0Rt2zr95GD3EyyfWZHM4DCIcxphMY74mTZ +EfCRUuxmWWkENg/5ANSj+r5sjs2dOORjS45xDB8iAtsHB2TgH1pksmTzq8pbBz5F +xmWiEBc520qEocDyVaS+KY1z81OuGiPebhBRGmtQW1UcPaq6a9mN26xSsqKONbnv +++1pHHqf/wsXu+IoaN/cML1B4jDDf1milC7mmgPdETQjbco7PvSsxzG3pZktijoT +8WfCMda4SFgkLMDEKyD5tyUGQFsvijXFf9y+/V0ux3u1Hm6NApDXTf7gX5W0b9tD +uiupzcwCtA5s9AO6G0bQnddwzFGh91/ydyc5DfaRjfrG95zYouwqmMQXTqYG1USX +mLrDgHw3ierlwVWKUR0OnysMeNYtu5782RO3LSdL126PKLd/pLvG7FrETLFECP3B +QgM/vKlNY26mcX4DuALRRLWu+ORrGMclEp7Bw/JPTkFxj2gLrmL6JM1h+CFXDBmk +pE0Cl2PDCVq4aFWZDn4F8ioT4XW/2REtxp7E2wazNnCX+IUap1ECggEBAOeXY9Ib +m0GayJVm7kvvL6pY2e/lHlvi44xcTG3GrkOn/qMLIDkXvUyfjcqHZQhMoYhnYx4K +iyK4D/Mej4Jbj5dyRKHEn8tKGuDrlzFp0CLRQvg1s/LcktX8hdef9IPXHA3y6ML5 +X60KNN1PI/7aINEENn1qOqDvU6X9ST3VGAWbfyM5jOZDHIBkjJuJTUwndaDbIA09 +AqxqQjq6UntCG+seXBmE1OHht++pWgN5rlq1wJ2KJlGR2HdhtIl1JyfU/hisnfFD +ahQMUFoFYS3ecNUNumbQEBaZ66/mHP0p2YhaLK3j3shC8vsN15LOW6Ulzlmw7I3s 
+tGqcShUaldjQYvkCggEBAN/1dQst70hWLtjRnP/0FidKtq3l2u0Lg6+K7CUsIOEa +QH1s0CobT5j7eWtodPkZkYCzulhiPXk32mW0uKiAglJ+LPaU7HgNrFlJKefCrStP +o8LcdeZujRhBkBvU+xytoxpKIhdie4td106sRCb63F66MtU+dSJqEl6/5Piz0zLT +YgrFitRaRA5/jW47BUV4ZBRnHqrBN4PhoaYPp7oYIue6E1G+REdsL9+I1B1PhUV2 +vmVHvoQkwqa1Ne9AZg1ZmTbnSojKV1c1T/uwwW/UEDo6v3+qMH/wTpXMk7DIE7ih +NW/FADYRHEd1M11zxLOMmq43C9/KD261N97H17NP3rcCggEBAJKdgzJ3C7li1m3P +NjmYeWKs0XxQXwHpCAnKPRCaYaSvbEOoPYQnhU5HDKsVQF8atID4gwV3w1H9mQtf +Y5cxhBxq2QxYwJkglxehzpwX0w7X0D/3L68m+UbDkbBKsa/ttPMXv0gAPBP+jC03 +dyBW08O/mQeZAvjzys8hJQciKw0RvlF8k7kK77ZQ8bteFzOJH6zwTMBUyaaBtuAb +KTCjT61wEPqO338JOTteyX+9vyXqPsD9vviRDqu1jWggZOOQsjTIw00EUtnSWeRD +15wEYQZgpIuGWUkVtOItGlkj73WlMPf9dQLvb4iE4N8uCVLqNlMN8RSAsE92Fmh5 +5jfW5XECggEAQEd5En5aoU5rH7v57dSmzxw4lmzUixi08RtUb87cmP8p51Xl4U/5 +ZpU24kcW27Ak/OWY5Gk9757CRlK6dVJ9FSQ1z4gq3sI951qCdox/m2C+Rd100XCF +eqLGs9ZLRI3ptE/2vPN9NiD2/ROgc/eobF/Q2zeT8w6yuxMkquUiBwJ4r1LHZ++I +fQjLFQpHlwrY3qpCOQw/3NBTzw/LOjRXQF890EZl3oIEs4nYJ5l9TNSqDPOskMzk +OWjlVAgNwmMnAIUd9Wjt7I/WpwyyWGBrT+swr3mvdekJBSG0ehbS4jkS10OZrer3 +TOMsnPPvTwFaHAqck9yw1TuaD40YMdUIvQKCAQAHpX7JP3Qbt7Q+hzq66BVWwlp6 +qdKKjlGGB7ciiFwuZWRI019ilbmmOjCfvFuVh4pyZgQH/TG/9HnZPBmuXd0Jy6VJ +SIQWZQ58G3SmIFqXZYA5Gxk2u4B/bPmptfPX/zxkaSV83dQu3L0PdPVnCTzv1qDn +MdCMbq7K53zF/j05tWRdF4iey64pmoBZx7G3Ky9cwdMsKTm/7AHi0UBTHwGCrDFL +BDS6XW1ylSa0QJrd2+yryae+N0iYXA+5WmY6yuLkUrGXcf96e3ufrs73di5R10IV +D38YeZHQEIK5gmfWC9Ma5HZb6TB/CtweirY4IddUiPEpHJFmOV+TkGBmntF6 +-----END RSA PRIVATE KEY----- diff --git a/tests/certs/client.key.protected.pem b/tests/certs/client.key.protected.pem new file mode 100644 index 00000000..0c5a9795 --- /dev/null +++ b/tests/certs/client.key.protected.pem @@ -0,0 +1,30 @@ +-----BEGIN RSA PRIVATE KEY----- +Proc-Type: 4,ENCRYPTED +DEK-Info: AES-256-CBC,B222CD7D00828606A07DBC489D400921 + +LRHsNGUsD5bG9+x/1UlzImN0rqEF10sFPBmxKeQpXQ/hy4iR+X/Gagoyagi23wOn +EZf0sCLJx95ixG+4fXJDX0jgBtqeziVNS4FLWHIuf3+blja8nf4tkmmH9pF8jFQ0 
+i1an3TP6KRyDKa17gioOdtSsS51BZmPkp3MByJQsrMhyB0txEUsGtUMaBTYmVN/5 +uYHf9MsmfcfQy30nt2t6St6W82QupHHMOx5xyhPJo8cqQncZC7Dwo4hyDV3h3vWn +UjaRZiEMmQ3IgCwfJd1VmMECvrwXd/sTOXNhofWwDQIqmQ3GGWdrRnmgD863BQT3 +V8RVyPLkutOnrZ/kiMSAuiXGsSYK0TV8F9TaP/abLob4P8jbKYLcuR7ws3cu1xBl +XWt9RALxGPUyHIy+BWLXJTYL8T+TVJpiKsAGCQB54j8VQBSArwFL4LnzdUu1txe2 +qa6ZEwt4q6SEwOTJpJWz3oJ1j+OTsRCN+4dlyo7sEZMeyTRp9nUzwulhd+fOdIhY +2UllMG71opKfNxZzEW7lq6E/waf0MmxwjUJmgwVO218yag9oknHnoFwewF42DGY7 +072h23EJeKla7sI+MAB18z01z6C/yHWXLybOlXaGqk6zOm3OvTUFnUXtKzlBO2v3 +FQwrOE5U/VEyQkNWzHzh4j4LxYEL9/B08PxaveUwvNVGn9I3YknE6uMfcU7VuxDq ++6bgM6r+ez+9QLFSjH/gQuPs2DKX0h3b9ppQNx+MANX0DEGbGabJiBp887f8pG6Q +tW0i0+rfzYz3JwnwIuMZjYz6qUlP4bJMEmmDfod3fbnvg3MoCSMTUvi1Tq3Iiv4L +GM5/YNkL0V3PhOI686aBfU7GLGXQFhdbQ9xrSoQRBmmNBqTCSf+iIEoTxlBac8GQ +vSzDO+A+ovBP36K13Yn7gzuN/3PLZXH2TZ8t2b/OkEXOciH5KbycGHQA7gqxX1P4 +J55gpqPAWe8e7wKheWj3BMfmbWuH4rpiEkrLpqbTSfTwIKqplk253chmJj5I82XI +ioFLS5vCi9JJsTrQ720O+VQPVB5xeA80WL8NxamWQb/KkvVnb4dTmaV30RCgLLZC +tuMx8YSW71ALLT15qFB2zlMDKZO1jjunNE71BUFBPIkTKEOCyMAiF60fFeIWezxy +kvBBOg7+MTcZNeW110FqRWNGr2A5KYFN15g+YVpfEoF26slHisSjVW5ndzGh0kaQ +sIOjQitA9JYoLua7sHvsr6H5KdCGjNxv7O7y8wLGBVApRhU0wxZtbClqqEUvCLLP +UiLDp9L34wDL7sGrfNgWA4UuN29XQzTxI5kbv/EPKhyt2oVHLqUiE+eGyvnuYm+X +KqFi016nQaxTU5Kr8Pl0pSHbJMLFDWLSpsbbTB6YJpdEGxJoj3JB3VncOpwcuK+G +xZ1tV2orPt1s/6m+/ihzRgoEkyLwcLRPN7ojgD/sqS679ZGf1IkDMgFCQe4g0UWm +Fw7v816MNCgypUM5hQaU+Jp8vSlEc29RbrdSHbcxrKj/xPCLWrAbvmI5tgonKmuJ +J1LW8AXyh/EUp/uUh++jqVGx+8pFfcmJw6V6JrJzQ7HMlakkry7N1eAGrIJGtYCW +-----END RSA PRIVATE KEY----- diff --git a/tests/certs/gen.py b/tests/certs/gen.py new file mode 100644 index 00000000..c08f3061 --- /dev/null +++ b/tests/certs/gen.py @@ -0,0 +1,202 @@ +import datetime +import os + +from cryptography import x509 +from cryptography.hazmat import backends +from cryptography.hazmat.primitives import hashes +from cryptography.hazmat.primitives import serialization +from cryptography.hazmat.primitives.asymmetric import rsa +from cryptography.x509 
import oid + + +def _new_cert(issuer=None, is_issuer=False, serial_number=None, **subject): + backend = backends.default_backend() + private_key = rsa.generate_private_key( + public_exponent=65537, key_size=4096, backend=backend + ) + public_key = private_key.public_key() + subject = x509.Name( + [ + x509.NameAttribute(getattr(oid.NameOID, key.upper()), value) + for key, value in subject.items() + ] + ) + builder = ( + x509.CertificateBuilder() + .subject_name(subject) + .public_key(public_key) + .serial_number(serial_number or int.from_bytes(os.urandom(8), "big")) + ) + if issuer: + issuer_cert, signing_key = issuer + builder = ( + builder.issuer_name(issuer_cert.subject) + .not_valid_before(issuer_cert.not_valid_before) + .not_valid_after(issuer_cert.not_valid_after) + ) + aki_ext = x509.AuthorityKeyIdentifier( + key_identifier=issuer_cert.extensions.get_extension_for_class( + x509.SubjectKeyIdentifier + ).value.digest, + authority_cert_issuer=[x509.DirectoryName(issuer_cert.subject)], + authority_cert_serial_number=issuer_cert.serial_number, + ) + else: + signing_key = private_key + builder = ( + builder.issuer_name(subject) + .not_valid_before( + datetime.datetime.today() - datetime.timedelta(days=1) + ) + .not_valid_after( + datetime.datetime.today() + datetime.timedelta(weeks=1000) + ) + ) + aki_ext = x509.AuthorityKeyIdentifier.from_issuer_public_key( + public_key + ) + if is_issuer: + builder = ( + builder.add_extension( + x509.BasicConstraints(ca=True, path_length=None), + critical=False, + ) + .add_extension( + x509.SubjectKeyIdentifier.from_public_key(public_key), + critical=False, + ) + .add_extension( + aki_ext, + critical=False, + ) + ) + else: + builder = ( + builder.add_extension( + x509.KeyUsage( + digital_signature=True, + content_commitment=False, + key_encipherment=True, + data_encipherment=False, + key_agreement=False, + key_cert_sign=False, + crl_sign=False, + encipher_only=False, + decipher_only=False, + ), + critical=False, + ) + 
.add_extension( + x509.BasicConstraints(ca=False, path_length=None), + critical=False, + ) + .add_extension( + x509.ExtendedKeyUsage([oid.ExtendedKeyUsageOID.SERVER_AUTH]), + critical=False, + ) + .add_extension( + x509.SubjectAlternativeName([x509.DNSName("localhost")]), + critical=False, + ) + .add_extension( + x509.SubjectKeyIdentifier.from_public_key(public_key), + critical=False, + ) + .add_extension( + aki_ext, + critical=False, + ) + ) + certificate = builder.sign( + private_key=signing_key, + algorithm=hashes.SHA256(), + backend=backend, + ) + return certificate, private_key + + +def _write_cert(path, cert_key_pair, password=None): + certificate, private_key = cert_key_pair + if password: + encryption = serialization.BestAvailableEncryption(password) + else: + encryption = serialization.NoEncryption() + with open(path + ".key.pem", "wb") as f: + f.write( + private_key.private_bytes( + encoding=serialization.Encoding.PEM, + format=serialization.PrivateFormat.TraditionalOpenSSL, + encryption_algorithm=encryption, + ) + ) + with open(path + ".cert.pem", "wb") as f: + f.write( + certificate.public_bytes( + encoding=serialization.Encoding.PEM, + ) + ) + + +def new_ca(path, **subject): + cert_key_pair = _new_cert(is_issuer=True, **subject) + _write_cert(path, cert_key_pair) + return cert_key_pair + + +def new_cert( + path, ca_cert_key_pair, password=None, is_issuer=False, **subject +): + cert_key_pair = _new_cert( + issuer=ca_cert_key_pair, is_issuer=is_issuer, **subject + ) + _write_cert(path, cert_key_pair, password) + return cert_key_pair + + +def new_crl(path, issuer, cert): + issuer_cert, signing_key = issuer + revoked_cert = ( + x509.RevokedCertificateBuilder() + .serial_number(cert[0].serial_number) + .revocation_date(datetime.datetime.today()) + .build() + ) + builder = ( + x509.CertificateRevocationListBuilder() + .issuer_name(issuer_cert.subject) + .last_update(datetime.datetime.today()) + .next_update(datetime.datetime.today() + 
datetime.timedelta(days=1)) + .add_revoked_certificate(revoked_cert) + ) + crl = builder.sign(private_key=signing_key, algorithm=hashes.SHA256()) + with open(path + ".crl.pem", "wb") as f: + f.write(crl.public_bytes(encoding=serialization.Encoding.PEM)) + + +def main(): + ca = new_ca( + "ca", + country_name="CA", + state_or_province_name="Ontario", + locality_name="Toronto", + organization_name="MagicStack Inc.", + organizational_unit_name="asyncpg tests", + common_name="asyncpg test root ca", + email_address="hello@magic.io", + ) + server = new_cert( + "server", + ca, + country_name="CA", + state_or_province_name="Ontario", + organization_name="MagicStack Inc.", + organizational_unit_name="asyncpg tests", + common_name="localhost", + email_address="hello@magic.io", + serial_number=4096, + ) + new_crl('server', ca, server) + + +if __name__ == "__main__": + main() diff --git a/tests/certs/server.cert.pem b/tests/certs/server.cert.pem index ce8bf0f4..a4678151 100644 --- a/tests/certs/server.cert.pem +++ b/tests/certs/server.cert.pem @@ -1,40 +1,39 @@ -----BEGIN CERTIFICATE----- -MIIHFjCCBP6gAwIBAgICEAAwDQYJKoZIhvcNAQELBQAwgaExCzAJBgNVBAYTAkNB +MIIG4zCCBMugAwIBAgICEAAwDQYJKoZIhvcNAQELBQAwgaExCzAJBgNVBAYTAkNB MRAwDgYDVQQIDAdPbnRhcmlvMRAwDgYDVQQHDAdUb3JvbnRvMRgwFgYDVQQKDA9N YWdpY1N0YWNrIEluYy4xFjAUBgNVBAsMDWFzeW5jcGcgdGVzdHMxHTAbBgNVBAMM FGFzeW5jcGcgdGVzdCByb290IGNhMR0wGwYJKoZIhvcNAQkBFg5oZWxsb0BtYWdp -Yy5pbzAeFw0xNzA0MDMxNjIxMjhaFw0zNzAzMjkxNjIxMjhaMIGEMQswCQYDVQQG +Yy5pbzAeFw0yMTA5MTMxNjA2MDFaFw00MDExMTMxNjA2MDFaMIGEMQswCQYDVQQG EwJDQTEQMA4GA1UECAwHT250YXJpbzEYMBYGA1UECgwPTWFnaWNTdGFjayBJbmMu MRYwFAYDVQQLDA1hc3luY3BnIHRlc3RzMRIwEAYDVQQDDAlsb2NhbGhvc3QxHTAb BgkqhkiG9w0BCQEWDmhlbGxvQG1hZ2ljLmlvMIICIjANBgkqhkiG9w0BAQEFAAOC -Ag8AMIICCgKCAgEA+0WH9PX4a6Tsnp7xtUbZ51c77aqVagdfj9xYJPqD3X7u2Odf -yyYivZ91DiS23acfLOEQOfBNn2ZFcrLaXy33UAXo1VcvCsKNJY4FfS9A5OBZ4UTL -peagrTnZuRS4KMadg0V9jb5au6+s7jExPty9c+nZ59Kd6IbkPn31l9K5rj4/2WvG 
-pIj9k5YaXswJVBiTWGKxP9a3xMb9CG9bqNCD5kXo+1K2oDJyGE3mj6QSjlnFw6NN -f+dCOGWSs7JHMNZVVtRG2qsEIssZgpHseu9he684ZqdqrMCG6wBDW58sUBp6Dt6z -jyTLefs8ht0tT+ZcmPno2G3mgs1bLyQsQB8a7fqzzaW6wPwdZJBGO/qI7Zr/30VD -I7InLmxbg62tdrTP4CibXWfe6Qoi6xSNZd7FvP2OoCA7Nk6HahdwDocInB9fWV2j -jkqyeIdDSd9QUItCUSgyVm+XefO/T8B75PNCykyWAMMDGOBE706KZh4oXeMORoYp -LxsbtL0/7n/JPwQDHeLQHHRjiw2ydxH2/940jngnL1YCqWiUq06FPvl3zn+Qgim+ -kIhfJeYuQ8zxdh8P7Ay4i5neuum+FQZspPiSzx6jMQIOu+e+iBP2AIdu/UQK+JPU -epE2Pt5aEyuzgNEbg0cR6tQ3rJCbj0DdtU26ale5EeD8y1JYCXEYkED88bMCAwEA -AaOCAXEwggFtMAkGA1UdEwQCMAAwEQYJYIZIAYb4QgEBBAQDAgZAMDMGCWCGSAGG -+EIBDQQmFiRPcGVuU1NMIEdlbmVyYXRlZCBTZXJ2ZXIgQ2VydGlmaWNhdGUwHQYD -VR0OBBYEFHWtuEuKYLSw/iqmyBEyjcSxq0LHMIHWBgNVHSMEgc4wgcuAFOcVk1n/ -NisD3qXqtpSsWm+pXd0XoYGnpIGkMIGhMQswCQYDVQQGEwJDQTEQMA4GA1UECAwH -T250YXJpbzEQMA4GA1UEBwwHVG9yb250bzEYMBYGA1UECgwPTWFnaWNTdGFjayBJ -bmMuMRYwFAYDVQQLDA1hc3luY3BnIHRlc3RzMR0wGwYDVQQDDBRhc3luY3BnIHRl -c3Qgcm9vdCBjYTEdMBsGCSqGSIb3DQEJARYOaGVsbG9AbWFnaWMuaW+CCQD0wkk9 -2f1opzATBgNVHSUEDDAKBggrBgEFBQcDATALBgNVHQ8EBAMCBaAwDQYJKoZIhvcN -AQELBQADggIBAFUik2de0QH9gjHb0DeNRUpzHf67sVejqJoB0YCAlwhMTCwnMasR -YQVeD6+L1KCoyynhwZE99B9LlWcL5V/uR++S88azc35mFVJ6j5b0zxT6nTOQE2Oe -oOZrjhFDmdRQDyZl3oOQZD0CD2VoRZK2uGG1Isv8cC17ImC1bMNHbU+0Sc5twUtj -jLvyoJNASQess35c+V/w6rdlXy2g19vSiR3nQPsh/HMv2Kx0sJaSIVKTdVBlH3FH -o+v7tR2YRMxNw4olalxXJzvt1KgbNGczi4Yd/XnTQKCJx4xvJLhE/9R6Cj6vLeFZ -YpSp1ftXBAQCQn6lv0fMe7az3fmXRJ692514F00zmJUI6EW1wqD4yx2Q8JgqmQ4k -2oz4HBk/6Sh6Hf43KZAnLUMZ0VvkzhUTTp5/BwlhLjbWQdR6Lrf/8SRdEVzdco6F -zmawidqeQCASHKbLfFfWbh+A0mzHhkcnvczM803oX1iOnaDQVIYWqZwJxmB+bsB9 -99/yBCxJw1YGIcHss97olsx2HReCVmcUZA3TBBG/WFATYV0DlVdApEPcR6a+NWE/ -W3EhPsZhUdSzjdlP1Yt9awq+V5eHHVA/ve0PufPW6nmxIXXpIuX2YGIRqEmWWSO8 -+sKguObZvWZnj/D04GPjJTozy82vebiWGG1NODGO/4vCB0Zp/MbjYQb8 +Ag8AMIICCgKCAgEAwvenCzhPXe+m+QEOdqK1YRnhKKGAeRo0oV7BfDAwhrgrnc2R +kGg+T5liQYh3ddj13LHPdLehhVz4B1tNkfZPLSeMDwjU8sNRWkdiAI3ZHRmVIVOh +Ru4BRzI4WqdZpa5cImlFaUjtHa/+w7ekHnllwodpbjH4Vgs9LWQiH8CdTVpj2clq 
+H78ZShlRvLyjo6OMQ6fbxAFtcYDGHwhR7JZ4VeCBm40O0Fl/c0ckmOtoYd1BTYX9 +RgIzTt0oV6ZiUH/SKRdYyb9GPUlfm0URK5j5MZPn10riACnaNEHytEREQEkpHWiD +RPcmlRCJarg4zhObuI5f6kUX9R1XrIKY4SAyDKzoSdxRFgYEWN6HyfylakU5LFnE +4ZAgihbzuFG4fGOf88F+KqaC6yvz/mvgxB8IPSDaILE37gGuJUTGhDGkKAVIB5Xb +WWR6e4VJcnmveu1z5+M6jwTR2+61y14h3WfACZLbAdPW1ivr6kjbaXlN658NEA1G +I/5eY7kVFAapoGdLOWlI7iXLGHrORLL7l2nh7+cYnHGPT3e5WHJZ67a0Jvtv0K/5 +dBgs2gwB+6FcXe2foKAmQ3/B5rAmshtb/0Ya4wRCglGxXgQQFCZseT5TAJhhHwbB +yqVFOgzvYSFw7gXQcfxfxf0LoUYK2O7WwqDJyargkIMDTZfaL+7ht6pfSmkCAwEA +AaOCAT4wggE6MAsGA1UdDwQEAwIFoDAJBgNVHRMEAjAAMBMGA1UdJQQMMAoGCCsG +AQUFBwMBMBQGA1UdEQQNMAuCCWxvY2FsaG9zdDAdBgNVHQ4EFgQUE7Na2Y9wLTBC +vxuoQh8lHF/wSR0wgdUGA1UdIwSBzTCByoAUbyxV7+rCPnjz+WnoGFFqFRwH0c2h +gaekgaQwgaExCzAJBgNVBAYTAkNBMRAwDgYDVQQIDAdPbnRhcmlvMRAwDgYDVQQH +DAdUb3JvbnRvMRgwFgYDVQQKDA9NYWdpY1N0YWNrIEluYy4xFjAUBgNVBAsMDWFz +eW5jcGcgdGVzdHMxHTAbBgNVBAMMFGFzeW5jcGcgdGVzdCByb290IGNhMR0wGwYJ +KoZIhvcNAQkBFg5oZWxsb0BtYWdpYy5pb4IIDAM+rFY5KqgwDQYJKoZIhvcNAQEL +BQADggIBAC66T8P6uZEa/0Gu7N58hHM5wZmWAgY9CWyexqLwhot43oQw53TyLYvP +EhbE41yB7kuqvwFvZmUP5/f2LiqrXbl37/vJyITr7oPmZ4cYRHLdsVOix8W8GD5e +lwrcqbudkulIcINBewfOtEuE2IkVtRSK8GzUvKcDpTOhmC1mHK/DtmgMvmGwazHy +fIHZjcUKFdOr1WZ7X8wnnEfP/OcYsizNXjGctfun/r984PhxwojoP/X+r2ycXhrr +X31m+qbj5QyarNxaje3LDA1IBCKSVYhwEHhZgXV2NBuUJYr58u053u2CcxNvHlMS +rNflhiB0MWpbTZBUBR/bnHBi5plt6eyABV4xZfslQCGisc4zWYSZqXa+HYgpn9Ja +NNbZL6Pj/hFlZg2ARlDio4KAQWjnQlS4e7U2vJXPbI/tfCMpNk+PQ7fRZFCRjWDh +OtcejGna2rBtXIHf6yuV8ultyLdIm5FqPhBE0eRisfWjhEGa2UG7IeyXs0+muLsi +n4NrZgYogo8ADOCiQtH0Z1/ropqoXlptNr8XJYYhz8rvIRXfwLqmqebp3gSD92Hd +jt4dCDmHT8ai9Inn8MqGqTtU2TlV4rba6WxNoiX2z1xbXw2kGtrdlxaYekBK+DGl +8ky4IUinTi0fUrBxLtxpPtztXPArvXSRiRTf0hRtS7v0QI9VuwyV -----END CERTIFICATE----- diff --git a/tests/certs/server.key.pem b/tests/certs/server.key.pem index d802fb3b..9c69c46c 100644 --- a/tests/certs/server.key.pem +++ b/tests/certs/server.key.pem @@ -1,51 +1,51 @@ -----BEGIN RSA PRIVATE KEY----- 
-MIIJKgIBAAKCAgEA+0WH9PX4a6Tsnp7xtUbZ51c77aqVagdfj9xYJPqD3X7u2Odf -yyYivZ91DiS23acfLOEQOfBNn2ZFcrLaXy33UAXo1VcvCsKNJY4FfS9A5OBZ4UTL -peagrTnZuRS4KMadg0V9jb5au6+s7jExPty9c+nZ59Kd6IbkPn31l9K5rj4/2WvG -pIj9k5YaXswJVBiTWGKxP9a3xMb9CG9bqNCD5kXo+1K2oDJyGE3mj6QSjlnFw6NN -f+dCOGWSs7JHMNZVVtRG2qsEIssZgpHseu9he684ZqdqrMCG6wBDW58sUBp6Dt6z -jyTLefs8ht0tT+ZcmPno2G3mgs1bLyQsQB8a7fqzzaW6wPwdZJBGO/qI7Zr/30VD -I7InLmxbg62tdrTP4CibXWfe6Qoi6xSNZd7FvP2OoCA7Nk6HahdwDocInB9fWV2j -jkqyeIdDSd9QUItCUSgyVm+XefO/T8B75PNCykyWAMMDGOBE706KZh4oXeMORoYp -LxsbtL0/7n/JPwQDHeLQHHRjiw2ydxH2/940jngnL1YCqWiUq06FPvl3zn+Qgim+ -kIhfJeYuQ8zxdh8P7Ay4i5neuum+FQZspPiSzx6jMQIOu+e+iBP2AIdu/UQK+JPU -epE2Pt5aEyuzgNEbg0cR6tQ3rJCbj0DdtU26ale5EeD8y1JYCXEYkED88bMCAwEA -AQKCAgEAtof0E8b7B3dvTGs6Ou2VLbD5H9VjZPqmOONgRLyXPjgPWhH6TKEPa6CC -cBvLm4jj5L46A1zFhp3MpV23tJy3o7InSZNj4PUjg7x/0EibY6h2omZPadz3q97y -grjCbxyZH9tDMcyuLNmZTg7+LyQ7nBCs8vLVMy2KcLsfxYKW0DT4PQFF9BBv5N6N -mX+u5yBTKUnIaQ+Zv6Ct/4qlkySmLIlsjeWwNP9wUqeEbaRKto4QU+Y1Tky4li9z -OoavoJKSu9jI/+BryLqxdWB74XIz5p2K40eK/qN9Xwl55PzkO+x/7n1pAvs/tQUF -GxNg70Hw0k/5DgAIC80SCFTGsG3oKLgPm1BS7Njoz8xcQvtZrYOKfEg/NOjUAWTE -SvXoLRqTQ4bUS6F6VgSA+qEEXrKFGt+ViddXrfuyXow5ZXjstgwuuZSzjLTM9LPF -tKEeB+hYbjpg0C7KuRGG5MfQ6eY8TjB3JCBGSBPw/4gv4DzkRoI2e2Qvgon6pNUT -ZiQMmuQHX3d+5QQErzUgAYF401DBi+9kG6e78hZ5uG3lTUOW372jcAkdkD/DdC1B -GMt7esIoyrO/57gFQXaFIQjSneWPiaxtYxUqpjbc0lCIfwYr3QFYzumZwUErJljl -CxDJ2ejW6ONUXDPRbzprHFDi0y71G7WRT7ZmwoQY/q/Yxwg3mAECggEBAP8+cgZl -001Np3M78KUYuUhDt+6J+ZujJnpCdsWqf0H0cxIA/FL6zpnyYP7nkum/QphE9CST -jFew1/JnuCtHHzE9BryChjL+kjXswFAhGncsP+UjPI1AEliFhPHIfBF25N3LYBvU -IO4syLLUsJsWlAaUbXBD29bSRUYwNPkspmblluZaKdS5fJQR9tiEUkNlPeUcjaMl -Mhblo4r3lZYMkJqm11DGlNUnXb5/kCMq0D+kvhVvoHfRqk5G30m9Yu0QSR6dVlgi -HiPXodNJSz0BZpfM/FXdnhAYnIANgPZS/St9JjpFeDTvo6vZgF9be+Tt29Zm7gxZ -4hvoPCUwE5LhKjMCggEBAPwEEssXK3qkrIYRe1gJjxAkpTHWj5w18QNRRsDvNIT3 -Ode2MtloeOl2csIRtvVZWEHWuFiQyvDbVXQvmoeJT+K1acw0mOOkXLME2BLOHTkJ -bYU5zd+dnF3W3CaOUagpesP7QZYiqdso88EugFDt5KtonFRtp/YNY4HxmEahE2I2 
-KGVN6rFV5WIZsJyXhNCvacci1ZnJqwN/43Vx5ejsqjtypi1XAKlYzGj0ktDbOFGR -vZQdR5Q8rYQ+V7Bypwzbchq9+Udh3Xd8VmosADE0OoATDU6m1SHvsZMxZ83vcs/8 -pkwtzMlzo3q/yPSG+jTU7kq0PE8z628ol5sFZrFMmoECggEATQpHFmFDnvCSWzi7 -UMmemw49hRVGLtDWu042VUE5+elTlhqQDme/Vj4PQsEY2c6txhIB8sxKLumktHjT -4NQtuQnnb5yh7uBhtz8HaOgk+dV0T7AMBcJSBz/9uZC+yfKt77gEAUJM0jbYOQnz -aEwvT7EbOyhwQW3kFORWCOOOMj6YBl0uhRObY4HslLuTrN3xCadNpPGEJd8YNsi1 -8L1IJDW5hZr6rz+bjvUnx0WT57HM4eF4eNHi6o9/s90i79TbjQ8GUcGygTUDlido -OziiA62OeEhU/hy/l/L7et3fpnG2yR3Qw4GVUDhtA9s0EQwuL4+PyFCU68Fz7fGN -5uZpewKCAQEAvBAmHhwaPBlrDVk6XEY11mwiQoDFBmNSiZE7ZXqcDKWZKpoyc/78 -S+wyUxR5HbognHEpfB4A86AZsuxbOs2DKcELRHHzrdzXuFfjDpV1RTz91699LGQn -bfeKrdMCqKTbkiiLlwgjDQMQc5bJ9pqwTCFyl6aE8p6nJS8u3XYSSvXzSzXL764T -0RMusox3dmuQWiRqlarizWfAS8JFOX5ywo4Z6DfGrJkxYRkx/l25N1W0zTTUV5C4 -Q7lqIqhMdNHF4qLlxRkI9cN5kR1ov08kYLLW+VySLBL8xsTVm94WJZN6XdrHuYVr -94vq4F9hk89aS7EYWFp8VKVMDUkIi0KJAQKCAQEAmt1zJ9MP2xDIbNCY1Kuk3Zoy -oYcCqijK6i/9Aeu+9w8U1hSrcU5SOF4VQbDwB00RzzDPL7K4e77GulEDnanKnEpX -eu4lYuhCgG/G7uECU8jLOUQUVp8c4Fcyp29T0pTkow15TLifUOXAfQGfe8jK/SvI -jpAAwxBDwQ4HNGA3y3HOzmIt5riRLGahASxDpyTDBmFiuRPwyXNxEoO6ZMtaSL9t -ThhMc74EU8qFBBnzfaKkUZshB9jkcpQq800M99Wj5t31A4mNwz1tmAEM/Wvvbhea -Yx2I+nS6CQhg0DMAxGqalTTLWxjY4NK+j6Mb5FVpXGJ5yUef2TWVRUymm5XlSA== +MIIJKQIBAAKCAgEAwvenCzhPXe+m+QEOdqK1YRnhKKGAeRo0oV7BfDAwhrgrnc2R +kGg+T5liQYh3ddj13LHPdLehhVz4B1tNkfZPLSeMDwjU8sNRWkdiAI3ZHRmVIVOh +Ru4BRzI4WqdZpa5cImlFaUjtHa/+w7ekHnllwodpbjH4Vgs9LWQiH8CdTVpj2clq +H78ZShlRvLyjo6OMQ6fbxAFtcYDGHwhR7JZ4VeCBm40O0Fl/c0ckmOtoYd1BTYX9 +RgIzTt0oV6ZiUH/SKRdYyb9GPUlfm0URK5j5MZPn10riACnaNEHytEREQEkpHWiD +RPcmlRCJarg4zhObuI5f6kUX9R1XrIKY4SAyDKzoSdxRFgYEWN6HyfylakU5LFnE +4ZAgihbzuFG4fGOf88F+KqaC6yvz/mvgxB8IPSDaILE37gGuJUTGhDGkKAVIB5Xb +WWR6e4VJcnmveu1z5+M6jwTR2+61y14h3WfACZLbAdPW1ivr6kjbaXlN658NEA1G +I/5eY7kVFAapoGdLOWlI7iXLGHrORLL7l2nh7+cYnHGPT3e5WHJZ67a0Jvtv0K/5 +dBgs2gwB+6FcXe2foKAmQ3/B5rAmshtb/0Ya4wRCglGxXgQQFCZseT5TAJhhHwbB +yqVFOgzvYSFw7gXQcfxfxf0LoUYK2O7WwqDJyargkIMDTZfaL+7ht6pfSmkCAwEA 
+AQKCAgAujTM1WpyYsUAM9FOfv/nO1X8NVIJ4Z+lpHlbUcC0l/ZNsekjnUfyOxPDQ +9OSRHtyVdV8zXyUR0sDmAMbkswr0nRyz+kfeLwSdqa2ctEHC0PjqnC1F4k4r0bHi +81JUXO1iyf/ow6DaFcuer5pgLFw/tlVWGlhRMx3IWMBNFJB6h7qPpafRLK+9IY6C +ogfwanxzKwEuK6kWEMk9X58v/j19Q72uhl+jH7tuqu3yFUM3Gr0c5YEz1hKqIeQg +CXov/lUPuqNYiHMc7wgE6tjOsBfP3qDcpuSPZW7US2rH4ATr1IwcmXe+X8S2ktw8 +vv/RNJ1Z06TTKuwtenQUnJokJqvMMESqEHdld5wwDo3MxCqvkcSUeS22cKlBZjeF +8/5wqpTMVpWxE7kfZFsMinBIV3gRPh8v87aDjrULJYltLQ6e8Pd0sAO0x0jAby8H +o5mjPSjHsK0m4vJyNB0paiWJcbRMQXpKX7U3smXxxAqWaqRgkkXk6wGICxX2oV34 +T6tvQ7GPCqNR8wnnXDx07imcHGAMeT62Zo15DrupP7eRxtIaO+f94HQiM6aIIcDv +kXyNZP0B1THj1C9eFy2hy6yvVOv1ZTtaXSXCOcY5dstDDKKZiAs2JTgcMtT5AZ7H +Q0JZAulk2AIeLHlNktUOZeYAA7nrJVS+PhsPcOep5N9CeM2EgQKCAQEA5P/I9jv2 +ZLfzTLJq3h9t7HMPGC/u+a7xD6ycTA4h5LAaUoMibkMobG7Sm/idNwvjUK3eobpz +KV01L5B69Om0GlaCn1zyDvPiZ+Z6COqwlHMIQcNR8ohNyGMIzkngJPh8RJN5a1a+ +NkT+lAsxAZx4RUWOs+PboTrqy3YUWQZLbTK5k0nBoAwW6V7PmdrjDAz4AU3nabQ4 +9JXacMd1gzB7/VWFt3rprR39yfmTrT8vR1w/DRnWmYpIx/DZ1MDvkIeWdrzFakyu +ah8HkW+tFB2BajnXfD+GD/L2sdEhez9YVjv/enJrNrsPRRk6yJoUTydkqPejBOOz +DJTfdQknWBnFKwKCAQEA2fSjGu8SS/Ah3DVAdhczX8YDHfJm1ctY5/uGG5TQI8Fn +pN6jBt8e7a5Aqb32naO73xYKQipiJ30Diqp5bPXv/4SJB17qCJvvoIlWXmy6ROnC +a7ndAR7N6VgJHf7uvuPa9CCZQkpP2fG3MPJXAfynVPY+D/xonZBV/o9lioBGEin+ +ENqVYjb7tX7h0Of/IbCzbTMnmEiCaz3Mm/8RME9Mh8BZfbJTUk9Sb/Q6oTMwMd9H +GcsZj4XYbxYGdHA28mFlZoIUdDesd8ZUWka21U6GVdz4OJtfoI7MJdqRzt7uEwJC +UixWWQn+LFpNFjKjKnhFFc4re52MvKB90R+kWErMuwKCAQAp2ZkXbwPrijabmKux +JltHcAudJv1sgg0qCSiTOa32Bjz5QV/keL+FeYdh28CXk8OaNfxO4C01rQQQSj4+ +TguNGKxMhYbzNpz00gkRYhqdNpyWsCRkx4Y3jenJEXU2LHdBbRYuiK7AakGAOr9d +BQRx3HFk7Mpxn7vTLSQw1NaqATAq+7q4Dh2Nzrbv7jG6PRCB5IPbLIWQJWbDX6BZ +Nl4igSOr0XmtGqML62GSss5oIzKeqU8vxjbg22Jj4FKnvi/ASWVmtNbXLA6NBLTD +zVSeXi3EVjOg7I0rGAYfaQcy00owTYLMgMkcnqzAhnAZuyBJROB0/0v0i6x+zgpz +rln7AoIBAQCHK1TMK2ApgC8/pjboBdNynhbox6BHDumAVVkCWfQLeLKSaRCp/k3s +EZlAq/L6KMUmwUBzcF2XJ8y+fqL3lD7XNJbW32I9HJgr84CA5uVOP7q3nHkXbMc+ +474jwCrIb/8mT+E8X2HORD3cOS8EqHAOHPi4aU1oCk+Ko9vRXWQXd7t9MFJcqsTH 
+9nyNVpO/jRp5qrPvmWhoodb3F+TNFSDdP8lATwuljFQP4mNJ/bjx9QrfUDn17Igh +vIMcS0uIXibIv/t3Z9+qGHHP2vMgrqZZMcUvNgzEQksRXs/2gAMd/tSqqZyTc8MS +Np6AGb9fY19U+pu0+iyB/vaIbxs5NoppAoIBAQCdpwKUiaGERg7f6I8f3fy+vvYQ +RyeNbizFKSEBwyE0b9izImUuKtlOqpK2XbuFZkEXVJ8juWU/7YurMIsBdosFegPu +qxtLEq2AOBtxxRWsLWZAaaesLh6MS0YJ6YjibuK1ITfiKInIkXdc65TQ6BXXsZme +4tQmnCY+C70iG5Xnt6ImH0/FEgnyBbbTHYvFqPTxDFy5Xu0cbtRgEu6rFK5GoYur +35BGoV1tYa50y3dHR79cDYp5sPM/qZ9teEnV++dQKCRJ4oOcGsYBHqc6tEjCLWpv +ji6ZAgx0TbI3oQtECNdpT2cSvYRdSrKQth7fPVo/FhLMrmc6d18cnZswXNYQ -----END RSA PRIVATE KEY----- diff --git a/tests/test_connect.py b/tests/test_connect.py index be694d67..ab19e19c 100644 --- a/tests/test_connect.py +++ b/tests/test_connect.py @@ -9,12 +9,16 @@ import contextlib import ipaddress import os +import pathlib import platform +import shutil import ssl import stat +import sys import tempfile import textwrap import unittest +import unittest.mock import urllib.parse import weakref @@ -32,11 +36,39 @@ CERTS = os.path.join(os.path.dirname(__file__), 'certs') SSL_CA_CERT_FILE = os.path.join(CERTS, 'ca.cert.pem') +SSL_CA_CRL_FILE = os.path.join(CERTS, 'ca.crl.pem') SSL_CERT_FILE = os.path.join(CERTS, 'server.cert.pem') SSL_KEY_FILE = os.path.join(CERTS, 'server.key.pem') CLIENT_CA_CERT_FILE = os.path.join(CERTS, 'client_ca.cert.pem') CLIENT_SSL_CERT_FILE = os.path.join(CERTS, 'client.cert.pem') CLIENT_SSL_KEY_FILE = os.path.join(CERTS, 'client.key.pem') +CLIENT_SSL_PROTECTED_KEY_FILE = os.path.join(CERTS, 'client.key.protected.pem') + + +@contextlib.contextmanager +def mock_dot_postgresql(*, ca=True, crl=False, client=False, protected=False): + with tempfile.TemporaryDirectory() as temp_dir: + home = pathlib.Path(temp_dir) + pg_home = home / '.postgresql' + pg_home.mkdir() + if ca: + shutil.copyfile(SSL_CA_CERT_FILE, pg_home / 'root.crt') + if crl: + shutil.copyfile(SSL_CA_CRL_FILE, pg_home / 'root.crl') + if client: + shutil.copyfile(CLIENT_SSL_CERT_FILE, pg_home / 'postgresql.crt') + if protected: + 
shutil.copyfile( + CLIENT_SSL_PROTECTED_KEY_FILE, pg_home / 'postgresql.key' + ) + else: + shutil.copyfile( + CLIENT_SSL_KEY_FILE, pg_home / 'postgresql.key' + ) + with unittest.mock.patch( + 'pathlib.Path.home', unittest.mock.Mock(return_value=home) + ): + yield class TestSettings(tb.ConnectedTestCase): @@ -1155,8 +1187,10 @@ async def verify_fails(sslmode): await verify_works('allow') await verify_works('prefer') await verify_fails('require') - await verify_fails('verify-ca') - await verify_fails('verify-full') + with mock_dot_postgresql(): + await verify_fails('require') + await verify_fails('verify-ca') + await verify_fails('verify-full') async def test_connection_implicit_host(self): conn_spec = self.get_connection_spec() @@ -1177,6 +1211,9 @@ def get_server_settings(cls): 'ssl_key_file': SSL_KEY_FILE, 'ssl_ca_file': CLIENT_CA_CERT_FILE, }) + if cls.cluster.get_pg_version() >= (12, 0): + conf['ssl_min_protocol_version'] = 'TLSv1.2' + conf['ssl_max_protocol_version'] = 'TLSv1.2' return conf @@ -1263,8 +1300,7 @@ async def verify_works(sslmode, *, host='localhost'): if con: await con.close() - async def verify_fails(sslmode, *, host='localhost', - exn_type=ssl.SSLError): + async def verify_fails(sslmode, *, host='localhost', exn_type): # XXX: uvloop artifact old_handler = self.loop.get_exception_handler() con = None @@ -1286,23 +1322,29 @@ async def verify_fails(sslmode, *, host='localhost', await verify_works('allow') await verify_works('prefer') await verify_works('require') - await verify_fails('verify-ca') - await verify_fails('verify-full') + await verify_fails('verify-ca', exn_type=ValueError) + await verify_fails('verify-full', exn_type=ValueError) - orig_create_default_context = ssl.create_default_context - try: - def custom_create_default_context(*args, **kwargs): - ctx = orig_create_default_context(*args, **kwargs) - ctx.load_verify_locations(cafile=SSL_CA_CERT_FILE) - return ctx - ssl.create_default_context = custom_create_default_context + with 
mock_dot_postgresql(): + await verify_works('require') await verify_works('verify-ca') await verify_works('verify-ca', host='127.0.0.1') await verify_works('verify-full') await verify_fails('verify-full', host='127.0.0.1', exn_type=ssl.CertificateError) - finally: - ssl.create_default_context = orig_create_default_context + + with mock_dot_postgresql(crl=True): + await verify_fails('disable', exn_type=invalid_auth_err) + await verify_works('allow') + await verify_works('prefer') + await verify_fails('require', + exn_type=ssl.SSLError) + await verify_fails('verify-ca', + exn_type=ssl.SSLError) + await verify_fails('verify-ca', host='127.0.0.1', + exn_type=ssl.SSLError) + await verify_fails('verify-full', + exn_type=ssl.SSLError) async def test_ssl_connection_default_context(self): # XXX: uvloop artifact @@ -1366,6 +1408,48 @@ async def test_executemany_uvloop_ssl_issue_700(self): finally: await con.close() + @unittest.skipIf( + sys.version_info < (3, 7), "Python < 3.7 doesn't have ssl.TLSVersion" + ) + async def test_tls_version(self): + if self.cluster.get_pg_version() < (12, 0): + self.skipTest("PostgreSQL < 12 cannot set ssl protocol version") + + # XXX: uvloop artifact + old_handler = self.loop.get_exception_handler() + try: + self.loop.set_exception_handler(lambda *args: None) + with self.assertRaisesRegex(ssl.SSLError, 'protocol version'): + await self.connect( + dsn='postgresql://ssl_user@localhost/postgres' + '?sslmode=require&ssl_min_protocol_version=TLSv1.3' + ) + with self.assertRaises(ssl.SSLError): + await self.connect( + dsn='postgresql://ssl_user@localhost/postgres' + '?sslmode=require' + '&ssl_min_protocol_version=TLSv1.1' + '&ssl_max_protocol_version=TLSv1.1' + ) + with self.assertRaisesRegex(ssl.SSLError, 'no protocols'): + await self.connect( + dsn='postgresql://ssl_user@localhost/postgres' + '?sslmode=require' + '&ssl_min_protocol_version=TLSv1.2' + '&ssl_max_protocol_version=TLSv1.1' + ) + con = await self.connect( + 
dsn='postgresql://ssl_user@localhost/postgres?sslmode=require' + '&ssl_min_protocol_version=TLSv1.2' + '&ssl_max_protocol_version=TLSv1.2' + ) + try: + self.assertEqual(await con.fetchval('SELECT 42'), 42) + finally: + await con.close() + finally: + self.loop.set_exception_handler(old_handler) + @unittest.skipIf(os.environ.get('PGHOST'), 'unmanaged cluster') class TestClientSSLConnection(BaseTestSSLConnection): @@ -1396,41 +1480,68 @@ async def test_ssl_connection_client_auth_fails_with_wrong_setup(self): ssl=ssl_context, ) - async def test_ssl_connection_client_auth_custom_context(self): - ssl_context = ssl.create_default_context( - ssl.Purpose.SERVER_AUTH, - cafile=SSL_CA_CERT_FILE, - ) - ssl_context.load_cert_chain( - CLIENT_SSL_CERT_FILE, - keyfile=CLIENT_SSL_KEY_FILE, - ) - - con = await self.connect( - host='localhost', - user='ssl_user', - ssl=ssl_context, - ) + async def _test_works(self, **conn_args): + con = await self.connect(**conn_args) try: self.assertEqual(await con.fetchval('SELECT 42'), 42) finally: await con.close() + async def test_ssl_connection_client_auth_custom_context(self): + for key_file in (CLIENT_SSL_KEY_FILE, CLIENT_SSL_PROTECTED_KEY_FILE): + ssl_context = ssl.create_default_context( + ssl.Purpose.SERVER_AUTH, + cafile=SSL_CA_CERT_FILE, + ) + ssl_context.load_cert_chain( + CLIENT_SSL_CERT_FILE, + keyfile=key_file, + password='secRet', + ) + await self._test_works( + host='localhost', + user='ssl_user', + ssl=ssl_context, + ) + async def test_ssl_connection_client_auth_dsn(self): - params = urllib.parse.urlencode({ + params = { 'sslrootcert': SSL_CA_CERT_FILE, 'sslcert': CLIENT_SSL_CERT_FILE, 'sslkey': CLIENT_SSL_KEY_FILE, 'sslmode': 'verify-full', - }) - dsn = 'postgres://ssl_user@localhost/postgres?' 
+ params - con = await self.connect(dsn=dsn) - - try: - self.assertEqual(await con.fetchval('SELECT 42'), 42) - finally: - await con.close() + } + params_str = urllib.parse.urlencode(params) + dsn = 'postgres://ssl_user@localhost/postgres?' + params_str + await self._test_works(dsn=dsn) + + params['sslkey'] = CLIENT_SSL_PROTECTED_KEY_FILE + params['sslpassword'] = 'secRet' + params_str = urllib.parse.urlencode(params) + dsn = 'postgres://ssl_user@localhost/postgres?' + params_str + await self._test_works(dsn=dsn) + + async def test_ssl_connection_client_auth_env(self): + env = { + 'PGSSLROOTCERT': SSL_CA_CERT_FILE, + 'PGSSLCERT': CLIENT_SSL_CERT_FILE, + 'PGSSLKEY': CLIENT_SSL_KEY_FILE, + } + dsn = 'postgres://ssl_user@localhost/postgres?sslmode=verify-full' + with unittest.mock.patch.dict('os.environ', env): + await self._test_works(dsn=dsn) + + env['PGSSLKEY'] = CLIENT_SSL_PROTECTED_KEY_FILE + with unittest.mock.patch.dict('os.environ', env): + await self._test_works(dsn=dsn + '&sslpassword=secRet') + + async def test_ssl_connection_client_auth_dot_postgresql(self): + dsn = 'postgres://ssl_user@localhost/postgres?sslmode=verify-full' + with mock_dot_postgresql(client=True): + await self._test_works(dsn=dsn) + with mock_dot_postgresql(client=True, protected=True): + await self._test_works(dsn=dsn + '&sslpassword=secRet') @unittest.skipIf(os.environ.get('PGHOST'), 'unmanaged cluster') @@ -1460,14 +1571,15 @@ async def verify_works(sslmode, *, host='localhost'): if con: await con.close() - async def verify_fails(sslmode, *, host='localhost', - exn_type=ssl.SSLError): + async def verify_fails(sslmode, *, host='localhost'): # XXX: uvloop artifact old_handler = self.loop.get_exception_handler() con = None try: self.loop.set_exception_handler(lambda *args: None) - with self.assertRaises(exn_type): + with self.assertRaises( + asyncpg.InvalidAuthorizationSpecificationError + ): con = await self.connect( dsn='postgresql://foo/?sslmode=' + sslmode, host=host, @@ -1478,13 
+1590,14 @@ async def verify_fails(sslmode, *, host='localhost', await con.close() self.loop.set_exception_handler(old_handler) - invalid_auth_err = asyncpg.InvalidAuthorizationSpecificationError await verify_works('disable') await verify_works('allow') await verify_works('prefer') - await verify_fails('require', exn_type=invalid_auth_err) - await verify_fails('verify-ca') - await verify_fails('verify-full') + await verify_fails('require') + with mock_dot_postgresql(): + await verify_fails('require') + await verify_fails('verify-ca') + await verify_fails('verify-full') async def test_nossl_connection_prefer_cancel(self): con = await self.connect( From dad269135f438e6c43fcd77b7f1ff7c0ffe59423 Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Sat, 6 Nov 2021 14:02:14 -0700 Subject: [PATCH 060/193] Make sure timeout callbacks always get cleaned up (#831) Under certain circumstances the `TimerHandle` used for query timeouts does not get cleaned up in a timely fashion resulting in a temporary (`timeout` seconds) memory leak. 
Fixes: #830 --- asyncpg/protocol/protocol.pyx | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/asyncpg/protocol/protocol.pyx b/asyncpg/protocol/protocol.pyx index dbe52e9e..cf463b66 100644 --- a/asyncpg/protocol/protocol.pyx +++ b/asyncpg/protocol/protocol.pyx @@ -647,12 +647,12 @@ cdef class BaseProtocol(CoreProtocol): self.waiter.set_exception(asyncio.TimeoutError()) def _on_waiter_completed(self, fut): + if self.timeout_handle: + self.timeout_handle.cancel() + self.timeout_handle = None if fut is not self.waiter or self.cancel_waiter is not None: return if fut.cancelled(): - if self.timeout_handle: - self.timeout_handle.cancel() - self.timeout_handle = None self._request_cancel() def _create_future_fallback(self): From 0a3ae7f5bc614f4a5408cb1901f175b30944120f Mon Sep 17 00:00:00 2001 From: Bill Schnurr Date: Sat, 6 Nov 2021 14:04:18 -0700 Subject: [PATCH 061/193] Update __all__ statements to a simpler form that is better supported by typecheckers (#828) Update __all__ statements to a simpler form that's better supported by static type checkers like pyright/pylance https://github.com/microsoft/pyright/blob/main/docs/typed-libraries.md#library-interface --- asyncpg/__init__.py | 6 ++---- asyncpg/exceptions/__init__.py | 4 +++- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/asyncpg/__init__.py b/asyncpg/__init__.py index 01af7904..e8cd11eb 100644 --- a/asyncpg/__init__.py +++ b/asyncpg/__init__.py @@ -15,7 +15,5 @@ from ._version import __version__ # NOQA -__all__ = ( - ('connect', 'create_pool', 'Pool', 'Record', 'Connection') - + exceptions.__all__ # NOQA -) +__all__ = ('connect', 'create_pool', 'Pool', 'Record', 'Connection') +__all__ += exceptions.__all__ # NOQA diff --git a/asyncpg/exceptions/__init__.py b/asyncpg/exceptions/__init__.py index 446a71a8..af46a754 100644 --- a/asyncpg/exceptions/__init__.py +++ b/asyncpg/exceptions/__init__.py @@ -1040,7 +1040,7 @@ class IndexCorruptedError(InternalServerError): sqlstate = 
'XX002' -__all__ = _base.__all__ + ( +__all__ = ( 'ActiveSQLTransactionError', 'AdminShutdownError', 'AmbiguousAliasError', 'AmbiguousColumnError', 'AmbiguousFunctionError', 'AmbiguousParameterError', @@ -1180,3 +1180,5 @@ class IndexCorruptedError(InternalServerError): 'WindowingError', 'WithCheckOptionViolationError', 'WrongObjectTypeError', 'ZeroLengthCharacterStringError' ) + +__all__ += _base.__all__ From 3a90fef05b87a7bc31ae51d28a6e3c118b278c7d Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Sat, 6 Nov 2021 14:46:19 -0700 Subject: [PATCH 062/193] Fix test_timetz_encoding on Python 3.10 Passing `decimal.Decimal` to `datetime.combine` raises on Python 3.10 --- tests/test_codecs.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_codecs.py b/tests/test_codecs.py index 0eacf4e8..e9553ebb 100644 --- a/tests/test_codecs.py +++ b/tests/test_codecs.py @@ -1499,7 +1499,7 @@ async def test_timetz_encoding(self): await self.con.execute("SET TIME ZONE 'America/Toronto'") # Check decoding: row = await self.con.fetchrow( - 'SELECT extract(epoch from now()) AS epoch, ' + 'SELECT extract(epoch from now())::float8 AS epoch, ' 'now()::date as date, now()::timetz as time') result = datetime.datetime.combine(row['date'], row['time']) expected = datetime.datetime.fromtimestamp(row['epoch'], From 2f4fe53915b236076674dfb7c65dacefe2a00ef6 Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Sun, 7 Nov 2021 08:16:13 -0800 Subject: [PATCH 063/193] Fix a bunch of ResourceWarnings in the test suite Make sure things get freed properly otherwise Python gets unhappy with ResourceWarnings. 
--- asyncpg/_testbase/fuzzer.py | 6 ++++++ asyncpg/protocol/protocol.pyx | 2 +- tests/test_connect.py | 13 ++++++++----- tests/test_pool.py | 3 +++ 4 files changed, 18 insertions(+), 6 deletions(-) diff --git a/asyncpg/_testbase/fuzzer.py b/asyncpg/_testbase/fuzzer.py index 5c0b870c..88745646 100644 --- a/asyncpg/_testbase/fuzzer.py +++ b/asyncpg/_testbase/fuzzer.py @@ -191,6 +191,12 @@ async def handle(self): return_when=asyncio.FIRST_COMPLETED) finally: + if self.proxy_to_backend_task is not None: + self.proxy_to_backend_task.cancel() + + if self.proxy_from_backend_task is not None: + self.proxy_from_backend_task.cancel() + # Asyncio fails to properly remove the readers and writers # when the task doing recv() or send() is cancelled, so # we must remove the readers and writers manually before diff --git a/asyncpg/protocol/protocol.pyx b/asyncpg/protocol/protocol.pyx index cf463b66..bbe8026e 100644 --- a/asyncpg/protocol/protocol.pyx +++ b/asyncpg/protocol/protocol.pyx @@ -609,7 +609,7 @@ cdef class BaseProtocol(CoreProtocol): pass finally: self.waiter = None - self.transport.abort() + self.transport.abort() def _request_cancel(self): self.cancel_waiter = self.create_future() diff --git a/tests/test_connect.py b/tests/test_connect.py index ab19e19c..cd8141fd 100644 --- a/tests/test_connect.py +++ b/tests/test_connect.py @@ -1604,11 +1604,14 @@ async def test_nossl_connection_prefer_cancel(self): dsn='postgresql://foo/postgres?sslmode=prefer', host='localhost', user='ssl_user') - self.assertFalse(con._protocol.is_ssl) - with self.assertRaises(asyncio.TimeoutError): - await con.execute('SELECT pg_sleep(5)', timeout=0.5) - val = await con.fetchval('SELECT 123') - self.assertEqual(val, 123) + try: + self.assertFalse(con._protocol.is_ssl) + with self.assertRaises(asyncio.TimeoutError): + await con.execute('SELECT pg_sleep(5)', timeout=0.5) + val = await con.fetchval('SELECT 123') + self.assertEqual(val, 123) + finally: + await con.close() async def 
test_nossl_connection_pool(self): pool = await self.create_pool( diff --git a/tests/test_pool.py b/tests/test_pool.py index 4bf6a0c9..598baef7 100644 --- a/tests/test_pool.py +++ b/tests/test_pool.py @@ -842,6 +842,7 @@ async def test_pool_expire_connections(self): await pool.release(con) self.assertIsNone(pool._holders[0]._con) + await pool.close() async def test_pool_set_connection_args(self): pool = await self.create_pool(database='postgres', @@ -883,6 +884,8 @@ async def test_pool_set_connection_args(self): con = await pool.acquire() self.assertEqual(con.get_settings().application_name, 'set_conn_args_test_2') + await pool.release(con) + await pool.close() async def test_pool_init_race(self): pool = self.create_pool(database='postgres', min_size=1, max_size=1) From 4d39a05268ce4cc01b00458223a767542da048b8 Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Sun, 7 Nov 2021 08:17:11 -0800 Subject: [PATCH 064/193] Fix SSLContext deprecation warnings `SSLContext(PROTOCOL_SSL...)` should not be used anymore. 
Also, silence the one test where we deliberately test TLS v1.1 --- tests/test_connect.py | 71 ++++++++++++++++++++++++------------------- 1 file changed, 40 insertions(+), 31 deletions(-) diff --git a/tests/test_connect.py b/tests/test_connect.py index cd8141fd..f0b1ca07 100644 --- a/tests/test_connect.py +++ b/tests/test_connect.py @@ -20,6 +20,7 @@ import unittest import unittest.mock import urllib.parse +import warnings import weakref import asyncpg @@ -1144,7 +1145,7 @@ def check(): @unittest.skipIf(os.environ.get('PGHOST'), 'unmanaged cluster') async def test_connection_ssl_to_no_ssl_server(self): - ssl_context = ssl.SSLContext(ssl.PROTOCOL_SSLv23) + ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT) ssl_context.load_verify_locations(SSL_CA_CERT_FILE) with self.assertRaisesRegex(ConnectionError, 'rejected SSL'): @@ -1268,7 +1269,7 @@ def _add_hba_entry(self): auth_method='trust') async def test_ssl_connection_custom_context(self): - ssl_context = ssl.SSLContext(ssl.PROTOCOL_SSLv23) + ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT) ssl_context.load_verify_locations(SSL_CA_CERT_FILE) con = await self.connect( @@ -1360,7 +1361,7 @@ async def test_ssl_connection_default_context(self): self.loop.set_exception_handler(old_handler) async def test_ssl_connection_pool(self): - ssl_context = ssl.SSLContext(ssl.PROTOCOL_SSLv23) + ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT) ssl_context.load_verify_locations(SSL_CA_CERT_FILE) pool = await self.create_pool( @@ -1385,7 +1386,7 @@ async def worker(): await pool.close() async def test_executemany_uvloop_ssl_issue_700(self): - ssl_context = ssl.SSLContext(ssl.PROTOCOL_SSLv23) + ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT) ssl_context.load_verify_locations(SSL_CA_CERT_FILE) con = await self.connect( @@ -1417,38 +1418,46 @@ async def test_tls_version(self): # XXX: uvloop artifact old_handler = self.loop.get_exception_handler() - try: - self.loop.set_exception_handler(lambda *args: None) - with 
self.assertRaisesRegex(ssl.SSLError, 'protocol version'): - await self.connect( - dsn='postgresql://ssl_user@localhost/postgres' - '?sslmode=require&ssl_min_protocol_version=TLSv1.3' - ) - with self.assertRaises(ssl.SSLError): - await self.connect( - dsn='postgresql://ssl_user@localhost/postgres' - '?sslmode=require' - '&ssl_min_protocol_version=TLSv1.1' - '&ssl_max_protocol_version=TLSv1.1' - ) - with self.assertRaisesRegex(ssl.SSLError, 'no protocols'): - await self.connect( + + with warnings.catch_warnings(): + warnings.filterwarnings( + "ignore", + message="ssl.TLSVersion.TLSv1_1 is deprecated", + category=DeprecationWarning + ) + try: + self.loop.set_exception_handler(lambda *args: None) + with self.assertRaisesRegex(ssl.SSLError, 'protocol version'): + await self.connect( + dsn='postgresql://ssl_user@localhost/postgres' + '?sslmode=require&ssl_min_protocol_version=TLSv1.3' + ) + with self.assertRaises(ssl.SSLError): + await self.connect( + dsn='postgresql://ssl_user@localhost/postgres' + '?sslmode=require' + '&ssl_min_protocol_version=TLSv1.1' + '&ssl_max_protocol_version=TLSv1.1' + ) + with self.assertRaisesRegex(ssl.SSLError, 'no protocols'): + await self.connect( + dsn='postgresql://ssl_user@localhost/postgres' + '?sslmode=require' + '&ssl_min_protocol_version=TLSv1.2' + '&ssl_max_protocol_version=TLSv1.1' + ) + con = await self.connect( dsn='postgresql://ssl_user@localhost/postgres' '?sslmode=require' '&ssl_min_protocol_version=TLSv1.2' - '&ssl_max_protocol_version=TLSv1.1' + '&ssl_max_protocol_version=TLSv1.2' ) - con = await self.connect( - dsn='postgresql://ssl_user@localhost/postgres?sslmode=require' - '&ssl_min_protocol_version=TLSv1.2' - '&ssl_max_protocol_version=TLSv1.2' - ) - try: - self.assertEqual(await con.fetchval('SELECT 42'), 42) + try: + self.assertEqual(await con.fetchval('SELECT 42'), 42) + finally: + await con.close() finally: - await con.close() - finally: - self.loop.set_exception_handler(old_handler) + 
self.loop.set_exception_handler(old_handler) @unittest.skipIf(os.environ.get('PGHOST'), 'unmanaged cluster') From 603e3868279cdf327aac50312ad1aa2d5c85cd5c Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Mon, 15 Nov 2021 22:12:48 -0800 Subject: [PATCH 065/193] Add Pool methods to determine its min, max, current and idle size (#849) The new `Pool.get_size()`, `Pool.get_min_size()`, `Pool.get_max_size()`, `Pool.get_idle_size()` methods are added to get the size of the current, minimum, maximum and idle connection set size at any given moment. --- asyncpg/pool.py | 34 ++++++++++++++++++++++++++++++++++ tests/test_pool.py | 21 +++++++++++++++++++++ 2 files changed, 55 insertions(+) diff --git a/asyncpg/pool.py b/asyncpg/pool.py index c868097c..69daa77d 100644 --- a/asyncpg/pool.py +++ b/asyncpg/pool.py @@ -118,6 +118,12 @@ def __init__(self, pool, *, max_queries, setup, max_inactive_time): self._timeout = None self._generation = None + def is_connected(self): + return self._con is not None and not self._con.is_closed() + + def is_idle(self): + return not self._in_use + async def connect(self): if self._con is not None: raise exceptions.InternalClientError( @@ -444,6 +450,34 @@ async def _initialize(self): await asyncio.gather(*connect_tasks) + def get_size(self): + """Return the current number of connections in this pool. + + .. versionadded:: 0.25.0 + """ + return sum(h.is_connected() for h in self._holders) + + def get_min_size(self): + """Return the minimum number of connections in this pool. + + .. versionadded:: 0.25.0 + """ + return self._minsize + + def get_max_size(self): + """Return the maximum allowed number of connections in this pool. + + .. versionadded:: 0.25.0 + """ + return self._maxsize + + def get_idle_size(self): + """Return the current number of idle connections in this pool. + + .. 
versionadded:: 0.25.0 + """ + return sum(h.is_connected() and h.is_idle() for h in self._holders) + def set_connect_args(self, dsn=None, **connect_kwargs): r"""Set the new connection arguments for this pool. diff --git a/tests/test_pool.py b/tests/test_pool.py index 598baef7..e2c99efc 100644 --- a/tests/test_pool.py +++ b/tests/test_pool.py @@ -720,6 +720,27 @@ async def test_pool_handles_inactive_connection_errors(self): await pool.close() + async def test_pool_size_and_capacity(self): + async with self.create_pool( + database='postgres', + min_size=2, + max_size=3, + ) as pool: + self.assertEqual(pool.get_min_size(), 2) + self.assertEqual(pool.get_max_size(), 3) + self.assertEqual(pool.get_size(), 2) + self.assertEqual(pool.get_idle_size(), 2) + + async with pool.acquire(): + self.assertEqual(pool.get_idle_size(), 1) + + async with pool.acquire(): + self.assertEqual(pool.get_idle_size(), 0) + + async with pool.acquire(): + self.assertEqual(pool.get_size(), 3) + self.assertEqual(pool.get_idle_size(), 0) + @unittest.skipIf(sys.version_info[:2] < (3, 6), 'no asyncgen support') async def test_pool_handles_transaction_exit_in_asyncgen_1(self): pool = await self.create_pool(database='postgres', From a2a92374d13bedb4947bdbedace173a90f59e175 Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Mon, 15 Nov 2021 22:13:04 -0800 Subject: [PATCH 066/193] Fix the description of the database argument to connect() (#847) The docs incorrectly claim that the `database` argument defaults to the OS username, whereas in fact, the computed value of the `user` argument is used. Fixes: #823. 
--- asyncpg/connection.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/asyncpg/connection.py b/asyncpg/connection.py index a7ec7719..21cb5e5d 100644 --- a/asyncpg/connection.py +++ b/asyncpg/connection.py @@ -1841,7 +1841,7 @@ async def connect(dsn=None, *, If not specified, the value parsed from the *dsn* argument is used, or the value of the ``PGDATABASE`` environment variable, or the - operating system name of the user running the application. + computed value of the *user* argument. :param password: Password to be used for authentication, if the server requires From 03a3d18f2ca6fa661b9cbee0b83ba5165cdf77d9 Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Mon, 15 Nov 2021 22:13:20 -0800 Subject: [PATCH 067/193] Make it possible to specify a statement name in Connection.prepare() (#846) This adds the new `name` keyword argument to `Connection.prepare()` and `PreparedStatement.get_name()` method returning the name of a statement. Some users of asyncpg might find it useful to be able to control how prepared statements are named, especially when a custom prepared statement caching scheme is in use. Specifically, This should help with pgbouncer support in SQLAlchemy asyncpg dialect. Fixes: #837. 
--- asyncpg/connection.py | 27 ++++++++++++++++++++++----- asyncpg/prepared_stmt.py | 8 ++++++++ tests/test_prepare.py | 11 +++++++++++ 3 files changed, 41 insertions(+), 5 deletions(-) diff --git a/asyncpg/connection.py b/asyncpg/connection.py index 21cb5e5d..ac2b5e31 100644 --- a/asyncpg/connection.py +++ b/asyncpg/connection.py @@ -359,8 +359,8 @@ async def _get_statement( query, timeout, *, - named: bool=False, - use_cache: bool=True, + named=False, + use_cache=True, ignore_custom_codec=False, record_class=None ): @@ -385,7 +385,9 @@ async def _get_statement( len(query) > self._config.max_cacheable_statement_size): use_cache = False - if use_cache or named: + if isinstance(named, str): + stmt_name = named + elif use_cache or named: stmt_name = self._get_unique_id('stmt') else: stmt_name = '' @@ -526,11 +528,21 @@ def cursor( record_class, ) - async def prepare(self, query, *, timeout=None, record_class=None): + async def prepare( + self, + query, + *, + name=None, + timeout=None, + record_class=None, + ): """Create a *prepared statement* for the specified query. :param str query: Text of the query to create a prepared statement for. + :param str name: + Optional name of the returned prepared statement. If not + specified, the name is auto-generated. :param float timeout: Optional timeout value in seconds. :param type record_class: @@ -544,9 +556,13 @@ async def prepare(self, query, *, timeout=None, record_class=None): .. versionchanged:: 0.22.0 Added the *record_class* parameter. + + .. versionchanged:: 0.25.0 + Added the *name* parameter. 
""" return await self._prepare( query, + name=name, timeout=timeout, use_cache=False, record_class=record_class, @@ -556,6 +572,7 @@ async def _prepare( self, query, *, + name=None, timeout=None, use_cache: bool=False, record_class=None @@ -564,7 +581,7 @@ async def _prepare( stmt = await self._get_statement( query, timeout, - named=True, + named=True if name is None else name, use_cache=use_cache, record_class=record_class, ) diff --git a/asyncpg/prepared_stmt.py b/asyncpg/prepared_stmt.py index eeb45367..8e241d67 100644 --- a/asyncpg/prepared_stmt.py +++ b/asyncpg/prepared_stmt.py @@ -24,6 +24,14 @@ def __init__(self, connection, query, state): state.attach() self._last_status = None + @connresource.guarded + def get_name(self) -> str: + """Return the name of this prepared statement. + + .. versionadded:: 0.25.0 + """ + return self._state.name + @connresource.guarded def get_query(self) -> str: """Return the text of the query for this prepared statement. diff --git a/tests/test_prepare.py b/tests/test_prepare.py index c441b45a..5911ccf2 100644 --- a/tests/test_prepare.py +++ b/tests/test_prepare.py @@ -600,3 +600,14 @@ async def test_prepare_does_not_use_cache(self): # prepare with disabled cache await self.con.prepare('select 1') self.assertEqual(len(cache), 0) + + async def test_prepare_explicitly_named(self): + ps = await self.con.prepare('select 1', name='foobar') + self.assertEqual(ps.get_name(), 'foobar') + self.assertEqual(await self.con.fetchval('EXECUTE foobar'), 1) + + with self.assertRaisesRegex( + exceptions.DuplicatePreparedStatementError, + 'prepared statement "foobar" already exists', + ): + await self.con.prepare('select 1', name='foobar') From f900b73714f20470a566efd4d0b93ec0a565a93f Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Mon, 15 Nov 2021 22:13:35 -0800 Subject: [PATCH 068/193] Fix parsing of IPv6 addresses in the connection URI (#845) Plain IPv6 addresses specified in square brackets in the connection URI are now parsed 
correctly. Fixes: #838. --- asyncpg/connect_utils.py | 20 +++++++++++++++++--- asyncpg/connection.py | 8 +++++++- tests/test_connect.py | 28 ++++++++++++++++++++++++++++ 3 files changed, 52 insertions(+), 4 deletions(-) diff --git a/asyncpg/connect_utils.py b/asyncpg/connect_utils.py index f6f9d651..f98935f5 100644 --- a/asyncpg/connect_utils.py +++ b/asyncpg/connect_utils.py @@ -197,11 +197,25 @@ def _parse_hostlist(hostlist, port, *, unquote=False): port = _validate_port_spec(hostspecs, port) for i, hostspec in enumerate(hostspecs): - if not hostspec.startswith('/'): - addr, _, hostspec_port = hostspec.partition(':') - else: + if hostspec[0] == '/': + # Unix socket addr = hostspec hostspec_port = '' + elif hostspec[0] == '[': + # IPv6 address + m = re.match(r'(?:\[([^\]]+)\])(?::([0-9]+))?', hostspec) + if m: + addr = m.group(1) + hostspec_port = m.group(2) + else: + raise ValueError( + 'invalid IPv6 address in the connection URI: {!r}'.format( + hostspec + ) + ) + else: + # IPv4 address + addr, _, hostspec_port = hostspec.partition(':') if unquote: addr = urllib.parse.unquote(addr) diff --git a/asyncpg/connection.py b/asyncpg/connection.py index ac2b5e31..09aa3dac 100644 --- a/asyncpg/connection.py +++ b/asyncpg/connection.py @@ -1813,7 +1813,13 @@ async def connect(dsn=None, *, .. note:: The URI must be *valid*, which means that all components must - be properly quoted with :py:func:`urllib.parse.quote`. + be properly quoted with :py:func:`urllib.parse.quote`, and + any literal IPv6 addresses must be enclosed in square brackets. + For example: + + .. 
code-block:: text + + postgres://dbuser@[fe80::1ff:fe23:4567:890a%25eth0]/dbname :param host: Database host address as one of the following: diff --git a/tests/test_connect.py b/tests/test_connect.py index f0b1ca07..d66a087b 100644 --- a/tests/test_connect.py +++ b/tests/test_connect.py @@ -511,6 +511,34 @@ class TestConnectParams(tb.TestCase): }) }, + { + 'name': 'dsn_ipv6_multi_host', + 'dsn': 'postgresql://user@[2001:db8::1234%25eth0],[::1]/db', + 'result': ([('2001:db8::1234%eth0', 5432), ('::1', 5432)], { + 'database': 'db', + 'user': 'user', + }) + }, + + { + 'name': 'dsn_ipv6_multi_host_port', + 'dsn': 'postgresql://user@[2001:db8::1234]:1111,[::1]:2222/db', + 'result': ([('2001:db8::1234', 1111), ('::1', 2222)], { + 'database': 'db', + 'user': 'user', + }) + }, + + { + 'name': 'dsn_ipv6_multi_host_query_part', + 'dsn': 'postgresql:///db?user=user&host=[2001:db8::1234],[::1]', + 'result': ([('2001:db8::1234', 5432), ('::1', 5432)], { + 'database': 'db', + 'user': 'user', + }) + }, + + { 'name': 'dsn_combines_env_multi_host', 'env': { From a8fc21e0a59a6def9c903b34f8754cea58b50d98 Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Tue, 16 Nov 2021 07:31:59 -0800 Subject: [PATCH 069/193] Improve diagnostics of invalid executemany() input (#848) --- asyncpg/protocol/prepared_stmt.pxd | 2 +- asyncpg/protocol/prepared_stmt.pyx | 36 +++++++++++++++++++++++++----- asyncpg/protocol/protocol.pyx | 2 +- tests/test_execute.py | 29 +++++++++++++++++++----- 4 files changed, 57 insertions(+), 12 deletions(-) diff --git a/asyncpg/protocol/prepared_stmt.pxd b/asyncpg/protocol/prepared_stmt.pxd index 4427bfdc..3906af25 100644 --- a/asyncpg/protocol/prepared_stmt.pxd +++ b/asyncpg/protocol/prepared_stmt.pxd @@ -29,7 +29,7 @@ cdef class PreparedStatementState: bint have_text_cols tuple rows_codecs - cdef _encode_bind_msg(self, args) + cdef _encode_bind_msg(self, args, int seqno = ?) 
cpdef _init_codecs(self) cdef _ensure_rows_decoder(self) cdef _ensure_args_encoder(self) diff --git a/asyncpg/protocol/prepared_stmt.pyx b/asyncpg/protocol/prepared_stmt.pyx index 5f1820de..63466db8 100644 --- a/asyncpg/protocol/prepared_stmt.pyx +++ b/asyncpg/protocol/prepared_stmt.pyx @@ -101,12 +101,25 @@ cdef class PreparedStatementState: def mark_closed(self): self.closed = True - cdef _encode_bind_msg(self, args): + cdef _encode_bind_msg(self, args, int seqno = -1): cdef: int idx WriteBuffer writer Codec codec + if not cpython.PySequence_Check(args): + if seqno >= 0: + raise exceptions.DataError( + f'invalid input in executemany() argument sequence ' + f'element #{seqno}: expected a sequence, got ' + f'{type(args).__name__}' + ) + else: + # Non executemany() callers do not pass user input directly, + # so bad input is a bug. + raise exceptions.InternalClientError( + f'Bind: expected a sequence, got {type(args).__name__}') + if len(args) > 32767: raise exceptions.InterfaceError( 'the number of query arguments cannot exceed 32767') @@ -159,19 +172,32 @@ cdef class PreparedStatementState: except exceptions.InterfaceError as e: # This is already a descriptive error, but annotate # with argument name for clarity. + pos = f'${idx + 1}' + if seqno >= 0: + pos = ( + f'{pos} in element #{seqno} of' + f' executemany() sequence' + ) raise e.with_msg( - f'query argument ${idx + 1}: {e.args[0]}') from None + f'query argument {pos}: {e.args[0]}' + ) from None except Exception as e: # Everything else is assumed to be an encoding error # due to invalid input. + pos = f'${idx + 1}' + if seqno >= 0: + pos = ( + f'{pos} in element #{seqno} of' + f' executemany() sequence' + ) value_repr = repr(arg) if len(value_repr) > 40: value_repr = value_repr[:40] + '...' 
raise exceptions.DataError( - 'invalid input for query argument' - ' ${n}: {v} ({msg})'.format( - n=idx + 1, v=value_repr, msg=e)) from e + f'invalid input for query argument' + f' {pos}: {value_repr} ({e})' + ) from e if self.have_text_cols: writer.write_int16(self.cols_num) diff --git a/asyncpg/protocol/protocol.pyx b/asyncpg/protocol/protocol.pyx index bbe8026e..bb548962 100644 --- a/asyncpg/protocol/protocol.pyx +++ b/asyncpg/protocol/protocol.pyx @@ -217,7 +217,7 @@ cdef class BaseProtocol(CoreProtocol): # Make sure the argument sequence is encoded lazily with # this generator expression to keep the memory pressure under # control. - data_gen = (state._encode_bind_msg(b) for b in args) + data_gen = (state._encode_bind_msg(b, i) for i, b in enumerate(args)) arg_bufs = iter(data_gen) waiter = self._new_waiter(timeout) diff --git a/tests/test_execute.py b/tests/test_execute.py index 8cf0d2f2..78d8c124 100644 --- a/tests/test_execute.py +++ b/tests/test_execute.py @@ -9,7 +9,7 @@ import asyncpg from asyncpg import _testbase as tb -from asyncpg.exceptions import UniqueViolationError +from asyncpg import exceptions class TestExecuteScript(tb.ConnectedTestCase): @@ -140,6 +140,25 @@ async def test_executemany_basic(self): ]) async def test_executemany_bad_input(self): + with self.assertRaisesRegex( + exceptions.DataError, + r"invalid input in executemany\(\) argument sequence element #1: " + r"expected a sequence", + ): + await self.con.executemany(''' + INSERT INTO exmany (b) VALUES($1) + ''', [(0,), {1: 0}]) + + with self.assertRaisesRegex( + exceptions.DataError, + r"invalid input for query argument \$1 in element #1 of " + r"executemany\(\) sequence: 'bad'", + ): + await self.con.executemany(''' + INSERT INTO exmany (b) VALUES($1) + ''', [(0,), ("bad",)]) + + async def test_executemany_error_in_input_gen(self): bad_data = ([1 / 0] for v in range(10)) with self.assertRaises(ZeroDivisionError): @@ -155,7 +174,7 @@ async def test_executemany_bad_input(self): ''', 
good_data) async def test_executemany_server_failure(self): - with self.assertRaises(UniqueViolationError): + with self.assertRaises(exceptions.UniqueViolationError): await self.con.executemany(''' INSERT INTO exmany VALUES($1, $2) ''', [ @@ -165,7 +184,7 @@ async def test_executemany_server_failure(self): self.assertEqual(result, []) async def test_executemany_server_failure_after_writes(self): - with self.assertRaises(UniqueViolationError): + with self.assertRaises(exceptions.UniqueViolationError): await self.con.executemany(''' INSERT INTO exmany VALUES($1, $2) ''', [('a' * 32768, x) for x in range(10)] + [ @@ -187,7 +206,7 @@ def gen(): else: yield 'a' * 32768, pos - with self.assertRaises(UniqueViolationError): + with self.assertRaises(exceptions.UniqueViolationError): await self.con.executemany(''' INSERT INTO exmany VALUES($1, $2) ''', gen()) @@ -260,7 +279,7 @@ async def test_executemany_client_failure_in_transaction(self): async def test_executemany_client_server_failure_conflict(self): self.con._transport.set_write_buffer_limits(65536 * 64, 16384 * 64) - with self.assertRaises(UniqueViolationError): + with self.assertRaises(exceptions.UniqueViolationError): await self.con.executemany(''' INSERT INTO exmany VALUES($1, 0) ''', (('a' * 32768,) for y in range(4, -1, -1) if y / y)) From d64a44a1111c336f3c34fa75a32c2e5c6d826b74 Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Tue, 16 Nov 2021 10:39:36 -0800 Subject: [PATCH 070/193] Implement support for multirange types (#851) --- asyncpg/connection.py | 5 +- asyncpg/introspection.py | 125 +++++++++++++++++++++++++++- asyncpg/protocol/codecs/array.pyx | 12 --- asyncpg/protocol/codecs/base.pxd | 24 ++++-- asyncpg/protocol/codecs/base.pyx | 56 ++++++++++++- asyncpg/protocol/codecs/pgproto.pyx | 18 +++- asyncpg/protocol/codecs/range.pyx | 58 +++++++++++-- asyncpg/protocol/pgtypes.pxi | 8 ++ docs/usage.rst | 17 ++-- tests/test_codecs.py | 53 ++++++++++++ 10 files changed, 341 insertions(+), 35 deletions(-) 
diff --git a/asyncpg/connection.py b/asyncpg/connection.py index 09aa3dac..3914826a 100644 --- a/asyncpg/connection.py +++ b/asyncpg/connection.py @@ -94,7 +94,10 @@ def __init__(self, protocol, transport, loop, self._server_caps = _detect_server_capabilities( self._server_version, settings) - self._intro_query = introspection.INTRO_LOOKUP_TYPES + if self._server_version < (14, 0): + self._intro_query = introspection.INTRO_LOOKUP_TYPES_13 + else: + self._intro_query = introspection.INTRO_LOOKUP_TYPES self._reset_query = None self._proxy = None diff --git a/asyncpg/introspection.py b/asyncpg/introspection.py index 64508692..175e0242 100644 --- a/asyncpg/introspection.py +++ b/asyncpg/introspection.py @@ -5,7 +5,7 @@ # the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 -_TYPEINFO = '''\ +_TYPEINFO_13 = '''\ ( SELECT t.oid AS oid, @@ -82,6 +82,129 @@ ''' +INTRO_LOOKUP_TYPES_13 = '''\ +WITH RECURSIVE typeinfo_tree( + oid, ns, name, kind, basetype, elemtype, elemdelim, + range_subtype, attrtypoids, attrnames, depth) +AS ( + SELECT + ti.oid, ti.ns, ti.name, ti.kind, ti.basetype, + ti.elemtype, ti.elemdelim, ti.range_subtype, + ti.attrtypoids, ti.attrnames, 0 + FROM + {typeinfo} AS ti + WHERE + ti.oid = any($1::oid[]) + + UNION ALL + + SELECT + ti.oid, ti.ns, ti.name, ti.kind, ti.basetype, + ti.elemtype, ti.elemdelim, ti.range_subtype, + ti.attrtypoids, ti.attrnames, tt.depth + 1 + FROM + {typeinfo} ti, + typeinfo_tree tt + WHERE + (tt.elemtype IS NOT NULL AND ti.oid = tt.elemtype) + OR (tt.attrtypoids IS NOT NULL AND ti.oid = any(tt.attrtypoids)) + OR (tt.range_subtype IS NOT NULL AND ti.oid = tt.range_subtype) +) + +SELECT DISTINCT + *, + basetype::regtype::text AS basetype_name, + elemtype::regtype::text AS elemtype_name, + range_subtype::regtype::text AS range_subtype_name +FROM + typeinfo_tree +ORDER BY + depth DESC +'''.format(typeinfo=_TYPEINFO_13) + + +_TYPEINFO = '''\ + ( + SELECT + t.oid AS oid, + ns.nspname AS ns, + t.typname AS name, + 
t.typtype AS kind, + (CASE WHEN t.typtype = 'd' THEN + (WITH RECURSIVE typebases(oid, depth) AS ( + SELECT + t2.typbasetype AS oid, + 0 AS depth + FROM + pg_type t2 + WHERE + t2.oid = t.oid + + UNION ALL + + SELECT + t2.typbasetype AS oid, + tb.depth + 1 AS depth + FROM + pg_type t2, + typebases tb + WHERE + tb.oid = t2.oid + AND t2.typbasetype != 0 + ) SELECT oid FROM typebases ORDER BY depth DESC LIMIT 1) + + ELSE NULL + END) AS basetype, + t.typelem AS elemtype, + elem_t.typdelim AS elemdelim, + COALESCE( + range_t.rngsubtype, + multirange_t.rngsubtype) AS range_subtype, + (CASE WHEN t.typtype = 'c' THEN + (SELECT + array_agg(ia.atttypid ORDER BY ia.attnum) + FROM + pg_attribute ia + INNER JOIN pg_class c + ON (ia.attrelid = c.oid) + WHERE + ia.attnum > 0 AND NOT ia.attisdropped + AND c.reltype = t.oid) + + ELSE NULL + END) AS attrtypoids, + (CASE WHEN t.typtype = 'c' THEN + (SELECT + array_agg(ia.attname::text ORDER BY ia.attnum) + FROM + pg_attribute ia + INNER JOIN pg_class c + ON (ia.attrelid = c.oid) + WHERE + ia.attnum > 0 AND NOT ia.attisdropped + AND c.reltype = t.oid) + + ELSE NULL + END) AS attrnames + FROM + pg_catalog.pg_type AS t + INNER JOIN pg_catalog.pg_namespace ns ON ( + ns.oid = t.typnamespace) + LEFT JOIN pg_type elem_t ON ( + t.typlen = -1 AND + t.typelem != 0 AND + t.typelem = elem_t.oid + ) + LEFT JOIN pg_range range_t ON ( + t.oid = range_t.rngtypid + ) + LEFT JOIN pg_range multirange_t ON ( + t.oid = multirange_t.rngmultitypid + ) + ) +''' + + INTRO_LOOKUP_TYPES = '''\ WITH RECURSIVE typeinfo_tree( oid, ns, name, kind, basetype, elemtype, elemdelim, diff --git a/asyncpg/protocol/codecs/array.pyx b/asyncpg/protocol/codecs/array.pyx index 3c39e49c..f8f9b8dd 100644 --- a/asyncpg/protocol/codecs/array.pyx +++ b/asyncpg/protocol/codecs/array.pyx @@ -858,19 +858,7 @@ cdef arraytext_decode(ConnectionSettings settings, FRBuffer *buf): return array_decode(settings, buf, &text_decode_ex, NULL) -cdef anyarray_decode(ConnectionSettings settings, 
FRBuffer *buf): - # Instances of anyarray (or any other polymorphic pseudotype) are - # never supposed to be returned from actual queries. - raise exceptions.ProtocolError( - 'unexpected instance of \'anyarray\' type') - - cdef init_array_codecs(): - register_core_codec(ANYARRAYOID, - NULL, - &anyarray_decode, - PG_FORMAT_BINARY) - # oid[] and text[] are registered as core codecs # to make type introspection query work # diff --git a/asyncpg/protocol/codecs/base.pxd b/asyncpg/protocol/codecs/base.pxd index 79d7a695..16928b88 100644 --- a/asyncpg/protocol/codecs/base.pxd +++ b/asyncpg/protocol/codecs/base.pxd @@ -23,12 +23,13 @@ ctypedef object (*codec_decode_func)(Codec codec, cdef enum CodecType: - CODEC_UNDEFINED = 0 - CODEC_C = 1 - CODEC_PY = 2 - CODEC_ARRAY = 3 - CODEC_COMPOSITE = 4 - CODEC_RANGE = 5 + CODEC_UNDEFINED = 0 + CODEC_C = 1 + CODEC_PY = 2 + CODEC_ARRAY = 3 + CODEC_COMPOSITE = 4 + CODEC_RANGE = 5 + CODEC_MULTIRANGE = 6 cdef enum ServerDataFormat: @@ -95,6 +96,9 @@ cdef class Codec: cdef encode_range(self, ConnectionSettings settings, WriteBuffer buf, object obj) + cdef encode_multirange(self, ConnectionSettings settings, WriteBuffer buf, + object obj) + cdef encode_composite(self, ConnectionSettings settings, WriteBuffer buf, object obj) @@ -109,6 +113,8 @@ cdef class Codec: cdef decode_range(self, ConnectionSettings settings, FRBuffer *buf) + cdef decode_multirange(self, ConnectionSettings settings, FRBuffer *buf) + cdef decode_composite(self, ConnectionSettings settings, FRBuffer *buf) cdef decode_in_python(self, ConnectionSettings settings, FRBuffer *buf) @@ -139,6 +145,12 @@ cdef class Codec: str schema, Codec element_codec) + @staticmethod + cdef Codec new_multirange_codec(uint32_t oid, + str name, + str schema, + Codec element_codec) + @staticmethod cdef Codec new_composite_codec(uint32_t oid, str name, diff --git a/asyncpg/protocol/codecs/base.pyx b/asyncpg/protocol/codecs/base.pyx index e4a767a9..273b27aa 100644 --- 
a/asyncpg/protocol/codecs/base.pyx +++ b/asyncpg/protocol/codecs/base.pyx @@ -71,6 +71,13 @@ cdef class Codec: 'range types is not supported'.format(schema, name)) self.encoder = &self.encode_range self.decoder = &self.decode_range + elif type == CODEC_MULTIRANGE: + if format != PG_FORMAT_BINARY: + raise exceptions.UnsupportedClientFeatureError( + 'cannot decode type "{}"."{}": text encoding of ' + 'range types is not supported'.format(schema, name)) + self.encoder = &self.encode_multirange + self.decoder = &self.decode_multirange elif type == CODEC_COMPOSITE: if format != PG_FORMAT_BINARY: raise exceptions.UnsupportedClientFeatureError( @@ -122,6 +129,12 @@ cdef class Codec: codec_encode_func_ex, (self.element_codec)) + cdef encode_multirange(self, ConnectionSettings settings, WriteBuffer buf, + object obj): + multirange_encode(settings, buf, obj, self.element_codec.oid, + codec_encode_func_ex, + (self.element_codec)) + cdef encode_composite(self, ConnectionSettings settings, WriteBuffer buf, object obj): cdef: @@ -209,6 +222,10 @@ cdef class Codec: return range_decode(settings, buf, codec_decode_func_ex, (self.element_codec)) + cdef decode_multirange(self, ConnectionSettings settings, FRBuffer *buf): + return multirange_decode(settings, buf, codec_decode_func_ex, + (self.element_codec)) + cdef decode_composite(self, ConnectionSettings settings, FRBuffer *buf): cdef: @@ -294,7 +311,11 @@ cdef class Codec: if self.c_encoder is not NULL or self.py_encoder is not None: return True - elif self.type == CODEC_ARRAY or self.type == CODEC_RANGE: + elif ( + self.type == CODEC_ARRAY + or self.type == CODEC_RANGE + or self.type == CODEC_MULTIRANGE + ): return self.element_codec.has_encoder() elif self.type == CODEC_COMPOSITE: @@ -312,7 +333,11 @@ cdef class Codec: if self.c_decoder is not NULL or self.py_decoder is not None: return True - elif self.type == CODEC_ARRAY or self.type == CODEC_RANGE: + elif ( + self.type == CODEC_ARRAY + or self.type == CODEC_RANGE + or 
self.type == CODEC_MULTIRANGE + ): return self.element_codec.has_decoder() elif self.type == CODEC_COMPOSITE: @@ -358,6 +383,18 @@ cdef class Codec: None, None, None, 0) return codec + @staticmethod + cdef Codec new_multirange_codec(uint32_t oid, + str name, + str schema, + Codec element_codec): + cdef Codec codec + codec = Codec(oid) + codec.init(name, schema, 'multirange', CODEC_MULTIRANGE, + element_codec.format, PG_XFORMAT_OBJECT, NULL, NULL, + None, None, element_codec, None, None, None, 0) + return codec + @staticmethod cdef Codec new_composite_codec(uint32_t oid, str name, @@ -536,6 +573,21 @@ cdef class DataCodecConfig: self._derived_type_codecs[oid, elem_codec.format] = \ Codec.new_range_codec(oid, name, schema, elem_codec) + elif ti['kind'] == b'm': + # Multirange type + + if not range_subtype_oid: + raise exceptions.InternalClientError( + f'type record missing base type for multirange {oid}') + + elem_codec = self.get_codec(range_subtype_oid, PG_FORMAT_ANY) + if elem_codec is None: + elem_codec = self.declare_fallback_codec( + range_subtype_oid, ti['range_subtype_name'], schema) + + self._derived_type_codecs[oid, elem_codec.format] = \ + Codec.new_multirange_codec(oid, name, schema, elem_codec) + elif ti['kind'] == b'e': # Enum types are essentially text self._set_builtin_type_codec(oid, name, schema, 'scalar', diff --git a/asyncpg/protocol/codecs/pgproto.pyx b/asyncpg/protocol/codecs/pgproto.pyx index 11417d45..51d650d0 100644 --- a/asyncpg/protocol/codecs/pgproto.pyx +++ b/asyncpg/protocol/codecs/pgproto.pyx @@ -273,8 +273,9 @@ cdef init_pseudo_codecs(): FDW_HANDLEROID, TSM_HANDLEROID, INTERNALOID, OPAQUEOID, ANYELEMENTOID, ANYNONARRAYOID, ANYCOMPATIBLEOID, ANYCOMPATIBLEARRAYOID, ANYCOMPATIBLENONARRAYOID, - ANYCOMPATIBLERANGEOID, PG_DDL_COMMANDOID, INDEX_AM_HANDLEROID, - TABLE_AM_HANDLEROID, + ANYCOMPATIBLERANGEOID, ANYCOMPATIBLEMULTIRANGEOID, + ANYRANGEOID, ANYMULTIRANGEOID, ANYARRAYOID, + PG_DDL_COMMANDOID, INDEX_AM_HANDLEROID, TABLE_AM_HANDLEROID, ] 
register_core_codec(ANYENUMOID, @@ -330,6 +331,19 @@ cdef init_pseudo_codecs(): pgproto.bytea_decode, PG_FORMAT_BINARY) + # These two are internal to BRIN index support and are unlikely + # to be sent, but since I/O functions for these exist, add decoders + # nonetheless. + register_core_codec(PG_BRIN_BLOOM_SUMMARYOID, + NULL, + pgproto.bytea_decode, + PG_FORMAT_BINARY) + + register_core_codec(PG_BRIN_MINMAX_MULTI_SUMMARYOID, + NULL, + pgproto.bytea_decode, + PG_FORMAT_BINARY) + cdef init_text_codecs(): textoids = [ diff --git a/asyncpg/protocol/codecs/range.pyx b/asyncpg/protocol/codecs/range.pyx index 2f598c1b..4270d854 100644 --- a/asyncpg/protocol/codecs/range.pyx +++ b/asyncpg/protocol/codecs/range.pyx @@ -7,6 +7,8 @@ from asyncpg import types as apg_types +from collections.abc import Sequence as SequenceABC + # defined in postgresql/src/include/utils/rangetypes.h DEF RANGE_EMPTY = 0x01 # range is empty DEF RANGE_LB_INC = 0x02 # lower bound is inclusive @@ -139,11 +141,55 @@ cdef range_decode(ConnectionSettings settings, FRBuffer *buf, empty=(flags & RANGE_EMPTY) != 0) -cdef init_range_codecs(): - register_core_codec(ANYRANGEOID, - NULL, - pgproto.text_decode, - PG_FORMAT_TEXT) +cdef multirange_encode(ConnectionSettings settings, WriteBuffer buf, + object obj, uint32_t elem_oid, + encode_func_ex encoder, const void *encoder_arg): + cdef: + WriteBuffer elem_data + + if not isinstance(obj, SequenceABC): + raise TypeError( + 'expected a sequence (got type {!r})'.format(type(obj).__name__) + ) + + elem_data = WriteBuffer.new() + + for elem in obj: + range_encode(settings, elem_data, elem, elem_oid, encoder, encoder_arg) + + # Datum length + buf.write_int32(4 + elem_data.len()) + # Number of elements in multirange + buf.write_int32(len(obj)) + buf.write_buffer(elem_data) + +cdef multirange_decode(ConnectionSettings settings, FRBuffer *buf, + decode_func_ex decoder, const void *decoder_arg): + cdef: + int32_t nelems = hton.unpack_int32(frb_read(buf, 4)) + FRBuffer 
elem_buf + int32_t elem_len + int i + list result + + if nelems == 0: + return [] + + if nelems < 0: + raise exceptions.ProtocolError( + 'unexpected multirange size value: {}'.format(nelems)) + + result = cpython.PyList_New(nelems) + for i in range(nelems): + elem_len = hton.unpack_int32(frb_read(buf, 4)) + if elem_len == -1: + raise exceptions.ProtocolError( + 'unexpected NULL element in multirange value') + else: + frb_slice_from(&elem_buf, buf, elem_len) + elem = range_decode(settings, &elem_buf, decoder, decoder_arg) + cpython.Py_INCREF(elem) + cpython.PyList_SET_ITEM(result, i, elem) -init_range_codecs() + return result diff --git a/asyncpg/protocol/pgtypes.pxi b/asyncpg/protocol/pgtypes.pxi index d0cc22a6..e9bb782f 100644 --- a/asyncpg/protocol/pgtypes.pxi +++ b/asyncpg/protocol/pgtypes.pxi @@ -101,6 +101,10 @@ DEF JSONPATHOID = 4072 DEF REGNAMESPACEOID = 4089 DEF REGROLEOID = 4096 DEF REGCOLLATIONOID = 4191 +DEF ANYMULTIRANGEOID = 4537 +DEF ANYCOMPATIBLEMULTIRANGEOID = 4538 +DEF PG_BRIN_BLOOM_SUMMARYOID = 4600 +DEF PG_BRIN_MINMAX_MULTI_SUMMARYOID = 4601 DEF PG_MCV_LISTOID = 5017 DEF PG_SNAPSHOTOID = 5038 DEF XID8OID = 5069 @@ -116,11 +120,13 @@ BUILTIN_TYPE_OID_MAP = { ACLITEMOID: 'aclitem', ANYARRAYOID: 'anyarray', ANYCOMPATIBLEARRAYOID: 'anycompatiblearray', + ANYCOMPATIBLEMULTIRANGEOID: 'anycompatiblemultirange', ANYCOMPATIBLENONARRAYOID: 'anycompatiblenonarray', ANYCOMPATIBLEOID: 'anycompatible', ANYCOMPATIBLERANGEOID: 'anycompatiblerange', ANYELEMENTOID: 'anyelement', ANYENUMOID: 'anyenum', + ANYMULTIRANGEOID: 'anymultirange', ANYNONARRAYOID: 'anynonarray', ANYOID: 'any', ANYRANGEOID: 'anyrange', @@ -161,6 +167,8 @@ BUILTIN_TYPE_OID_MAP = { OIDOID: 'oid', OPAQUEOID: 'opaque', PATHOID: 'path', + PG_BRIN_BLOOM_SUMMARYOID: 'pg_brin_bloom_summary', + PG_BRIN_MINMAX_MULTI_SUMMARYOID: 'pg_brin_minmax_multi_summary', PG_DDL_COMMANDOID: 'pg_ddl_command', PG_DEPENDENCIESOID: 'pg_dependencies', PG_LSNOID: 'pg_lsn', diff --git a/docs/usage.rst b/docs/usage.rst 
index 3c835ece..a6c62b41 100644 --- a/docs/usage.rst +++ b/docs/usage.rst @@ -73,7 +73,12 @@ The table below shows the correspondence between PostgreSQL and Python types. +----------------------+-----------------------------------------------------+ | ``anyenum`` | :class:`str ` | +----------------------+-----------------------------------------------------+ -| ``anyrange`` | :class:`asyncpg.Range ` | +| ``anyrange`` | :class:`asyncpg.Range `, | +| | :class:`tuple ` | ++----------------------+-----------------------------------------------------+ +| ``anymultirange`` | ``list[``:class:`asyncpg.Range\ | +| | ` ``]``, | +| | ``list[``:class:`tuple ` ``]`` [#f1]_ | +----------------------+-----------------------------------------------------+ | ``record`` | :class:`asyncpg.Record`, | | | :class:`tuple `, | @@ -104,7 +109,7 @@ The table below shows the correspondence between PostgreSQL and Python types. | | :class:`ipaddress.IPv4Address\ | | | `, | | | :class:`ipaddress.IPv6Address\ | -| | ` [#f1]_ | +| | ` [#f2]_ | +----------------------+-----------------------------------------------------+ | ``macaddr`` | :class:`str ` | +----------------------+-----------------------------------------------------+ @@ -127,7 +132,7 @@ The table below shows the correspondence between PostgreSQL and Python types. | ``interval`` | :class:`datetime.timedelta \ | | | ` | +----------------------+-----------------------------------------------------+ -| ``float``, | :class:`float ` [#f2]_ | +| ``float``, | :class:`float ` [#f3]_ | | ``double precision`` | | +----------------------+-----------------------------------------------------+ | ``smallint``, | :class:`int ` | @@ -158,10 +163,12 @@ The table below shows the correspondence between PostgreSQL and Python types. All other types are encoded and decoded as text by default. -.. [#f1] Prior to version 0.20.0, asyncpg erroneously treated ``inet`` values +.. [#f1] Since version 0.25.0 + +.. 
[#f2] Prior to version 0.20.0, asyncpg erroneously treated ``inet`` values with prefix as ``IPvXNetwork`` instead of ``IPvXInterface``. -.. [#f2] Inexact single-precision ``float`` values may have a different +.. [#f3] Inexact single-precision ``float`` values may have a different representation when decoded into a Python float. This is inherent to the implementation of limited-precision floating point types. If you need the decimal representation to match, cast the expression diff --git a/tests/test_codecs.py b/tests/test_codecs.py index e9553ebb..918e01d5 100644 --- a/tests/test_codecs.py +++ b/tests/test_codecs.py @@ -1042,6 +1042,59 @@ async def test_range_types(self): dic = {obj_a: 1, obj_b: 2} self.assertEqual(len(dic), count) + async def test_multirange_types(self): + """Test encoding/decoding of multirange types.""" + + if self.server_version < (14, 0): + self.skipTest("this server does not support multirange types") + + cases = [ + ('int4multirange', [ + [ + [], + [] + ], + [ + [()], + [] + ], + [ + [asyncpg.Range(empty=True)], + [] + ], + [ + [asyncpg.Range(0, 9, lower_inc=False, upper_inc=True)], + [asyncpg.Range(1, 10)] + ], + [ + [(1, 9), (9, 11)], + [asyncpg.Range(1, 12)] + ], + [ + [(1, 9), (20, 30)], + [asyncpg.Range(1, 10), asyncpg.Range(20, 31)] + ], + [ + [(None, 2)], + [asyncpg.Range(None, 3)], + ] + ]) + ] + + for (typname, sample_data) in cases: + st = await self.con.prepare( + "SELECT $1::" + typname + ) + + for sample, expected in sample_data: + with self.subTest(sample=sample, typname=typname): + result = await st.fetchval(sample) + self.assertEqual(result, expected) + + with self.assertRaisesRegex( + asyncpg.DataError, 'expected a sequence'): + await self.con.fetch("SELECT $1::int4multirange", 1) + async def test_extra_codec_alias(self): """Test encoding/decoding of a builtin non-pg_catalog codec.""" await self.con.execute(''' From 40491678598aed0d2ecc8f51a9d856739a85cb34 Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Tue, 16 Nov 
2021 11:01:57 -0800 Subject: [PATCH 071/193] Add PostgreSQL 14 to the support matrix (#852) --- .github/workflows/tests.yml | 6 +++--- README.rst | 2 +- asyncpg/exceptions/__init__.py | 13 +++++++++++-- docs/index.rst | 3 ++- tools/generate_exceptions.py | 4 +++- 5 files changed, 20 insertions(+), 8 deletions(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index defe9d7a..f562ae32 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -17,13 +17,13 @@ jobs: # job. strategy: matrix: - python-version: [3.6, 3.7, 3.8, 3.9, 3.10.0-rc.1] + python-version: ["3.6", "3.7", "3.8", "3.9", "3.10"] os: [ubuntu-latest, macos-latest, windows-latest] loop: [asyncio, uvloop] exclude: # uvloop does not support Python 3.6 - loop: uvloop - python-version: 3.6 + python-version: "3.6" # uvloop does not support windows - loop: uvloop os: windows-latest @@ -79,7 +79,7 @@ jobs: test-postgres: strategy: matrix: - postgres-version: [9.5, 9.6, 10, 11, 12, 13] + postgres-version: ["9.5", "9.6", "10", "11", "12", "13", "14"] runs-on: ubuntu-latest diff --git a/README.rst b/README.rst index 2f5da7a4..2ed14726 100644 --- a/README.rst +++ b/README.rst @@ -14,7 +14,7 @@ framework. You can read more about asyncpg in an introductory `blog post `_. asyncpg requires Python 3.6 or later and is supported for PostgreSQL -versions 9.5 to 13. Older PostgreSQL versions or other databases implementing +versions 9.5 to 14. Older PostgreSQL versions or other databases implementing the PostgreSQL protocol *may* work, but are not being actively tested. 
diff --git a/asyncpg/exceptions/__init__.py b/asyncpg/exceptions/__init__.py index af46a754..c5b5ccc4 100644 --- a/asyncpg/exceptions/__init__.py +++ b/asyncpg/exceptions/__init__.py @@ -337,6 +337,10 @@ class DuplicateJsonObjectKeyValueError(DataError): sqlstate = '22030' +class InvalidArgumentForSQLJsonDatetimeFunctionError(DataError): + sqlstate = '22031' + + class InvalidJsonTextError(DataError): sqlstate = '22032' @@ -872,6 +876,10 @@ class DatabaseDroppedError(OperatorInterventionError): sqlstate = '57P04' +class IdleSessionTimeoutError(OperatorInterventionError): + sqlstate = '57P05' + + class PostgresSystemError(_base.PostgresError): sqlstate = '58000' @@ -1086,8 +1094,8 @@ class IndexCorruptedError(InternalServerError): 'ForeignKeyViolationError', 'FunctionExecutedNoReturnStatementError', 'GeneratedAlwaysError', 'GroupingError', 'HeldCursorRequiresSameIsolationLevelError', - 'IdleInTransactionSessionTimeoutError', 'ImplicitZeroBitPadding', - 'InFailedSQLTransactionError', + 'IdleInTransactionSessionTimeoutError', 'IdleSessionTimeoutError', + 'ImplicitZeroBitPadding', 'InFailedSQLTransactionError', 'InappropriateAccessModeForBranchTransactionError', 'InappropriateIsolationLevelForBranchTransactionError', 'IndeterminateCollationError', 'IndeterminateDatatypeError', @@ -1098,6 +1106,7 @@ class IndexCorruptedError(InternalServerError): 'InvalidArgumentForNthValueFunctionError', 'InvalidArgumentForNtileFunctionError', 'InvalidArgumentForPowerFunctionError', + 'InvalidArgumentForSQLJsonDatetimeFunctionError', 'InvalidArgumentForWidthBucketFunctionError', 'InvalidAuthorizationSpecificationError', 'InvalidBinaryRepresentationError', 'InvalidCachedStatementError', diff --git a/docs/index.rst b/docs/index.rst index 57031b03..87c43aa8 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -15,7 +15,8 @@ of PostgreSQL server binary protocol for use with Python's ``asyncio`` framework. 
**asyncpg** requires Python 3.6 or later and is supported for PostgreSQL -versions 9.5 to 13. +versions 9.5 to 14. Older PostgreSQL versions or other databases implementing +the PostgreSQL protocol *may* work, but are not being actively tested. Contents -------- diff --git a/tools/generate_exceptions.py b/tools/generate_exceptions.py index 2e7477c3..0b626558 100755 --- a/tools/generate_exceptions.py +++ b/tools/generate_exceptions.py @@ -176,9 +176,11 @@ def _add_class(clsname, base, sqlstate, docstring): buf += '\n\n\n'.join(classes) _all = textwrap.wrap(', '.join('{!r}'.format(c) for c in sorted(clsnames))) - buf += '\n\n\n__all__ = _base.__all__ + (\n {}\n)'.format( + buf += '\n\n\n__all__ = (\n {}\n)'.format( '\n '.join(_all)) + buf += '\n\n__all__ += _base.__all__' + print(buf) From 78b83f8dbaf4a0b6ce637b52442d06f8abf53873 Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Tue, 16 Nov 2021 13:27:06 -0800 Subject: [PATCH 072/193] Fix `test_tls_version` in some environments The kinds of errors raised on invalid TLS configuration seem to differ between platforms. 
--- tests/test_connect.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/tests/test_connect.py b/tests/test_connect.py index d66a087b..b78f4f48 100644 --- a/tests/test_connect.py +++ b/tests/test_connect.py @@ -1438,7 +1438,8 @@ async def test_executemany_uvloop_ssl_issue_700(self): await con.close() @unittest.skipIf( - sys.version_info < (3, 7), "Python < 3.7 doesn't have ssl.TLSVersion" + sys.version_info < (3, 7), + "Python < 3.7 doesn't have ssl.TLSVersion" ) async def test_tls_version(self): if self.cluster.get_pg_version() < (12, 0): @@ -1455,12 +1456,15 @@ async def test_tls_version(self): ) try: self.loop.set_exception_handler(lambda *args: None) - with self.assertRaisesRegex(ssl.SSLError, 'protocol version'): + with self.assertRaisesRegex( + ssl.SSLError, + '(protocol version)|(handshake failure)', + ): await self.connect( dsn='postgresql://ssl_user@localhost/postgres' '?sslmode=require&ssl_min_protocol_version=TLSv1.3' ) - with self.assertRaises(ssl.SSLError): + with self.assertRaises((ssl.SSLError, ConnectionResetError)): await self.connect( dsn='postgresql://ssl_user@localhost/postgres' '?sslmode=require' From 18f2aa4531dabf7bddd01fff5ebe03cfdac4d838 Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Tue, 16 Nov 2021 13:50:22 -0800 Subject: [PATCH 073/193] Fix compiler warnings --- asyncpg/pgproto | 2 +- asyncpg/protocol/codecs/range.pyx | 16 ++++++++++++++-- asyncpg/protocol/prepared_stmt.pyx | 4 ++-- asyncpg/protocol/protocol.pyx | 2 +- asyncpg/protocol/record/recordobj.c | 2 +- 5 files changed, 19 insertions(+), 7 deletions(-) diff --git a/asyncpg/pgproto b/asyncpg/pgproto index 1720f8af..a4178145 160000 --- a/asyncpg/pgproto +++ b/asyncpg/pgproto @@ -1 +1 @@ -Subproject commit 1720f8af63725d79454884cfa787202a50eb5430 +Subproject commit a4178145cd7cc3a44eee20cfc9e8b94a7fed2053 diff --git a/asyncpg/protocol/codecs/range.pyx b/asyncpg/protocol/codecs/range.pyx index 4270d854..1038c18d 100644 --- 
a/asyncpg/protocol/codecs/range.pyx +++ b/asyncpg/protocol/codecs/range.pyx @@ -146,6 +146,8 @@ cdef multirange_encode(ConnectionSettings settings, WriteBuffer buf, encode_func_ex encoder, const void *encoder_arg): cdef: WriteBuffer elem_data + ssize_t elem_data_len + ssize_t elem_count if not isinstance(obj, SequenceABC): raise TypeError( @@ -157,10 +159,20 @@ cdef multirange_encode(ConnectionSettings settings, WriteBuffer buf, for elem in obj: range_encode(settings, elem_data, elem, elem_oid, encoder, encoder_arg) + elem_count = len(obj) + if elem_count > INT32_MAX: + raise OverflowError(f'too many elements in multirange value') + + elem_data_len = elem_data.len() + if elem_data_len > INT32_MAX - 4: + raise OverflowError( + f'size of encoded multirange datum exceeds the maximum allowed' + f' {INT32_MAX - 4} bytes') + # Datum length - buf.write_int32(4 + elem_data.len()) + buf.write_int32(4 + elem_data_len) # Number of elements in multirange - buf.write_int32(len(obj)) + buf.write_int32(elem_count) buf.write_buffer(elem_data) diff --git a/asyncpg/protocol/prepared_stmt.pyx b/asyncpg/protocol/prepared_stmt.pyx index 63466db8..b1f2a66d 100644 --- a/asyncpg/protocol/prepared_stmt.pyx +++ b/asyncpg/protocol/prepared_stmt.pyx @@ -151,7 +151,7 @@ cdef class PreparedStatementState: writer.write_int16(self.args_num) for idx in range(self.args_num): codec = (self.args_codecs[idx]) - writer.write_int16(codec.format) + writer.write_int16(codec.format) else: # All arguments are in binary format writer.write_int32(0x00010001) @@ -203,7 +203,7 @@ cdef class PreparedStatementState: writer.write_int16(self.cols_num) for idx in range(self.cols_num): codec = (self.rows_codecs[idx]) - writer.write_int16(codec.format) + writer.write_int16(codec.format) else: # All columns are in binary format writer.write_int32(0x00010001) diff --git a/asyncpg/protocol/protocol.pyx b/asyncpg/protocol/protocol.pyx index bb548962..3f512a81 100644 --- a/asyncpg/protocol/protocol.pyx +++ 
b/asyncpg/protocol/protocol.pyx @@ -38,7 +38,7 @@ from asyncpg.protocol cimport record from libc.stdint cimport int8_t, uint8_t, int16_t, uint16_t, \ int32_t, uint32_t, int64_t, uint64_t, \ - UINT32_MAX + INT32_MAX, UINT32_MAX from asyncpg.exceptions import _base as apg_exc_base from asyncpg import compat diff --git a/asyncpg/protocol/record/recordobj.c b/asyncpg/protocol/record/recordobj.c index e912782f..4bf34c8a 100644 --- a/asyncpg/protocol/record/recordobj.c +++ b/asyncpg/protocol/record/recordobj.c @@ -63,7 +63,7 @@ ApgRecord_New(PyTypeObject *type, PyObject *desc, Py_ssize_t size) return PyErr_NoMemory(); } o = (ApgRecordObject *)type->tp_alloc(type, size); - if (!_ApgObject_GC_IS_TRACKED(o)) { + if (!_ApgObject_GC_IS_TRACKED((PyObject *)o)) { PyErr_SetString( PyExc_TypeError, "record subclass is not tracked by GC" From 220d1d508a90a7db2a8172348561406a9316c3ca Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Tue, 16 Nov 2021 13:00:01 -0800 Subject: [PATCH 074/193] Release workflow updates * Add musllinux support * Build 32-bit packages for Windows (fixes #834) * Don't ship Cython-generated *.c files in wheels --- .github/workflows/install-postgres.sh | 4 ++++ .github/workflows/release.yml | 24 ++++++++---------------- Makefile | 1 + pyproject.toml | 24 ++++++++++++++++++++++++ setup.py | 10 +++++++--- tests/__init__.py | 2 +- 6 files changed, 45 insertions(+), 20 deletions(-) create mode 100644 pyproject.toml diff --git a/.github/workflows/install-postgres.sh b/.github/workflows/install-postgres.sh index 70d42f60..c3f27186 100755 --- a/.github/workflows/install-postgres.sh +++ b/.github/workflows/install-postgres.sh @@ -38,7 +38,11 @@ elif [ "${ID}" = "centos" ]; then "postgresql${PGVERSION}-server" \ "postgresql${PGVERSION}-contrib" ln -s "/usr/pgsql-${PGVERSION}/bin/pg_config" "/usr/local/bin/pg_config" +elif [ "${ID}" = "alpine" ]; then + apk add shadow postgresql postgresql-dev postgresql-contrib else echo "install-postgres.sh: Unsupported 
distro: ${distro}" >&2 exit 1 fi + +useradd -m -s /bin/bash apgtest diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index e388e7bb..d1e2cfd5 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -74,8 +74,13 @@ jobs: strategy: matrix: os: [ubuntu-latest, macos-latest, windows-latest] - cibw_python: ["cp37-*", "cp38-*", "cp39-*", "cp310-*"] - cibw_arch: ["auto64"] + cibw_python: ["cp36-*", "cp37-*", "cp38-*", "cp39-*", "cp310-*"] + cibw_arch: ["auto64", "auto32"] + exclude: + - os: macos-latest + cibw_arch: "auto32" + - os: ubuntu-latest + cibw_arch: "auto32" defaults: run: @@ -90,24 +95,11 @@ jobs: fetch-depth: 50 submodules: true - - uses: pypa/cibuildwheel@v2.1.1 + - uses: pypa/cibuildwheel@v2.2.2 env: CIBW_BUILD_VERBOSITY: 1 CIBW_BUILD: ${{ matrix.cibw_python }} CIBW_ARCHS: ${{ matrix.cibw_arch }} - CIBW_BEFORE_ALL_LINUX: > - yum -y install libffi-devel - && env PGVERSION=12 .github/workflows/install-postgres.sh - && useradd -m -s /bin/bash apgtest - CIBW_TEST_EXTRAS: "test" - CIBW_TEST_COMMAND: > - python {project}/tests/__init__.py - CIBW_TEST_COMMAND_WINDOWS: > - python {project}\tests\__init__.py - CIBW_TEST_COMMAND_LINUX: > - PY=`which python` - && chmod -R go+rX "$(dirname $(dirname $(dirname $PY)))" - && su -p -l apgtest -c "$PY {project}/tests/__init__.py" - uses: actions/upload-artifact@v2 with: diff --git a/Makefile b/Makefile index 9ad5d2e7..7a09181c 100644 --- a/Makefile +++ b/Makefile @@ -12,6 +12,7 @@ clean: rm -fr dist/ doc/_build/ rm -fr asyncpg/pgproto/*.c asyncpg/pgproto/*.html rm -fr asyncpg/pgproto/codecs/*.html + rm -fr asyncpg/pgproto/*.so rm -fr asyncpg/protocol/*.c asyncpg/protocol/*.html rm -fr asyncpg/protocol/*.so build *.egg-info rm -fr asyncpg/protocol/codecs/*.html diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 00000000..9e4ce7f9 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,24 @@ +[project] +requires-python = ">=3.6" + +[build-system] +requires = 
["setuptools>=42", "wheel"] +build-backend = "setuptools.build_meta" + +[tool.cibuildwheel] +build-frontend = "build" +test-extras = "test" + +[tool.cibuildwheel.macos] +test-command = "python {project}/tests/__init__.py" + +[tool.cibuildwheel.windows] +test-command = "python {project}\\tests\\__init__.py" + +[tool.cibuildwheel.linux] +before-all = ".github/workflows/install-postgres.sh" +test-command = """\ + PY=`which python` \ + && chmod -R go+rX "$(dirname $(dirname $(dirname $PY)))" \ + && su -l apgtest -c "$PY {project}/tests/__init__.py" \ + """ diff --git a/setup.py b/setup.py index 4da3fb25..332bad3f 100644 --- a/setup.py +++ b/setup.py @@ -274,9 +274,13 @@ def finalize_options(self): author_email='hello@magic.io', url='https://github.com/MagicStack/asyncpg', license='Apache License, Version 2.0', - packages=['asyncpg'], - provides=['asyncpg'], - include_package_data=True, + packages=setuptools.find_packages( + exclude=['tests', 'tools'], + ), + package_data={ + # Cython sources needed for tracebacks + "": ["*.pyx", "*.pxd", "*.pxi"], + }, ext_modules=[ setuptools.extension.Extension( "asyncpg.pgproto.pgproto", diff --git a/tests/__init__.py b/tests/__init__.py index 6282ebe5..c412aff7 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -18,6 +18,6 @@ def suite(): if __name__ == '__main__': - runner = unittest.runner.TextTestRunner() + runner = unittest.runner.TextTestRunner(verbosity=2) result = runner.run(suite()) sys.exit(not result.wasSuccessful()) From 7a2dfdc327b49e474b1c1e8857581a1bc7252e6f Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Tue, 16 Nov 2021 13:01:34 -0800 Subject: [PATCH 075/193] asyncpg v0.25.0 Changes ------- * Improve SSL option compatibility in URIs (by @fantix in 383c711e for #827) * Add `Pool` methods to determine its min, max, current and idle size (by @elprans in 603e3868 for #849) * Make it possible to specify a statement name in `Connection.prepare()` (by @elprans in 03a3d18f for #846) * Implement support for 
`multirange` types (by @elprans in d64a44a1 for #851) Fixes ----- * Make sure timeout callbacks always get cleaned up (by @elprans in dad26913 for #831) * Update `__all__` statements to a simpler form that is better supported by typecheckers (by @bschnurr in 0a3ae7f5 for #828) * Fix `test_timetz_encoding` on Python 3.10 (by @elprans in 3a90fef0) * Fix a bunch of `ResourceWarnings` in the test suite (by @elprans in 2f4fe539) * Fix `SSLContext` deprecation warnings (by @elprans in 4d39a052) * Fix the description of the database argument to `connect()` (by @elprans in a2a92374 for #847) * Fix parsing of IPv6 addresses in the connection URI (by @elprans in f900b737 for #845) * Improve diagnostics of invalid `executemany()` input (by @elprans in a8fc21e0 for #848) --- asyncpg/_version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/asyncpg/_version.py b/asyncpg/_version.py index 2f106a98..bd1fc082 100644 --- a/asyncpg/_version.py +++ b/asyncpg/_version.py @@ -10,4 +10,4 @@ # supported platforms, publish the packages on PyPI, merge the PR # to the target branch, create a Git tag pointing to the commit. -__version__ = '0.24.0' +__version__ = '0.25.0' From 1259bfd172e869aded1236b15ea1dbb9f0a770c9 Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Tue, 16 Nov 2021 15:22:55 -0800 Subject: [PATCH 076/193] Post-release version bump --- asyncpg/_version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/asyncpg/_version.py b/asyncpg/_version.py index bd1fc082..ce45e9c8 100644 --- a/asyncpg/_version.py +++ b/asyncpg/_version.py @@ -10,4 +10,4 @@ # supported platforms, publish the packages on PyPI, merge the PR # to the target branch, create a Git tag pointing to the commit. 
-__version__ = '0.25.0' +__version__ = '0.26.0.dev0' From a2f093df6aceec7842709eaf92c5ff9df093efae Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Tue, 16 Nov 2021 15:32:02 -0800 Subject: [PATCH 077/193] Unbreak test workflow (#854) --- .github/workflows/tests.yml | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index f562ae32..d61573db 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -71,9 +71,9 @@ jobs: LOOP_IMPL: ${{ matrix.loop }} run: | if [ "${LOOP_IMPL}" = "uvloop" ]; then - env USE_UVLOOP=1 python setup.py test + env USE_UVLOOP=1 python -m unittest -v tests.suite else - python setup.py test + python -m unittest -v tests.suite fi test-postgres: @@ -116,21 +116,19 @@ jobs: - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v2 if: steps.release.outputs.version == 0 - with: - python-version: ${{ matrix.python-version }} - name: Install Python Deps if: steps.release.outputs.version == 0 run: | - python -m pip install -U pip setuptools - pip install -e .[test] + python -m pip install -U pip setuptools wheel + python -m pip install -e .[test] - name: Test if: steps.release.outputs.version == 0 env: PGVERSION: ${{ matrix.postgres-version }} run: | - python setup.py test + python -m unittest -v tests.suite # This job exists solely to act as the test job aggregate to be # targeted by branch policies. 
From eddb649c65fabcbe1a1b91bdc7cf2cff2a3dea60 Mon Sep 17 00:00:00 2001 From: Rongrong <15956627+Rongronggg9@users.noreply.github.com> Date: Sat, 26 Mar 2022 01:09:25 +0800 Subject: [PATCH 078/193] Fix invalid `pyproject.toml` (#900) PEP 621 --- pyproject.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/pyproject.toml b/pyproject.toml index 9e4ce7f9..e6e32289 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,4 +1,5 @@ [project] +name = "asyncpg" requires-python = ">=3.6" [build-system] From 2519cf386fa9ef94198744e7e311769c1fb92279 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Baltaz=C3=A1r=20Radics?= Date: Fri, 25 Mar 2022 23:21:23 +0100 Subject: [PATCH 079/193] Add record_class parameter Pool.fetch and Pool.fetchrow (#896) --- asyncpg/pool.py | 24 ++++++++++++++++++++---- 1 file changed, 20 insertions(+), 4 deletions(-) diff --git a/asyncpg/pool.py b/asyncpg/pool.py index 69daa77d..14e4be7e 100644 --- a/asyncpg/pool.py +++ b/asyncpg/pool.py @@ -576,7 +576,13 @@ async def executemany(self, command: str, args, *, timeout: float=None): async with self.acquire() as con: return await con.executemany(command, args, timeout=timeout) - async def fetch(self, query, *args, timeout=None) -> list: + async def fetch( + self, + query, + *args, + timeout=None, + record_class=None + ) -> list: """Run a query and return the results as a list of :class:`Record`. Pool performs this operation using one of its connections. Other than @@ -586,7 +592,12 @@ async def fetch(self, query, *args, timeout=None) -> list: .. versionadded:: 0.10.0 """ async with self.acquire() as con: - return await con.fetch(query, *args, timeout=timeout) + return await con.fetch( + query, + *args, + timeout=timeout, + record_class=record_class + ) async def fetchval(self, query, *args, column=0, timeout=None): """Run a query and return a value in the first row. 
@@ -602,7 +613,7 @@ async def fetchval(self, query, *args, column=0, timeout=None): return await con.fetchval( query, *args, column=column, timeout=timeout) - async def fetchrow(self, query, *args, timeout=None): + async def fetchrow(self, query, *args, timeout=None, record_class=None): """Run a query and return the first row. Pool performs this operation using one of its connections. Other than @@ -612,7 +623,12 @@ async def fetchrow(self, query, *args, timeout=None): .. versionadded:: 0.10.0 """ async with self.acquire() as con: - return await con.fetchrow(query, *args, timeout=timeout) + return await con.fetchrow( + query, + *args, + timeout=timeout, + record_class=record_class + ) async def copy_from_table( self, From cca4a2d38f3ad6d92b7255e16dcf9a6cb6ee1e74 Mon Sep 17 00:00:00 2001 From: QuantumTM Date: Fri, 25 Mar 2022 23:03:47 +0000 Subject: [PATCH 080/193] Domain basetypes are introspected (#886) (#887) --- asyncpg/introspection.py | 2 ++ tests/test_introspection.py | 23 +++++++++++++++++++++++ 2 files changed, 25 insertions(+) diff --git a/asyncpg/introspection.py b/asyncpg/introspection.py index 175e0242..d62f39a0 100644 --- a/asyncpg/introspection.py +++ b/asyncpg/introspection.py @@ -109,6 +109,7 @@ (tt.elemtype IS NOT NULL AND ti.oid = tt.elemtype) OR (tt.attrtypoids IS NOT NULL AND ti.oid = any(tt.attrtypoids)) OR (tt.range_subtype IS NOT NULL AND ti.oid = tt.range_subtype) + OR (tt.basetype IS NOT NULL AND ti.oid = tt.basetype) ) SELECT DISTINCT @@ -232,6 +233,7 @@ (tt.elemtype IS NOT NULL AND ti.oid = tt.elemtype) OR (tt.attrtypoids IS NOT NULL AND ti.oid = any(tt.attrtypoids)) OR (tt.range_subtype IS NOT NULL AND ti.oid = tt.range_subtype) + OR (tt.basetype IS NOT NULL AND ti.oid = tt.basetype) ) SELECT DISTINCT diff --git a/tests/test_introspection.py b/tests/test_introspection.py index 7de4236f..56f1d7a3 100644 --- a/tests/test_introspection.py +++ b/tests/test_introspection.py @@ -190,3 +190,26 @@ async def wait_and_drop(): DROP DOMAIN 
intro_2_t; ''') await slow_intro_conn.close() + + @tb.with_connection_options(database='asyncpg_intro_test') + async def test_introspection_loads_basetypes_of_domains(self): + # Test that basetypes of domains are loaded to the + # client encode/decode cache + await self.con.execute(''' + DROP TABLE IF EXISTS test; + DROP DOMAIN IF EXISTS num_array; + CREATE DOMAIN num_array numeric[]; + CREATE TABLE test ( + num num_array + ); + ''') + + try: + # if domain basetypes are not loaded, this insert will fail + await self.con.execute( + 'INSERT INTO test (num) VALUES ($1)', ([1, 2],)) + finally: + await self.con.execute(''' + DROP TABLE IF EXISTS test; + DROP DOMAIN IF EXISTS num_array; + ''') From fb3b6bf7d6557b87370eff28141e02aea6256954 Mon Sep 17 00:00:00 2001 From: Krzysztof Warunek Date: Sat, 26 Mar 2022 00:44:22 +0100 Subject: [PATCH 081/193] Add support to use awaitable object in password function. (#889) Add support to use awaitable object in password function. This will allow to pass lambda or `functools.partial` that returns `Future`. 
--- asyncpg/connect_utils.py | 7 +++---- tests/test_connect.py | 19 +++++++++++++++++++ 2 files changed, 22 insertions(+), 4 deletions(-) diff --git a/asyncpg/connect_utils.py b/asyncpg/connect_utils.py index f98935f5..c09bf5e0 100644 --- a/asyncpg/connect_utils.py +++ b/asyncpg/connect_utils.py @@ -757,10 +757,9 @@ async def _connect_addr( params_input = params if callable(params.password): - if inspect.iscoroutinefunction(params.password): - password = await params.password() - else: - password = params.password() + password = params.password() + if inspect.isawaitable(password): + password = await password params = params._replace(password=password) args = (addr, loop, config, connection_class, record_class, params_input) diff --git a/tests/test_connect.py b/tests/test_connect.py index b78f4f48..34ffbb34 100644 --- a/tests/test_connect.py +++ b/tests/test_connect.py @@ -282,6 +282,25 @@ async def get_wrongpassword(): user='password_user', password=get_wrongpassword) + async def test_auth_password_cleartext_callable_awaitable(self): + async def get_correctpassword(): + return 'correctpassword' + + async def get_wrongpassword(): + return 'wrongpassword' + + conn = await self.connect( + user='password_user', + password=lambda: get_correctpassword()) + await conn.close() + + with self.assertRaisesRegex( + asyncpg.InvalidPasswordError, + 'password authentication failed for user "password_user"'): + await self._try_connect( + user='password_user', + password=lambda: get_wrongpassword()) + async def test_auth_password_md5(self): conn = await self.connect( user='md5_user', password='correctpassword') From bd192623d05668b5919d55d3b9a62629594e2fb7 Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Sun, 27 Mar 2022 15:14:46 -0700 Subject: [PATCH 082/193] Properly handle exceptions raised while handling server auth messages (#862) When server sends us an authentication request message and we fail to process it, we must terminate the connection and propagate the 
exception immediately. Currently asyncpg will just timeout waiting for `ReadyForQuery` from the server, which will never arrive. Fixes: #861 --- asyncpg/protocol/coreproto.pyx | 29 +++++++++++++++++++++-------- tests/test_connect.py | 9 +++++++-- 2 files changed, 28 insertions(+), 10 deletions(-) diff --git a/asyncpg/protocol/coreproto.pyx b/asyncpg/protocol/coreproto.pyx index e7d7c2bc..6bf1adc4 100644 --- a/asyncpg/protocol/coreproto.pyx +++ b/asyncpg/protocol/coreproto.pyx @@ -5,7 +5,7 @@ # the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 -from hashlib import md5 as hashlib_md5 # for MD5 authentication +import hashlib include "scram.pyx" @@ -150,15 +150,28 @@ cdef class CoreProtocol: cdef _process__auth(self, char mtype): if mtype == b'R': # Authentication... - self._parse_msg_authentication() - if self.result_type != RESULT_OK: + try: + self._parse_msg_authentication() + except Exception as ex: + # Exception in authentication parsing code + # is usually either malformed authentication data + # or missing support for cryptographic primitives + # in the hashlib module. + self.result_type = RESULT_FAILED + self.result = apg_exc.InternalClientError( + f"unexpected error while performing authentication: {ex}") + self.result.__cause__ = ex self.con_status = CONNECTION_BAD self._push_result() + else: + if self.result_type != RESULT_OK: + self.con_status = CONNECTION_BAD + self._push_result() - elif self.auth_msg is not None: - # Server wants us to send auth data, so do that. - self._write(self.auth_msg) - self.auth_msg = None + elif self.auth_msg is not None: + # Server wants us to send auth data, so do that. 
+ self._write(self.auth_msg) + self.auth_msg = None elif mtype == b'K': # BackendKeyData @@ -634,7 +647,7 @@ cdef class CoreProtocol: # 'md5' + md5(md5(password + username) + salt)) userpass = ((self.password or '') + (self.user or '')).encode('ascii') - hash = hashlib_md5(hashlib_md5(userpass).hexdigest().\ + hash = hashlib.md5(hashlib.md5(userpass).hexdigest().\ encode('ascii') + salt).hexdigest().encode('ascii') msg.write_bytestring(b'md5' + hash) diff --git a/tests/test_connect.py b/tests/test_connect.py index 34ffbb34..d90ad8a4 100644 --- a/tests/test_connect.py +++ b/tests/test_connect.py @@ -359,8 +359,13 @@ async def test_auth_password_scram_sha_256(self): await self.con.execute(alter_password) await self.con.execute("SET password_encryption = 'md5';") - async def test_auth_unsupported(self): - pass + @unittest.mock.patch('hashlib.md5', side_effect=ValueError("no md5")) + async def test_auth_md5_unsupported(self, _): + with self.assertRaisesRegex( + exceptions.InternalClientError, + ".*no md5.*", + ): + await self.connect(user='md5_user', password='correctpassword') class TestConnectParams(tb.TestCase): From f2a937d2f25d1f997a066e6ba02acc3c4de676a4 Mon Sep 17 00:00:00 2001 From: Jack Wotherspoon Date: Mon, 13 Jun 2022 16:39:43 -0400 Subject: [PATCH 083/193] Support direct TLS connections (i.e. no STARTTLS) (#923) Adding direct_tls param that when equal to True alongside the ssl param being set to a ssl.SSLContext will result in a direct SSL connection being made, skipping STARTTLS implementation. 
Closes #906 --- asyncpg/connect_utils.py | 21 +++++++++++++++------ asyncpg/connection.py | 6 ++++++ tests/test_connect.py | 7 ++++++- 3 files changed, 27 insertions(+), 7 deletions(-) diff --git a/asyncpg/connect_utils.py b/asyncpg/connect_utils.py index c09bf5e0..90a61503 100644 --- a/asyncpg/connect_utils.py +++ b/asyncpg/connect_utils.py @@ -53,6 +53,7 @@ def parse(cls, sslmode): 'database', 'ssl', 'sslmode', + 'direct_tls', 'connect_timeout', 'server_settings', ]) @@ -258,7 +259,7 @@ def _dot_postgresql_path(filename) -> pathlib.Path: def _parse_connect_dsn_and_args(*, dsn, host, port, user, password, passfile, database, ssl, - connect_timeout, server_settings): + direct_tls, connect_timeout, server_settings): # `auth_hosts` is the version of host information for the purposes # of reading the pgpass file. auth_hosts = None @@ -601,8 +602,8 @@ def _parse_connect_dsn_and_args(*, dsn, host, port, user, params = _ConnectionParameters( user=user, password=password, database=database, ssl=ssl, - sslmode=sslmode, connect_timeout=connect_timeout, - server_settings=server_settings) + sslmode=sslmode, direct_tls=direct_tls, + connect_timeout=connect_timeout, server_settings=server_settings) return addrs, params @@ -612,7 +613,7 @@ def _parse_connect_arguments(*, dsn, host, port, user, password, passfile, statement_cache_size, max_cached_statement_lifetime, max_cacheable_statement_size, - ssl, server_settings): + ssl, direct_tls, server_settings): local_vars = locals() for var_name in {'max_cacheable_statement_size', @@ -640,8 +641,8 @@ def _parse_connect_arguments(*, dsn, host, port, user, password, passfile, addrs, params = _parse_connect_dsn_and_args( dsn=dsn, host=host, port=port, user=user, password=password, passfile=passfile, ssl=ssl, - database=database, connect_timeout=timeout, - server_settings=server_settings) + direct_tls=direct_tls, database=database, + connect_timeout=timeout, server_settings=server_settings) config = _ClientConfiguration( 
command_timeout=command_timeout, @@ -812,6 +813,14 @@ async def __connect_addr( if isinstance(addr, str): # UNIX socket connector = loop.create_unix_connection(proto_factory, addr) + + elif params.ssl and params.direct_tls: + # if ssl and direct_tls are given, skip STARTTLS and perform direct + # SSL connection + connector = loop.create_connection( + proto_factory, *addr, ssl=params.ssl + ) + elif params.ssl: connector = _create_ssl_connection( proto_factory, *addr, loop=loop, ssl_context=params.ssl, diff --git a/asyncpg/connection.py b/asyncpg/connection.py index 3914826a..3327360b 100644 --- a/asyncpg/connection.py +++ b/asyncpg/connection.py @@ -1789,6 +1789,7 @@ async def connect(dsn=None, *, max_cacheable_statement_size=1024 * 15, command_timeout=None, ssl=None, + direct_tls=False, connection_class=Connection, record_class=protocol.Record, server_settings=None): @@ -1984,6 +1985,10 @@ async def connect(dsn=None, *, ... await con.close() >>> asyncio.run(run()) + :param bool direct_tls: + Pass ``True`` to skip PostgreSQL STARTTLS mode and perform a direct + SSL connection. Must be used alongside ``ssl`` param. + :param dict server_settings: An optional dict of server runtime parameters. 
Refer to PostgreSQL documentation for @@ -2094,6 +2099,7 @@ async def connect(dsn=None, *, password=password, passfile=passfile, ssl=ssl, + direct_tls=direct_tls, database=database, server_settings=server_settings, command_timeout=command_timeout, diff --git a/tests/test_connect.py b/tests/test_connect.py index d90ad8a4..db7817f6 100644 --- a/tests/test_connect.py +++ b/tests/test_connect.py @@ -811,7 +811,8 @@ def run_testcase(self, testcase): addrs, params = connect_utils._parse_connect_dsn_and_args( dsn=dsn, host=host, port=port, user=user, password=password, passfile=passfile, database=database, ssl=sslmode, - connect_timeout=None, server_settings=server_settings) + direct_tls=False, connect_timeout=None, + server_settings=server_settings) params = { k: v for k, v in params._asdict().items() @@ -829,6 +830,10 @@ def run_testcase(self, testcase): # unless explicitly tested for. params.pop('ssl', None) params.pop('sslmode', None) + if 'direct_tls' not in expected[1]: + # Avoid the hassle of specifying direct_tls + # unless explicitly tested for + params.pop('direct_tls', False) self.assertEqual(expected, result, 'Testcase: {}'.format(testcase)) From d502abb7ed2307c6220649590aa74a3455d5e06c Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Thu, 7 Jul 2022 11:55:06 -0700 Subject: [PATCH 084/193] Document direct_tls support version --- asyncpg/connection.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/asyncpg/connection.py b/asyncpg/connection.py index 3327360b..ea128aab 100644 --- a/asyncpg/connection.py +++ b/asyncpg/connection.py @@ -2065,6 +2065,9 @@ async def connect(dsn=None, *, in the *dsn* argument now have consistent default values of files under ``~/.postgresql/`` as libpq. + .. versionchanged:: 0.26.0 + Added the *direct_tls* parameter. + .. _SSLContext: https://docs.python.org/3/library/ssl.html#ssl.SSLContext .. 
_create_default_context: https://docs.python.org/3/library/ssl.html#ssl.create_default_context From 156216f192d6d7bab17fbac8ffd52c6d0960c280 Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Thu, 7 Jul 2022 11:57:19 -0700 Subject: [PATCH 085/193] Remove "project" section from pyproject.toml Rely on metadata inferred from `setup.py` instead for now --- pyproject.toml | 4 ---- 1 file changed, 4 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index e6e32289..71beae87 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,7 +1,3 @@ -[project] -name = "asyncpg" -requires-python = ">=3.6" - [build-system] requires = ["setuptools>=42", "wheel"] build-backend = "setuptools.build_meta" From 9825bbb61140e60489b8d5649a288d1f67c0ef9f Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Thu, 7 Jul 2022 12:02:35 -0700 Subject: [PATCH 086/193] asyncpg v0.26.0 Changes ------- * Add support to use awaitable object in password function. (#889) (by @kwarunek in fb3b6bf7 for #889) * Support direct TLS connections (i.e. 
no STARTTLS) (#923) (by @jackwotherspoon in f2a937d2 for #923) Fixes ----- * Fix invalid `pyproject.toml` (#900) (by @Rongronggg9 in eddb649c for #900) * Add record_class parameter Pool.fetch and Pool.fetchrow (#896) (by @baltitenger in 2519cf38 for #896) * Domain basetypes are introspected (#886) (#887) (by @QuantumTM in cca4a2d3 for #886) * Properly handle exceptions raised while handling server auth messages (#862) (by @elprans in bd192623 for #862) --- .github/workflows/release.yml | 2 +- asyncpg/_version.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index d1e2cfd5..e984a351 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -95,7 +95,7 @@ jobs: fetch-depth: 50 submodules: true - - uses: pypa/cibuildwheel@v2.2.2 + - uses: pypa/cibuildwheel@v2.8.0 env: CIBW_BUILD_VERBOSITY: 1 CIBW_BUILD: ${{ matrix.cibw_python }} diff --git a/asyncpg/_version.py b/asyncpg/_version.py index ce45e9c8..7e897c90 100644 --- a/asyncpg/_version.py +++ b/asyncpg/_version.py @@ -10,4 +10,4 @@ # supported platforms, publish the packages on PyPI, merge the PR # to the target branch, create a Git tag pointing to the commit. 
-__version__ = '0.26.0.dev0' +__version__ = '0.26.0' From 7bd6c49f8a58d02ae5a9567d4303f9a8d17b513e Mon Sep 17 00:00:00 2001 From: ddelange <14880945+ddelange@users.noreply.github.com> Date: Tue, 27 Sep 2022 21:57:03 +0200 Subject: [PATCH 087/193] Add arm64 mac and linux wheels (#954) --- .github/workflows/install-postgres.sh | 9 +++-- .github/workflows/release.yml | 52 +++++++++++++++++++-------- tests/test_introspection.py | 2 +- 3 files changed, 45 insertions(+), 18 deletions(-) diff --git a/.github/workflows/install-postgres.sh b/.github/workflows/install-postgres.sh index c3f27186..4ffbb4d6 100755 --- a/.github/workflows/install-postgres.sh +++ b/.github/workflows/install-postgres.sh @@ -27,11 +27,16 @@ if [ "${ID}" = "debian" -o "${ID}" = "ubuntu" ]; then apt-get install -y --no-install-recommends \ "postgresql-${PGVERSION}" \ "postgresql-contrib-${PGVERSION}" +elif [ "${ID}" = "almalinux" ]; then + yum install -y \ + "postgresql-server" \ + "postgresql-devel" \ + "postgresql-contrib" elif [ "${ID}" = "centos" ]; then - el="EL-${VERSION_ID}-$(arch)" + el="EL-${VERSION_ID%.*}-$(arch)" baseurl="https://download.postgresql.org/pub/repos/yum/reporpms" yum install -y "${baseurl}/${el}/pgdg-redhat-repo-latest.noarch.rpm" - if [ ${VERSION_ID} -ge 8 ]; then + if [ ${VERSION_ID%.*} -ge 8 ]; then dnf -qy module disable postgresql fi yum install -y \ diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index e984a351..01b97a84 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -56,7 +56,7 @@ jobs: submodules: true - name: Set up Python - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 - name: Build source distribution run: | @@ -68,19 +68,35 @@ jobs: name: dist path: dist/*.tar.* - build-wheels: + build-wheels-matrix: needs: validate-release-request + runs-on: ubuntu-latest + outputs: + include: ${{ steps.set-matrix.outputs.include }} + steps: + - uses: actions/checkout@v2 + - uses: 
actions/setup-python@v4 + - run: pip install cibuildwheel==2.10.2 + - id: set-matrix + run: | + MATRIX_INCLUDE=$( + { + cibuildwheel --print-build-identifiers --platform linux --arch x86_64,aarch64 | grep cp | jq -Rc '{"only": inputs, "os": "ubuntu-latest"}' \ + && cibuildwheel --print-build-identifiers --platform macos --arch x86_64,arm64 | grep cp | jq -Rc '{"only": inputs, "os": "macos-latest"}' \ + && cibuildwheel --print-build-identifiers --platform windows --arch x86,AMD64 | grep cp | jq -Rc '{"only": inputs, "os": "windows-latest"}' + } | jq -sc + ) + echo ::set-output name=include::"$MATRIX_INCLUDE" + + build-wheels: + needs: build-wheels-matrix runs-on: ${{ matrix.os }} + continue-on-error: true + name: Build ${{ matrix.only }} + strategy: matrix: - os: [ubuntu-latest, macos-latest, windows-latest] - cibw_python: ["cp36-*", "cp37-*", "cp38-*", "cp39-*", "cp310-*"] - cibw_arch: ["auto64", "auto32"] - exclude: - - os: macos-latest - cibw_arch: "auto32" - - os: ubuntu-latest - cibw_arch: "auto32" + include: ${{ fromJson(needs.build-wheels-matrix.outputs.include) }} defaults: run: @@ -94,12 +110,18 @@ jobs: with: fetch-depth: 50 submodules: true + + - name: Set up QEMU + if: runner.os == 'Linux' + uses: docker/setup-qemu-action@v2 - - uses: pypa/cibuildwheel@v2.8.0 + - uses: pypa/cibuildwheel@v2.10.2 + with: + only: ${{ matrix.only }} env: CIBW_BUILD_VERBOSITY: 1 - CIBW_BUILD: ${{ matrix.cibw_python }} - CIBW_ARCHS: ${{ matrix.cibw_arch }} + CIBW_MANYLINUX_X86_64_IMAGE: manylinux_2_28 + CIBW_MANYLINUX_AARCH64_IMAGE: manylinux_2_28 - uses: actions/upload-artifact@v2 with: @@ -107,7 +129,7 @@ jobs: path: wheelhouse/*.whl publish-docs: - needs: validate-release-request + needs: [build-sdist, build-wheels] runs-on: ubuntu-latest env: @@ -121,7 +143,7 @@ jobs: submodules: true - name: Set up Python - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: python-version: 3.8 diff --git a/tests/test_introspection.py b/tests/test_introspection.py index 
56f1d7a3..78561dd0 100644 --- a/tests/test_introspection.py +++ b/tests/test_introspection.py @@ -12,7 +12,7 @@ from asyncpg import connection as apg_con -MAX_RUNTIME = 0.1 +MAX_RUNTIME = 0.25 class SlowIntrospectionConnection(apg_con.Connection): From 5f908e679a6264c5fcf8a92895a2f34a9387e4da Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Tue, 27 Sep 2022 13:44:08 -0700 Subject: [PATCH 088/193] Add Python 3.11 to the test matrix (#948) --- .github/workflows/tests.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index d61573db..3a95aaf3 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -17,7 +17,7 @@ jobs: # job. strategy: matrix: - python-version: ["3.6", "3.7", "3.8", "3.9", "3.10"] + python-version: ["3.6", "3.7", "3.8", "3.9", "3.10", "3.11.0-rc.2"] os: [ubuntu-latest, macos-latest, windows-latest] loop: [asyncio, uvloop] exclude: From 40b16ea65f8b634a392e7e5b6509ee7dda45c4cd Mon Sep 17 00:00:00 2001 From: Jon Parise Date: Tue, 4 Oct 2022 10:28:31 -0700 Subject: [PATCH 089/193] Exclude .venv from flake8 (#958) Virtual environment directories are often named `.venv` by convention. --- .flake8 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.flake8 b/.flake8 index 9697fc96..3a8b87a8 100644 --- a/.flake8 +++ b/.flake8 @@ -1,3 +1,3 @@ [flake8] ignore = E402,E731,W503,W504,E252 -exclude = .git,__pycache__,build,dist,.eggs,.github,.local +exclude = .git,__pycache__,build,dist,.eggs,.github,.local,.venv From 0e73fec27884d94d8205f2d0a71dc74b5ec6fc49 Mon Sep 17 00:00:00 2001 From: Jon Parise Date: Tue, 11 Oct 2022 11:29:23 -0700 Subject: [PATCH 090/193] Upgrade to flake8 5.0.4 (from 3.9.2) (#961) This moves the project to a more modern version of flake8 (and its dependencies). No new lint issues were identified by this upgrade. 
--- setup.py | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/setup.py b/setup.py index 332bad3f..78cc0fd3 100644 --- a/setup.py +++ b/setup.py @@ -29,11 +29,7 @@ # Minimal dependencies required to test asyncpg. TEST_DEPENDENCIES = [ - # pycodestyle is a dependency of flake8, but it must be frozen because - # their combination breaks too often - # (example breakage: https://gitlab.com/pycqa/flake8/issues/427) - 'pycodestyle~=2.7.0', - 'flake8~=3.9.2', + 'flake8~=5.0.4', 'uvloop>=0.15.3; platform_system != "Windows" and python_version >= "3.7"', ] From 84c99bfda3885bbbf952e861bc315e9fb4443454 Mon Sep 17 00:00:00 2001 From: Jon Parise Date: Tue, 11 Oct 2022 14:10:27 -0700 Subject: [PATCH 091/193] Show an example of a custom Record class (#960) This demonstrates a dot-notation implementation as suggested by this FAQ item. --- docs/faq.rst | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/docs/faq.rst b/docs/faq.rst index 664e49bd..52e5f9e3 100644 --- a/docs/faq.rst +++ b/docs/faq.rst @@ -34,6 +34,12 @@ class that implements dot-notation via the ``record_class`` argument to :func:`connect() ` or any of the Record-returning methods. +.. code-block:: python + + class MyRecord(asyncpg.Record): + def __getattr__(self, name): + return self[name] + Why can't I use a :ref:`cursor ` outside of a transaction? ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ From eccdf61afb0116f9500f6fb2f832058ba8eb463e Mon Sep 17 00:00:00 2001 From: Jon Parise Date: Tue, 11 Oct 2022 15:35:48 -0700 Subject: [PATCH 092/193] Use the exact type name in Record.__repr__ (#959) We support Record subclasses, so include the exact type name (rather than just 'Record') in the repr() string. 
--- asyncpg/protocol/record/recordobj.c | 29 +++++++++++++++++++++++++---- tests/test_record.py | 1 + 2 files changed, 26 insertions(+), 4 deletions(-) diff --git a/asyncpg/protocol/record/recordobj.c b/asyncpg/protocol/record/recordobj.c index 4bf34c8a..c0049217 100644 --- a/asyncpg/protocol/record/recordobj.c +++ b/asyncpg/protocol/record/recordobj.c @@ -451,16 +451,31 @@ record_subscript(ApgRecordObject* o, PyObject* item) } +static const char * +get_typename(PyTypeObject *type) +{ + assert(type->tp_name != NULL); + const char *s = strrchr(type->tp_name, '.'); + if (s == NULL) { + s = type->tp_name; + } + else { + s++; + } + return s; +} + + static PyObject * record_repr(ApgRecordObject *v) { Py_ssize_t i, n; - PyObject *keys_iter; + PyObject *keys_iter, *type_prefix; _PyUnicodeWriter writer; n = Py_SIZE(v); if (n == 0) { - return PyUnicode_FromString(""); + return PyUnicode_FromFormat("<%s>", get_typename(Py_TYPE(v))); } keys_iter = PyObject_GetIter(v->desc->keys); @@ -471,16 +486,22 @@ record_repr(ApgRecordObject *v) i = Py_ReprEnter((PyObject *)v); if (i != 0) { Py_DECREF(keys_iter); - return i > 0 ? 
PyUnicode_FromString("") : NULL; + if (i > 0) { + return PyUnicode_FromFormat("<%s ...>", get_typename(Py_TYPE(v))); + } + return NULL; } _PyUnicodeWriter_Init(&writer); writer.overallocate = 1; writer.min_length = 12; /* */ - if (_PyUnicodeWriter_WriteASCIIString(&writer, "") self.assertEqual(list(r.items()), [('a', 1), ('b', '2')]) self.assertEqual(list(r.keys()), ['a', 'b']) From bb0cb39de43964599f944c51a35edc4df5cbd6fb Mon Sep 17 00:00:00 2001 From: Bryan Forbes Date: Wed, 26 Oct 2022 14:34:12 -0500 Subject: [PATCH 093/193] Drop Python 3.6 support (#940) --- .github/workflows/tests.yml | 5 +---- README.rst | 2 +- asyncpg/compat.py | 10 ---------- asyncpg/connect_utils.py | 10 +--------- asyncpg/connection.py | 3 +-- asyncpg/pool.py | 4 ---- asyncpg/protocol/scram.pyx | 11 ++--------- docs/index.rst | 2 +- setup.py | 9 ++++----- tests/test_connect.py | 5 ----- tests/test_pool.py | 4 ---- 11 files changed, 11 insertions(+), 54 deletions(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 3a95aaf3..f451cff8 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -17,13 +17,10 @@ jobs: # job. strategy: matrix: - python-version: ["3.6", "3.7", "3.8", "3.9", "3.10", "3.11.0-rc.2"] + python-version: ["3.7", "3.8", "3.9", "3.10", "3.11.0-rc.2"] os: [ubuntu-latest, macos-latest, windows-latest] loop: [asyncio, uvloop] exclude: - # uvloop does not support Python 3.6 - - loop: uvloop - python-version: "3.6" # uvloop does not support windows - loop: uvloop os: windows-latest diff --git a/README.rst b/README.rst index 2ed14726..01a28c00 100644 --- a/README.rst +++ b/README.rst @@ -13,7 +13,7 @@ of PostgreSQL server binary protocol for use with Python's ``asyncio`` framework. You can read more about asyncpg in an introductory `blog post `_. -asyncpg requires Python 3.6 or later and is supported for PostgreSQL +asyncpg requires Python 3.7 or later and is supported for PostgreSQL versions 9.5 to 14. 
Older PostgreSQL versions or other databases implementing the PostgreSQL protocol *may* work, but are not being actively tested. diff --git a/asyncpg/compat.py b/asyncpg/compat.py index 348b8caa..29b8e16e 100644 --- a/asyncpg/compat.py +++ b/asyncpg/compat.py @@ -8,10 +8,8 @@ import asyncio import pathlib import platform -import sys -PY_37 = sys.version_info >= (3, 7) SYSTEM = platform.uname().system @@ -36,14 +34,6 @@ def get_pg_home_directory() -> pathlib.Path: return pathlib.Path.home() -if PY_37: - def current_asyncio_task(loop): - return asyncio.current_task(loop) -else: - def current_asyncio_task(loop): - return asyncio.Task.current_task(loop) - - async def wait_closed(stream): # Not all asyncio versions have StreamWriter.wait_closed(). if hasattr(stream, 'wait_closed'): diff --git a/asyncpg/connect_utils.py b/asyncpg/connect_utils.py index 90a61503..40905edf 100644 --- a/asyncpg/connect_utils.py +++ b/asyncpg/connect_utils.py @@ -237,10 +237,6 @@ def _parse_hostlist(hostlist, port, *, unquote=False): def _parse_tls_version(tls_version): - if not hasattr(ssl_module, 'TLSVersion'): - raise ValueError( - "TLSVersion is not supported in this version of Python" - ) if tls_version.startswith('SSL'): raise ValueError( f"Unsupported TLS version: {tls_version}" @@ -573,11 +569,7 @@ def _parse_connect_dsn_and_args(*, dsn, host, port, user, ssl_min_protocol_version ) else: - try: - ssl.minimum_version = _parse_tls_version('TLSv1.2') - except ValueError: - # Python 3.6 does not have ssl.TLSVersion - pass + ssl.minimum_version = _parse_tls_version('TLSv1.2') if ssl_max_protocol_version is None: ssl_max_protocol_version = os.getenv('PGSSLMAXPROTOCOLVERSION') diff --git a/asyncpg/connection.py b/asyncpg/connection.py index ea128aab..365ab416 100644 --- a/asyncpg/connection.py +++ b/asyncpg/connection.py @@ -20,7 +20,6 @@ import warnings import weakref -from . import compat from . import connect_utils from . import cursor from . 
import exceptions @@ -1468,7 +1467,7 @@ async def _cancel(self, waiter): waiter.set_exception(ex) finally: self._cancellations.discard( - compat.current_asyncio_task(self._loop)) + asyncio.current_task(self._loop)) if not waiter.done(): waiter.set_result(None) diff --git a/asyncpg/pool.py b/asyncpg/pool.py index 14e4be7e..9bd2a3e3 100644 --- a/asyncpg/pool.py +++ b/asyncpg/pool.py @@ -43,10 +43,6 @@ def __new__(mcls, name, bases, dct, *, wrap=False): return super().__new__(mcls, name, bases, dct) - def __init__(cls, name, bases, dct, *, wrap=False): - # Needed for Python 3.5 to handle `wrap` class keyword argument. - super().__init__(name, bases, dct) - @staticmethod def _wrap_connection_method(meth_name): def call_con_method(self, *args, **kwargs): diff --git a/asyncpg/protocol/scram.pyx b/asyncpg/protocol/scram.pyx index bfb82f73..765ddd46 100644 --- a/asyncpg/protocol/scram.pyx +++ b/asyncpg/protocol/scram.pyx @@ -9,18 +9,11 @@ import base64 import hashlib import hmac import re +import secrets import stringprep import unicodedata -# try to import the secrets library from Python 3.6+ for the -# cryptographic token generator for generating nonces as part of SCRAM -# Otherwise fall back on os.urandom -try: - from secrets import token_bytes as generate_token_bytes -except ImportError: - from os import urandom as generate_token_bytes - @cython.final cdef class SCRAMAuthentication: """Contains the protocol for generating and a SCRAM hashed password. @@ -198,7 +191,7 @@ cdef class SCRAMAuthentication: cdef: bytes token - token = generate_token_bytes(num_bytes) + token = secrets.token_bytes(num_bytes) return base64.b64encode(token) diff --git a/docs/index.rst b/docs/index.rst index 87c43aa8..ee9f85d4 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -14,7 +14,7 @@ PostgreSQL and Python/asyncio. asyncpg is an efficient, clean implementation of PostgreSQL server binary protocol for use with Python's ``asyncio`` framework. 
-**asyncpg** requires Python 3.6 or later and is supported for PostgreSQL +**asyncpg** requires Python 3.7 or later and is supported for PostgreSQL versions 9.5 to 14. Older PostgreSQL versions or other databases implementing the PostgreSQL protocol *may* work, but are not being actively tested. diff --git a/setup.py b/setup.py index 78cc0fd3..af0bcdc3 100644 --- a/setup.py +++ b/setup.py @@ -7,8 +7,8 @@ import sys -if sys.version_info < (3, 6): - raise RuntimeError('asyncpg requires Python 3.6 or greater') +if sys.version_info < (3, 7): + raise RuntimeError('asyncpg requires Python 3.7 or greater') import os import os.path @@ -30,7 +30,7 @@ # Minimal dependencies required to test asyncpg. TEST_DEPENDENCIES = [ 'flake8~=5.0.4', - 'uvloop>=0.15.3; platform_system != "Windows" and python_version >= "3.7"', + 'uvloop>=0.15.3; platform_system != "Windows"', ] # Dependencies required to build documentation. @@ -255,7 +255,6 @@ def finalize_options(self): 'Operating System :: MacOS :: MacOS X', 'Operating System :: Microsoft :: Windows', 'Programming Language :: Python :: 3 :: Only', - 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', 'Programming Language :: Python :: 3.9', @@ -264,7 +263,7 @@ def finalize_options(self): 'Topic :: Database :: Front-Ends', ], platforms=['macOS', 'POSIX', 'Windows'], - python_requires='>=3.6.0', + python_requires='>=3.7.0', zip_safe=False, author='MagicStack Inc', author_email='hello@magic.io', diff --git a/tests/test_connect.py b/tests/test_connect.py index db7817f6..c8da7e29 100644 --- a/tests/test_connect.py +++ b/tests/test_connect.py @@ -14,7 +14,6 @@ import shutil import ssl import stat -import sys import tempfile import textwrap import unittest @@ -1466,10 +1465,6 @@ async def test_executemany_uvloop_ssl_issue_700(self): finally: await con.close() - @unittest.skipIf( - sys.version_info < (3, 7), - "Python < 3.7 doesn't have ssl.TLSVersion" - ) async def 
test_tls_version(self): if self.cluster.get_pg_version() < (12, 0): self.skipTest("PostgreSQL < 12 cannot set ssl protocol version") diff --git a/tests/test_pool.py b/tests/test_pool.py index e2c99efc..b77783e9 100644 --- a/tests/test_pool.py +++ b/tests/test_pool.py @@ -10,7 +10,6 @@ import os import platform import random -import sys import textwrap import time import unittest @@ -741,7 +740,6 @@ async def test_pool_size_and_capacity(self): self.assertEqual(pool.get_size(), 3) self.assertEqual(pool.get_idle_size(), 0) - @unittest.skipIf(sys.version_info[:2] < (3, 6), 'no asyncgen support') async def test_pool_handles_transaction_exit_in_asyncgen_1(self): pool = await self.create_pool(database='postgres', min_size=1, max_size=1) @@ -763,7 +761,6 @@ class MyException(Exception): async for _ in iterate(con): # noqa raise MyException() - @unittest.skipIf(sys.version_info[:2] < (3, 6), 'no asyncgen support') async def test_pool_handles_transaction_exit_in_asyncgen_2(self): pool = await self.create_pool(database='postgres', min_size=1, max_size=1) @@ -788,7 +785,6 @@ class MyException(Exception): del iterator - @unittest.skipIf(sys.version_info[:2] < (3, 6), 'no asyncgen support') async def test_pool_handles_asyncgen_finalization(self): pool = await self.create_pool(database='postgres', min_size=1, max_size=1) From eab7fdf2f014785cdc2245c8bcb6bc086763b702 Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Wed, 26 Oct 2022 14:35:44 -0700 Subject: [PATCH 094/193] Test on Python 3.11 and PostgreSQL 15, fix workflow deprecations (#968) --- .github/workflows/release.yml | 28 ++++++++++++++-------------- .github/workflows/tests.yml | 12 ++++++------ README.rst | 2 +- docs/index.rst | 2 +- tests/test_connect.py | 4 +++- 5 files changed, 25 insertions(+), 23 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 01b97a84..7f89128e 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -37,7 +37,7 @@ jobs: 
mkdir -p dist/ echo "${VERSION}" > dist/VERSION - - uses: actions/upload-artifact@v2 + - uses: actions/upload-artifact@v3 with: name: dist path: dist/ @@ -50,7 +50,7 @@ jobs: PIP_DISABLE_PIP_VERSION_CHECK: 1 steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 with: fetch-depth: 50 submodules: true @@ -63,7 +63,7 @@ jobs: pip install -U setuptools wheel pip python setup.py sdist - - uses: actions/upload-artifact@v2 + - uses: actions/upload-artifact@v3 with: name: dist path: dist/*.tar.* @@ -74,7 +74,7 @@ jobs: outputs: include: ${{ steps.set-matrix.outputs.include }} steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - uses: actions/setup-python@v4 - run: pip install cibuildwheel==2.10.2 - id: set-matrix @@ -86,7 +86,7 @@ jobs: && cibuildwheel --print-build-identifiers --platform windows --arch x86,AMD64 | grep cp | jq -Rc '{"only": inputs, "os": "windows-latest"}' } | jq -sc ) - echo ::set-output name=include::"$MATRIX_INCLUDE" + echo "include=$MATRIX_INCLUDE" >> $GITHUB_OUTPUT build-wheels: needs: build-wheels-matrix @@ -106,11 +106,11 @@ jobs: PIP_DISABLE_PIP_VERSION_CHECK: 1 steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 with: fetch-depth: 50 submodules: true - + - name: Set up QEMU if: runner.os == 'Linux' uses: docker/setup-qemu-action@v2 @@ -123,7 +123,7 @@ jobs: CIBW_MANYLINUX_X86_64_IMAGE: manylinux_2_28 CIBW_MANYLINUX_AARCH64_IMAGE: manylinux_2_28 - - uses: actions/upload-artifact@v2 + - uses: actions/upload-artifact@v3 with: name: dist path: wheelhouse/*.whl @@ -137,7 +137,7 @@ jobs: steps: - name: Checkout source - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: fetch-depth: 5 submodules: true @@ -153,7 +153,7 @@ jobs: make htmldocs - name: Checkout gh-pages - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: fetch-depth: 5 ref: gh-pages @@ -179,12 +179,12 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 with: fetch-depth: 5 
submodules: false - - uses: actions/download-artifact@v2 + - uses: actions/download-artifact@v3 with: name: dist path: dist/ @@ -193,7 +193,7 @@ jobs: id: relver run: | set -e - echo ::set-output name=version::$(cat dist/VERSION) + echo "version=$(cat dist/VERSION)" >> $GITHUB_OUTPUT rm dist/VERSION - name: Merge and tag the PR @@ -219,7 +219,7 @@ jobs: ls -al dist/ - name: Upload to PyPI - uses: pypa/gh-action-pypi-publish@master + uses: pypa/gh-action-pypi-publish@release/v1 with: user: __token__ password: ${{ secrets.PYPI_TOKEN }} diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index f451cff8..f2340b5c 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -17,7 +17,7 @@ jobs: # job. strategy: matrix: - python-version: ["3.7", "3.8", "3.9", "3.10", "3.11.0-rc.2"] + python-version: ["3.7", "3.8", "3.9", "3.10", "3.11"] os: [ubuntu-latest, macos-latest, windows-latest] loop: [asyncio, uvloop] exclude: @@ -35,7 +35,7 @@ jobs: PIP_DISABLE_PIP_VERSION_CHECK: 1 steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 with: fetch-depth: 50 submodules: true @@ -51,7 +51,7 @@ jobs: __version__\s*=\s*(?:['"])([[:PEP440:]])(?:['"]) - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 if: steps.release.outputs.version == 0 with: python-version: ${{ matrix.python-version }} @@ -76,7 +76,7 @@ jobs: test-postgres: strategy: matrix: - postgres-version: ["9.5", "9.6", "10", "11", "12", "13", "14"] + postgres-version: ["9.5", "9.6", "10", "11", "12", "13", "14", "15"] runs-on: ubuntu-latest @@ -84,7 +84,7 @@ jobs: PIP_DISABLE_PIP_VERSION_CHECK: 1 steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 with: fetch-depth: 50 submodules: true @@ -111,7 +111,7 @@ jobs: >> "${GITHUB_ENV}" - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 if: steps.release.outputs.version == 0 - name: Install 
Python Deps diff --git a/README.rst b/README.rst index 01a28c00..e5212156 100644 --- a/README.rst +++ b/README.rst @@ -14,7 +14,7 @@ framework. You can read more about asyncpg in an introductory `blog post `_. asyncpg requires Python 3.7 or later and is supported for PostgreSQL -versions 9.5 to 14. Older PostgreSQL versions or other databases implementing +versions 9.5 to 15. Older PostgreSQL versions or other databases implementing the PostgreSQL protocol *may* work, but are not being actively tested. diff --git a/docs/index.rst b/docs/index.rst index ee9f85d4..93671abc 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -15,7 +15,7 @@ of PostgreSQL server binary protocol for use with Python's ``asyncio`` framework. **asyncpg** requires Python 3.7 or later and is supported for PostgreSQL -versions 9.5 to 14. Older PostgreSQL versions or other databases implementing +versions 9.5 to 15. Older PostgreSQL versions or other databases implementing the PostgreSQL protocol *may* work, but are not being actively tested. 
Contents diff --git a/tests/test_connect.py b/tests/test_connect.py index c8da7e29..4903fc03 100644 --- a/tests/test_connect.py +++ b/tests/test_connect.py @@ -1287,6 +1287,7 @@ def setUp(self): create_script = [] create_script.append('CREATE ROLE ssl_user WITH LOGIN;') + create_script.append('GRANT ALL ON SCHEMA public TO ssl_user;') self._add_hba_entry() @@ -1301,6 +1302,7 @@ def tearDown(self): self.cluster.trust_local_connections() drop_script = [] + drop_script.append('REVOKE ALL ON SCHEMA public FROM ssl_user;') drop_script.append('DROP ROLE ssl_user;') drop_script = '\n'.join(drop_script) self.loop.run_until_complete(self.con.execute(drop_script)) @@ -1461,7 +1463,7 @@ async def test_executemany_uvloop_ssl_issue_700(self): ) finally: try: - await con.execute('DROP TABLE test_many') + await con.execute('DROP TABLE IF EXISTS test_many') finally: await con.close() From 925cfe15be00776cb9b1a7d76bca74e0b5b2f4f1 Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Wed, 26 Oct 2022 14:37:20 -0700 Subject: [PATCH 095/193] asyncpg v0.27.0 Support Python 3.11 and PostgreSQL 15. This release also drops support for Python 3.6. 
Changes ======= * Add arm64 mac and linux wheels (by @ddelange in 7bd6c49f for #954) * Add Python 3.11 to the test matrix (by @elprans in 5f908e67 for #948) * Exclude .venv from flake8 (#958) (by @jparise in 40b16ea6 for #958) * Upgrade to flake8 5.0.4 (from 3.9.2) (#961) (by @jparise in 0e73fec2 for #961) * Show an example of a custom Record class (#960) (by @jparise in 84c99bfd for #960) * Use the exact type name in Record.__repr__ (#959) (by @jparise in eccdf61a for #959) * Drop Python 3.6 support (#940) (by @bryanforbes in bb0cb39d for #940) * Test on Python 3.11 and PostgreSQL 15, fix workflow deprecations (#968) (by @elprans in eab7fdf2 for #968) --- asyncpg/_version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/asyncpg/_version.py b/asyncpg/_version.py index 7e897c90..263da2e2 100644 --- a/asyncpg/_version.py +++ b/asyncpg/_version.py @@ -10,4 +10,4 @@ # supported platforms, publish the packages on PyPI, merge the PR # to the target branch, create a Git tag pointing to the commit. 
-__version__ = '0.26.0' +__version__ = '0.27.0' From 95cf254c3c0d86b278b702a901ca9e1b2b2d0902 Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Wed, 26 Oct 2022 15:37:04 -0700 Subject: [PATCH 096/193] workflows/release: Don't ignore errors in individual wheel jobs --- .github/workflows/release.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 7f89128e..c042ce4e 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -91,7 +91,6 @@ jobs: build-wheels: needs: build-wheels-matrix runs-on: ${{ matrix.os }} - continue-on-error: true name: Build ${{ matrix.only }} strategy: From a6e2f183507e774aa75de5b6f325fdb2574a544e Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Wed, 26 Oct 2022 15:37:54 -0700 Subject: [PATCH 097/193] Post-release version bump --- asyncpg/_version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/asyncpg/_version.py b/asyncpg/_version.py index 263da2e2..15958dc7 100644 --- a/asyncpg/_version.py +++ b/asyncpg/_version.py @@ -10,4 +10,4 @@ # supported platforms, publish the packages on PyPI, merge the PR # to the target branch, create a Git tag pointing to the commit. 
-__version__ = '0.27.0' +__version__ = '0.27.0.dev0' From 8f6cc98f3f0f159d185e2fd5846b826edfa98048 Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Wed, 26 Oct 2022 15:54:09 -0700 Subject: [PATCH 098/193] workflows: Use python-verion in setup-python actions explicitly Avoids a warning --- .github/workflows/release.yml | 6 +++++- .github/workflows/tests.yml | 2 ++ 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index c042ce4e..31f844ba 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -57,6 +57,8 @@ jobs: - name: Set up Python uses: actions/setup-python@v4 + with: + python-version: "3.x" - name: Build source distribution run: | @@ -76,6 +78,8 @@ jobs: steps: - uses: actions/checkout@v3 - uses: actions/setup-python@v4 + with: + python-version: "3.x" - run: pip install cibuildwheel==2.10.2 - id: set-matrix run: | @@ -144,7 +148,7 @@ jobs: - name: Set up Python uses: actions/setup-python@v4 with: - python-version: 3.8 + python-version: "3.x" - name: Build docs run: | diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index f2340b5c..a120e9a6 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -113,6 +113,8 @@ jobs: - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v4 if: steps.release.outputs.version == 0 + with: + python-version: "3.x" - name: Install Python Deps if: steps.release.outputs.version == 0 From 43bd82c54e29352ae08b1982c23b9cb0cc33717d Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Wed, 26 Oct 2022 15:54:38 -0700 Subject: [PATCH 099/193] Correct the development version --- asyncpg/_version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/asyncpg/_version.py b/asyncpg/_version.py index 15958dc7..693e3bed 100644 --- a/asyncpg/_version.py +++ b/asyncpg/_version.py @@ -10,4 +10,4 @@ # supported platforms, publish the packages on PyPI, merge the 
PR # to the target branch, create a Git tag pointing to the commit. -__version__ = '0.27.0.dev0' +__version__ = '0.28.0.dev0' From d2e710fe296febb3c370319f87ef8c5b32152002 Mon Sep 17 00:00:00 2001 From: Floris van Nee Date: Mon, 28 Nov 2022 18:29:36 +0100 Subject: [PATCH 100/193] Do not try to cleanup statements (#981) This supports a case where we prepare an unnamed statement to inspect the return types. The statement should not be cleaned up afterwards because it is automatically cleaned up by Postgres --- asyncpg/connection.py | 1 + 1 file changed, 1 insertion(+) diff --git a/asyncpg/connection.py b/asyncpg/connection.py index 365ab416..73cb6e66 100644 --- a/asyncpg/connection.py +++ b/asyncpg/connection.py @@ -1416,6 +1416,7 @@ def _mark_stmts_as_closed(self): def _maybe_gc_stmt(self, stmt): if ( stmt.refs == 0 + and stmt.name and not self._stmt_cache.has( (stmt.query, stmt.record_class, stmt.ignore_custom_codec) ) From 9cb2c1ce044768ef7c396976b8abf896fad901f7 Mon Sep 17 00:00:00 2001 From: David Baumgold Date: Fri, 2 Dec 2022 23:41:35 -0600 Subject: [PATCH 101/193] Add Pool.is_closing() method (#973) --- asyncpg/pool.py | 7 +++++++ tests/test_pool.py | 11 +++++++++++ 2 files changed, 18 insertions(+) diff --git a/asyncpg/pool.py b/asyncpg/pool.py index 9bd2a3e3..eaf501f4 100644 --- a/asyncpg/pool.py +++ b/asyncpg/pool.py @@ -446,6 +446,13 @@ async def _initialize(self): await asyncio.gather(*connect_tasks) + def is_closing(self): + """Return ``True`` if the pool is closing or is closed. + + .. versionadded:: 0.28.0 + """ + return self._closed or self._closing + def get_size(self): """Return the current number of connections in this pool. 
diff --git a/tests/test_pool.py b/tests/test_pool.py index b77783e9..5577632c 100644 --- a/tests/test_pool.py +++ b/tests/test_pool.py @@ -740,6 +740,17 @@ async def test_pool_size_and_capacity(self): self.assertEqual(pool.get_size(), 3) self.assertEqual(pool.get_idle_size(), 0) + async def test_pool_closing(self): + async with self.create_pool() as pool: + self.assertFalse(pool.is_closing()) + await pool.close() + self.assertTrue(pool.is_closing()) + + async with self.create_pool() as pool: + self.assertFalse(pool.is_closing()) + pool.terminate() + self.assertTrue(pool.is_closing()) + async def test_pool_handles_transaction_exit_in_asyncgen_1(self): pool = await self.create_pool(database='postgres', min_size=1, max_size=1) From 7df9812a068c95e5dd4aa1d0270db8f177ee1e50 Mon Sep 17 00:00:00 2001 From: Anna Date: Sat, 3 Dec 2022 10:42:18 +0500 Subject: [PATCH 102/193] Fix test_tls_version for LibreSSL (#974) Context: https://github.com/python/cpython/issues/78182 --- tests/test_connect.py | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/tests/test_connect.py b/tests/test_connect.py index 4903fc03..7707a1c9 100644 --- a/tests/test_connect.py +++ b/tests/test_connect.py @@ -1497,13 +1497,14 @@ async def test_tls_version(self): '&ssl_min_protocol_version=TLSv1.1' '&ssl_max_protocol_version=TLSv1.1' ) - with self.assertRaisesRegex(ssl.SSLError, 'no protocols'): - await self.connect( - dsn='postgresql://ssl_user@localhost/postgres' - '?sslmode=require' - '&ssl_min_protocol_version=TLSv1.2' - '&ssl_max_protocol_version=TLSv1.1' - ) + if not ssl.OPENSSL_VERSION.startswith('LibreSSL'): + with self.assertRaisesRegex(ssl.SSLError, 'no protocols'): + await self.connect( + dsn='postgresql://ssl_user@localhost/postgres' + '?sslmode=require' + '&ssl_min_protocol_version=TLSv1.2' + '&ssl_max_protocol_version=TLSv1.1' + ) con = await self.connect( dsn='postgresql://ssl_user@localhost/postgres' '?sslmode=require' From 
247b1a53ac3d460d101320e4ef602b4ca86aab79 Mon Sep 17 00:00:00 2001 From: ddelange <14880945+ddelange@users.noreply.github.com> Date: Mon, 13 Feb 2023 21:50:12 +0100 Subject: [PATCH 103/193] Fix missing PyPI wheels (#993) Fix for jq skipping first line `input` Outputs one new input. `inputs` Outputs all remaining inputs, one by one. https://github.com/pypa/cibuildwheel/discussions/1261#discussioncomment-4734804 --- .github/workflows/release.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 31f844ba..1eba94a5 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -85,9 +85,9 @@ jobs: run: | MATRIX_INCLUDE=$( { - cibuildwheel --print-build-identifiers --platform linux --arch x86_64,aarch64 | grep cp | jq -Rc '{"only": inputs, "os": "ubuntu-latest"}' \ - && cibuildwheel --print-build-identifiers --platform macos --arch x86_64,arm64 | grep cp | jq -Rc '{"only": inputs, "os": "macos-latest"}' \ - && cibuildwheel --print-build-identifiers --platform windows --arch x86,AMD64 | grep cp | jq -Rc '{"only": inputs, "os": "windows-latest"}' + cibuildwheel --print-build-identifiers --platform linux --arch x86_64,aarch64 | grep cp | jq -nRc '{"only": inputs, "os": "ubuntu-latest"}' \ + && cibuildwheel --print-build-identifiers --platform macos --arch x86_64,arm64 | grep cp | jq -nRc '{"only": inputs, "os": "macos-latest"}' \ + && cibuildwheel --print-build-identifiers --platform windows --arch x86,AMD64 | grep cp | jq -nRc '{"only": inputs, "os": "windows-latest"}' } | jq -sc ) echo "include=$MATRIX_INCLUDE" >> $GITHUB_OUTPUT From 172b8f693f4b6885bec60001964fee13fc8de644 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?L=C3=A9onard=20Besson?= Date: Sun, 12 Mar 2023 01:21:14 +0100 Subject: [PATCH 104/193] Handle environments without home dir (#1011) --- asyncpg/compat.py | 10 +++++--- asyncpg/connect_utils.py | 49 ++++++++++++++++++++++++++-------------- 
tests/test_connect.py | 29 ++++++++++++++++++++++++ 3 files changed, 68 insertions(+), 20 deletions(-) diff --git a/asyncpg/compat.py b/asyncpg/compat.py index 29b8e16e..b9b13fa5 100644 --- a/asyncpg/compat.py +++ b/asyncpg/compat.py @@ -8,6 +8,7 @@ import asyncio import pathlib import platform +import typing SYSTEM = platform.uname().system @@ -18,7 +19,7 @@ CSIDL_APPDATA = 0x001a - def get_pg_home_directory() -> pathlib.Path: + def get_pg_home_directory() -> typing.Optional[pathlib.Path]: # We cannot simply use expanduser() as that returns the user's # home directory, whereas Postgres stores its config in # %AppData% on Windows. @@ -30,8 +31,11 @@ def get_pg_home_directory() -> pathlib.Path: return pathlib.Path(buf.value) / 'postgresql' else: - def get_pg_home_directory() -> pathlib.Path: - return pathlib.Path.home() + def get_pg_home_directory() -> typing.Optional[pathlib.Path]: + try: + return pathlib.Path.home() + except (RuntimeError, KeyError): + return None async def wait_closed(stream): diff --git a/asyncpg/connect_utils.py b/asyncpg/connect_utils.py index 40905edf..4d1a3f7d 100644 --- a/asyncpg/connect_utils.py +++ b/asyncpg/connect_utils.py @@ -249,8 +249,13 @@ def _parse_tls_version(tls_version): ) -def _dot_postgresql_path(filename) -> pathlib.Path: - return (pathlib.Path.home() / '.postgresql' / filename).resolve() +def _dot_postgresql_path(filename) -> typing.Optional[pathlib.Path]: + try: + homedir = pathlib.Path.home() + except (RuntimeError, KeyError): + return None + + return (homedir / '.postgresql' / filename).resolve() def _parse_connect_dsn_and_args(*, dsn, host, port, user, @@ -501,11 +506,16 @@ def _parse_connect_dsn_and_args(*, dsn, host, port, user, ssl.load_verify_locations(cafile=sslrootcert) ssl.verify_mode = ssl_module.CERT_REQUIRED else: - sslrootcert = _dot_postgresql_path('root.crt') try: + sslrootcert = _dot_postgresql_path('root.crt') + assert sslrootcert is not None ssl.load_verify_locations(cafile=sslrootcert) - except 
FileNotFoundError: + except (AssertionError, FileNotFoundError): if sslmode > SSLMode.require: + if sslrootcert is None: + raise RuntimeError( + 'Cannot determine home directory' + ) raise ValueError( f'root certificate file "{sslrootcert}" does ' f'not exist\nEither provide the file or ' @@ -526,18 +536,20 @@ def _parse_connect_dsn_and_args(*, dsn, host, port, user, ssl.verify_flags |= ssl_module.VERIFY_CRL_CHECK_CHAIN else: sslcrl = _dot_postgresql_path('root.crl') - try: - ssl.load_verify_locations(cafile=sslcrl) - except FileNotFoundError: - pass - else: - ssl.verify_flags |= ssl_module.VERIFY_CRL_CHECK_CHAIN + if sslcrl is not None: + try: + ssl.load_verify_locations(cafile=sslcrl) + except FileNotFoundError: + pass + else: + ssl.verify_flags |= \ + ssl_module.VERIFY_CRL_CHECK_CHAIN if sslkey is None: sslkey = os.getenv('PGSSLKEY') if not sslkey: sslkey = _dot_postgresql_path('postgresql.key') - if not sslkey.exists(): + if sslkey is not None and not sslkey.exists(): sslkey = None if not sslpassword: sslpassword = '' @@ -549,12 +561,15 @@ def _parse_connect_dsn_and_args(*, dsn, host, port, user, ) else: sslcert = _dot_postgresql_path('postgresql.crt') - try: - ssl.load_cert_chain( - sslcert, keyfile=sslkey, password=lambda: sslpassword - ) - except FileNotFoundError: - pass + if sslcert is not None: + try: + ssl.load_cert_chain( + sslcert, + keyfile=sslkey, + password=lambda: sslpassword + ) + except FileNotFoundError: + pass # OpenSSL 1.1.1 keylog file, copied from create_default_context() if hasattr(ssl, 'keylog_filename'): diff --git a/tests/test_connect.py b/tests/test_connect.py index 7707a1c9..a939db50 100644 --- a/tests/test_connect.py +++ b/tests/test_connect.py @@ -71,6 +71,14 @@ def mock_dot_postgresql(*, ca=True, crl=False, client=False, protected=False): yield +@contextlib.contextmanager +def mock_no_home_dir(): + with unittest.mock.patch( + 'pathlib.Path.home', unittest.mock.Mock(side_effect=RuntimeError) + ): + yield + + class 
TestSettings(tb.ConnectedTestCase): async def test_get_settings_01(self): @@ -1257,6 +1265,27 @@ async def test_connection_implicit_host(self): user=conn_spec.get('user')) await con.close() + @unittest.skipIf(os.environ.get('PGHOST'), 'unmanaged cluster') + async def test_connection_no_home_dir(self): + with mock_no_home_dir(): + con = await self.connect( + dsn='postgresql://foo/', + user='postgres', + database='postgres', + host='localhost') + await con.fetchval('SELECT 42') + await con.close() + + with self.assertRaisesRegex( + RuntimeError, + 'Cannot determine home directory' + ): + with mock_no_home_dir(): + await self.connect( + host='localhost', + user='ssl_user', + ssl='verify-full') + class BaseTestSSLConnection(tb.ConnectedTestCase): @classmethod From 7443a9e77e2eea43eb49c390eb28b0d9f00656e4 Mon Sep 17 00:00:00 2001 From: trigonometr <62100465+trigonometr@users.noreply.github.com> Date: Mon, 8 May 2023 19:09:45 +0300 Subject: [PATCH 105/193] fix: salt and iterations parsing for scram (#1026) Co-authored-by: Yaroslav Panichkin --- asyncpg/protocol/scram.pyx | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/asyncpg/protocol/scram.pyx b/asyncpg/protocol/scram.pyx index 765ddd46..9b485aee 100644 --- a/asyncpg/protocol/scram.pyx +++ b/asyncpg/protocol/scram.pyx @@ -156,12 +156,12 @@ cdef class SCRAMAuthentication: if not self.server_nonce.startswith(self.client_nonce): raise Exception("invalid nonce") try: - self.password_salt = re.search(b's=([^,]+),', + self.password_salt = re.search(b',s=([^,]+),', self.server_first_message).group(1) except IndexError: raise Exception("could not get salt") try: - self.password_iterations = int(re.search(b'i=(\d+),?', + self.password_iterations = int(re.search(b',i=(\d+),?', self.server_first_message).group(1)) except (IndexError, TypeError, ValueError): raise Exception("could not get iterations") From bf74e88bb5193b39cf942a6f0e9b838da3d0214a Mon Sep 17 00:00:00 2001 From: JesseDeLoore Date: Mon, 8 May 
2023 18:18:11 +0200 Subject: [PATCH 106/193] Add support for target_session_attrs (#987) This adds support for the `target_session_attrs` connection option. Co-authored-by: rony batista Co-authored-by: Jesse De Loore --- asyncpg/_testbase/__init__.py | 90 +++++++++++++++++++ asyncpg/cluster.py | 2 +- asyncpg/connect_utils.py | 122 +++++++++++++++++++++++-- asyncpg/connection.py | 20 ++++- asyncpg/exceptions/_base.py | 6 +- tests/test_connect.py | 164 ++++++++++++++++++++++++++++++---- tests/test_pool.py | 48 +--------- 7 files changed, 377 insertions(+), 75 deletions(-) diff --git a/asyncpg/_testbase/__init__.py b/asyncpg/_testbase/__init__.py index 9944b20f..7aca834f 100644 --- a/asyncpg/_testbase/__init__.py +++ b/asyncpg/_testbase/__init__.py @@ -435,3 +435,93 @@ def tearDown(self): self.con = None finally: super().tearDown() + + +class HotStandbyTestCase(ClusterTestCase): + + @classmethod + def setup_cluster(cls): + cls.master_cluster = cls.new_cluster(pg_cluster.TempCluster) + cls.start_cluster( + cls.master_cluster, + server_settings={ + 'max_wal_senders': 10, + 'wal_level': 'hot_standby' + } + ) + + con = None + + try: + con = cls.loop.run_until_complete( + cls.master_cluster.connect( + database='postgres', user='postgres', loop=cls.loop)) + + cls.loop.run_until_complete( + con.execute(''' + CREATE ROLE replication WITH LOGIN REPLICATION + ''')) + + cls.master_cluster.trust_local_replication_by('replication') + + conn_spec = cls.master_cluster.get_connection_spec() + + cls.standby_cluster = cls.new_cluster( + pg_cluster.HotStandbyCluster, + cluster_kwargs={ + 'master': conn_spec, + 'replication_user': 'replication' + } + ) + cls.start_cluster( + cls.standby_cluster, + server_settings={ + 'hot_standby': True + } + ) + + finally: + if con is not None: + cls.loop.run_until_complete(con.close()) + + @classmethod + def get_cluster_connection_spec(cls, cluster, kwargs={}): + conn_spec = cluster.get_connection_spec() + if kwargs.get('dsn'): + conn_spec.pop('host') 
+ conn_spec.update(kwargs) + if not os.environ.get('PGHOST') and not kwargs.get('dsn'): + if 'database' not in conn_spec: + conn_spec['database'] = 'postgres' + if 'user' not in conn_spec: + conn_spec['user'] = 'postgres' + return conn_spec + + @classmethod + def get_connection_spec(cls, kwargs={}): + primary_spec = cls.get_cluster_connection_spec( + cls.master_cluster, kwargs + ) + standby_spec = cls.get_cluster_connection_spec( + cls.standby_cluster, kwargs + ) + return { + 'host': [primary_spec['host'], standby_spec['host']], + 'port': [primary_spec['port'], standby_spec['port']], + 'database': primary_spec['database'], + 'user': primary_spec['user'], + **kwargs + } + + @classmethod + def connect_primary(cls, **kwargs): + conn_spec = cls.get_cluster_connection_spec(cls.master_cluster, kwargs) + return pg_connection.connect(**conn_spec, loop=cls.loop) + + @classmethod + def connect_standby(cls, **kwargs): + conn_spec = cls.get_cluster_connection_spec( + cls.standby_cluster, + kwargs + ) + return pg_connection.connect(**conn_spec, loop=cls.loop) diff --git a/asyncpg/cluster.py b/asyncpg/cluster.py index 0999e41c..4467cc2a 100644 --- a/asyncpg/cluster.py +++ b/asyncpg/cluster.py @@ -626,7 +626,7 @@ def init(self, **settings): 'pg_basebackup init exited with status {:d}:\n{}'.format( process.returncode, output.decode())) - if self._pg_version <= (11, 0): + if self._pg_version < (12, 0): with open(os.path.join(self._data_dir, 'recovery.conf'), 'w') as f: f.write(textwrap.dedent("""\ standby_mode = 'on' diff --git a/asyncpg/connect_utils.py b/asyncpg/connect_utils.py index 4d1a3f7d..8b29c0fc 100644 --- a/asyncpg/connect_utils.py +++ b/asyncpg/connect_utils.py @@ -13,6 +13,7 @@ import os import pathlib import platform +import random import re import socket import ssl as ssl_module @@ -56,6 +57,7 @@ def parse(cls, sslmode): 'direct_tls', 'connect_timeout', 'server_settings', + 'target_session_attrs', ]) @@ -260,7 +262,8 @@ def _dot_postgresql_path(filename) -> 
typing.Optional[pathlib.Path]: def _parse_connect_dsn_and_args(*, dsn, host, port, user, password, passfile, database, ssl, - direct_tls, connect_timeout, server_settings): + direct_tls, connect_timeout, server_settings, + target_session_attrs): # `auth_hosts` is the version of host information for the purposes # of reading the pgpass file. auth_hosts = None @@ -607,10 +610,28 @@ def _parse_connect_dsn_and_args(*, dsn, host, port, user, 'server_settings is expected to be None or ' 'a Dict[str, str]') + if target_session_attrs is None: + + target_session_attrs = os.getenv( + "PGTARGETSESSIONATTRS", SessionAttribute.any + ) + try: + + target_session_attrs = SessionAttribute(target_session_attrs) + except ValueError as exc: + raise exceptions.InterfaceError( + "target_session_attrs is expected to be one of " + "{!r}" + ", got {!r}".format( + SessionAttribute.__members__.values, target_session_attrs + ) + ) from exc + params = _ConnectionParameters( user=user, password=password, database=database, ssl=ssl, sslmode=sslmode, direct_tls=direct_tls, - connect_timeout=connect_timeout, server_settings=server_settings) + connect_timeout=connect_timeout, server_settings=server_settings, + target_session_attrs=target_session_attrs) return addrs, params @@ -620,8 +641,8 @@ def _parse_connect_arguments(*, dsn, host, port, user, password, passfile, statement_cache_size, max_cached_statement_lifetime, max_cacheable_statement_size, - ssl, direct_tls, server_settings): - + ssl, direct_tls, server_settings, + target_session_attrs): local_vars = locals() for var_name in {'max_cacheable_statement_size', 'max_cached_statement_lifetime', @@ -649,7 +670,8 @@ def _parse_connect_arguments(*, dsn, host, port, user, password, passfile, dsn=dsn, host=host, port=port, user=user, password=password, passfile=passfile, ssl=ssl, direct_tls=direct_tls, database=database, - connect_timeout=timeout, server_settings=server_settings) + connect_timeout=timeout, server_settings=server_settings, + 
target_session_attrs=target_session_attrs) config = _ClientConfiguration( command_timeout=command_timeout, @@ -882,18 +904,84 @@ async def __connect_addr( return con +class SessionAttribute(str, enum.Enum): + any = 'any' + primary = 'primary' + standby = 'standby' + prefer_standby = 'prefer-standby' + read_write = "read-write" + read_only = "read-only" + + +def _accept_in_hot_standby(should_be_in_hot_standby: bool): + """ + If the server didn't report "in_hot_standby" at startup, we must determine + the state by checking "SELECT pg_catalog.pg_is_in_recovery()". + If the server allows a connection and states it is in recovery it must + be a replica/standby server. + """ + async def can_be_used(connection): + settings = connection.get_settings() + hot_standby_status = getattr(settings, 'in_hot_standby', None) + if hot_standby_status is not None: + is_in_hot_standby = hot_standby_status == 'on' + else: + is_in_hot_standby = await connection.fetchval( + "SELECT pg_catalog.pg_is_in_recovery()" + ) + return is_in_hot_standby == should_be_in_hot_standby + + return can_be_used + + +def _accept_read_only(should_be_read_only: bool): + """ + Verify the server has not set default_transaction_read_only=True + """ + async def can_be_used(connection): + settings = connection.get_settings() + is_readonly = getattr(settings, 'default_transaction_read_only', 'off') + + if is_readonly == "on": + return should_be_read_only + + return await _accept_in_hot_standby(should_be_read_only)(connection) + return can_be_used + + +async def _accept_any(_): + return True + + +target_attrs_check = { + SessionAttribute.any: _accept_any, + SessionAttribute.primary: _accept_in_hot_standby(False), + SessionAttribute.standby: _accept_in_hot_standby(True), + SessionAttribute.prefer_standby: _accept_in_hot_standby(True), + SessionAttribute.read_write: _accept_read_only(False), + SessionAttribute.read_only: _accept_read_only(True), +} + + +async def _can_use_connection(connection, attr: SessionAttribute): 
+ can_use = target_attrs_check[attr] + return await can_use(connection) + + async def _connect(*, loop, timeout, connection_class, record_class, **kwargs): if loop is None: loop = asyncio.get_event_loop() addrs, params, config = _parse_connect_arguments(timeout=timeout, **kwargs) + target_attr = params.target_session_attrs + candidates = [] + chosen_connection = None last_error = None - addr = None for addr in addrs: before = time.monotonic() try: - return await _connect_addr( + conn = await _connect_addr( addr=addr, loop=loop, timeout=timeout, @@ -902,12 +990,30 @@ async def _connect(*, loop, timeout, connection_class, record_class, **kwargs): connection_class=connection_class, record_class=record_class, ) + candidates.append(conn) + if await _can_use_connection(conn, target_attr): + chosen_connection = conn + break except (OSError, asyncio.TimeoutError, ConnectionError) as ex: last_error = ex finally: timeout -= time.monotonic() - before + else: + if target_attr == SessionAttribute.prefer_standby and candidates: + chosen_connection = random.choice(candidates) + + await asyncio.gather( + (c.close() for c in candidates if c is not chosen_connection), + return_exceptions=True + ) + + if chosen_connection: + return chosen_connection - raise last_error + raise last_error or exceptions.TargetServerAttributeNotMatched( + 'None of the hosts match the target attribute requirement ' + '{!r}'.format(target_attr) + ) async def _cancel(*, loop, addr, params: _ConnectionParameters, diff --git a/asyncpg/connection.py b/asyncpg/connection.py index 73cb6e66..432fcef6 100644 --- a/asyncpg/connection.py +++ b/asyncpg/connection.py @@ -1792,7 +1792,8 @@ async def connect(dsn=None, *, direct_tls=False, connection_class=Connection, record_class=protocol.Record, - server_settings=None): + server_settings=None, + target_session_attrs=None): r"""A coroutine to establish a connection to a PostgreSQL server. 
The connection parameters may be specified either as a connection @@ -2003,6 +2004,22 @@ async def connect(dsn=None, *, this connection object. Must be a subclass of :class:`~asyncpg.Record`. + :param SessionAttribute target_session_attrs: + If specified, check that the host has the correct attribute. + Can be one of: + "any": the first successfully connected host + "primary": the host must NOT be in hot standby mode + "standby": the host must be in hot standby mode + "read-write": the host must allow writes + "read-only": the host most NOT allow writes + "prefer-standby": first try to find a standby host, but if + none of the listed hosts is a standby server, + return any of them. + + If not specified will try to use PGTARGETSESSIONATTRS + from the environment. + Defaults to "any" if no value is set. + :return: A :class:`~asyncpg.connection.Connection` instance. Example: @@ -2109,6 +2126,7 @@ async def connect(dsn=None, *, statement_cache_size=statement_cache_size, max_cached_statement_lifetime=max_cached_statement_lifetime, max_cacheable_statement_size=max_cacheable_statement_size, + target_session_attrs=target_session_attrs ) diff --git a/asyncpg/exceptions/_base.py b/asyncpg/exceptions/_base.py index 783b5eb5..de981d25 100644 --- a/asyncpg/exceptions/_base.py +++ b/asyncpg/exceptions/_base.py @@ -13,7 +13,7 @@ __all__ = ('PostgresError', 'FatalPostgresError', 'UnknownPostgresError', 'InterfaceError', 'InterfaceWarning', 'PostgresLogMessage', 'InternalClientError', 'OutdatedSchemaCacheError', 'ProtocolError', - 'UnsupportedClientFeatureError') + 'UnsupportedClientFeatureError', 'TargetServerAttributeNotMatched') def _is_asyncpg_class(cls): @@ -244,6 +244,10 @@ class ProtocolError(InternalClientError): """Unexpected condition in the handling of PostgreSQL protocol input.""" +class TargetServerAttributeNotMatched(InternalClientError): + """Could not find a host that satisfies the target attribute requirement""" + + class 
OutdatedSchemaCacheError(InternalClientError): """A value decoding error caused by a schema change before row fetching.""" diff --git a/tests/test_connect.py b/tests/test_connect.py index a939db50..e3cfb372 100644 --- a/tests/test_connect.py +++ b/tests/test_connect.py @@ -24,7 +24,7 @@ import asyncpg from asyncpg import _testbase as tb -from asyncpg import connection +from asyncpg import connection as pg_connection from asyncpg import connect_utils from asyncpg import cluster as pg_cluster from asyncpg import exceptions @@ -392,7 +392,8 @@ class TestConnectParams(tb.TestCase): 'password': 'passw', 'database': 'testdb', 'ssl': True, - 'sslmode': SSLMode.prefer}) + 'sslmode': SSLMode.prefer, + 'target_session_attrs': 'any'}) }, { @@ -414,7 +415,8 @@ class TestConnectParams(tb.TestCase): 'result': ([('host2', 456)], { 'user': 'user2', 'password': 'passw2', - 'database': 'db2'}) + 'database': 'db2', + 'target_session_attrs': 'any'}) }, { @@ -442,7 +444,8 @@ class TestConnectParams(tb.TestCase): 'password': 'passw2', 'database': 'db2', 'sslmode': SSLMode.disable, - 'ssl': False}) + 'ssl': False, + 'target_session_attrs': 'any'}) }, { @@ -463,7 +466,8 @@ class TestConnectParams(tb.TestCase): 'password': '123123', 'database': 'abcdef', 'ssl': True, - 'sslmode': SSLMode.allow}) + 'sslmode': SSLMode.allow, + 'target_session_attrs': 'any'}) }, { @@ -491,7 +495,8 @@ class TestConnectParams(tb.TestCase): 'password': 'passw2', 'database': 'db2', 'sslmode': SSLMode.disable, - 'ssl': False}) + 'ssl': False, + 'target_session_attrs': 'any'}) }, { @@ -512,7 +517,8 @@ class TestConnectParams(tb.TestCase): 'password': '123123', 'database': 'abcdef', 'ssl': True, - 'sslmode': SSLMode.prefer}) + 'sslmode': SSLMode.prefer, + 'target_session_attrs': 'any'}) }, { @@ -521,7 +527,8 @@ class TestConnectParams(tb.TestCase): 'result': ([('localhost', 5555)], { 'user': 'user3', 'password': '123123', - 'database': 'abcdef'}) + 'database': 'abcdef', + 'target_session_attrs': 'any'}) }, { @@ 
-530,6 +537,7 @@ class TestConnectParams(tb.TestCase): 'result': ([('host1', 5432), ('host2', 5432)], { 'database': 'db', 'user': 'user', + 'target_session_attrs': 'any', }) }, @@ -539,6 +547,7 @@ class TestConnectParams(tb.TestCase): 'result': ([('host1', 1111), ('host2', 2222)], { 'database': 'db', 'user': 'user', + 'target_session_attrs': 'any', }) }, @@ -548,6 +557,7 @@ class TestConnectParams(tb.TestCase): 'result': ([('2001:db8::1234%eth0', 5432), ('::1', 5432)], { 'database': 'db', 'user': 'user', + 'target_session_attrs': 'any', }) }, @@ -557,6 +567,7 @@ class TestConnectParams(tb.TestCase): 'result': ([('2001:db8::1234', 1111), ('::1', 2222)], { 'database': 'db', 'user': 'user', + 'target_session_attrs': 'any', }) }, @@ -566,6 +577,7 @@ class TestConnectParams(tb.TestCase): 'result': ([('2001:db8::1234', 5432), ('::1', 5432)], { 'database': 'db', 'user': 'user', + 'target_session_attrs': 'any', }) }, @@ -580,6 +592,7 @@ class TestConnectParams(tb.TestCase): 'result': ([('host1', 1111), ('host2', 2222)], { 'database': 'db', 'user': 'foo', + 'target_session_attrs': 'any', }) }, @@ -592,6 +605,7 @@ class TestConnectParams(tb.TestCase): 'result': ([('host1', 1111), ('host2', 2222)], { 'database': 'db', 'user': 'foo', + 'target_session_attrs': 'any', }) }, @@ -605,6 +619,7 @@ class TestConnectParams(tb.TestCase): 'result': ([('host1', 5432), ('host2', 5432)], { 'database': 'db', 'user': 'foo', + 'target_session_attrs': 'any', }) }, @@ -624,7 +639,8 @@ class TestConnectParams(tb.TestCase): 'password': 'ask', 'database': 'db', 'ssl': True, - 'sslmode': SSLMode.require}) + 'sslmode': SSLMode.require, + 'target_session_attrs': 'any'}) }, { @@ -645,7 +661,8 @@ class TestConnectParams(tb.TestCase): 'password': 'ask', 'database': 'db', 'sslmode': SSLMode.verify_full, - 'ssl': True}) + 'ssl': True, + 'target_session_attrs': 'any'}) }, { @@ -653,7 +670,8 @@ class TestConnectParams(tb.TestCase): 'dsn': 'postgresql:///dbname?host=/unix_sock/test&user=spam', 'result': 
([os.path.join('/unix_sock/test', '.s.PGSQL.5432')], { 'user': 'spam', - 'database': 'dbname'}) + 'database': 'dbname', + 'target_session_attrs': 'any'}) }, { @@ -665,6 +683,7 @@ class TestConnectParams(tb.TestCase): 'user': 'us@r', 'password': 'p@ss', 'database': 'db', + 'target_session_attrs': 'any', } ) }, @@ -678,6 +697,7 @@ class TestConnectParams(tb.TestCase): 'user': 'user', 'password': 'p', 'database': 'db', + 'target_session_attrs': 'any', } ) }, @@ -690,6 +710,7 @@ class TestConnectParams(tb.TestCase): { 'user': 'us@r', 'database': 'db', + 'target_session_attrs': 'any', } ) }, @@ -717,7 +738,8 @@ class TestConnectParams(tb.TestCase): 'user': 'user', 'database': 'user', 'sslmode': SSLMode.disable, - 'ssl': None + 'ssl': None, + 'target_session_attrs': 'any', } ) }, @@ -731,7 +753,8 @@ class TestConnectParams(tb.TestCase): '.s.PGSQL.5432' )], { 'user': 'spam', - 'database': 'db' + 'database': 'db', + 'target_session_attrs': 'any', } ) }, @@ -752,6 +775,7 @@ class TestConnectParams(tb.TestCase): 'database': 'db', 'ssl': True, 'sslmode': SSLMode.prefer, + 'target_session_attrs': 'any', } ) }, @@ -796,6 +820,7 @@ def run_testcase(self, testcase): database = testcase.get('database') sslmode = testcase.get('ssl') server_settings = testcase.get('server_settings') + target_session_attrs = testcase.get('target_session_attrs') expected = testcase.get('result') expected_error = testcase.get('error') @@ -819,7 +844,8 @@ def run_testcase(self, testcase): dsn=dsn, host=host, port=port, user=user, password=password, passfile=passfile, database=database, ssl=sslmode, direct_tls=False, connect_timeout=None, - server_settings=server_settings) + server_settings=server_settings, + target_session_attrs=target_session_attrs) params = { k: v for k, v in params._asdict().items() @@ -880,7 +906,9 @@ def test_test_connect_params_run_testcase(self): 'host': 'abc', 'result': ( [('abc', 5432)], - {'user': '__test__', 'database': '__test__'} + {'user': '__test__', + 'database': 
'__test__', + 'target_session_attrs': 'any'} ) }) @@ -918,6 +946,7 @@ def test_connect_pgpass_regular(self): 'password': 'password from pgpass for user@abc', 'user': 'user', 'database': 'db', + 'target_session_attrs': 'any', } ) }) @@ -934,6 +963,7 @@ def test_connect_pgpass_regular(self): 'password': 'password from pgpass for user@abc', 'user': 'user', 'database': 'db', + 'target_session_attrs': 'any', } ) }) @@ -948,6 +978,7 @@ def test_connect_pgpass_regular(self): 'password': 'password from pgpass for user@abc', 'user': 'user', 'database': 'db', + 'target_session_attrs': 'any', } ) }) @@ -963,6 +994,7 @@ def test_connect_pgpass_regular(self): 'password': 'password from pgpass for localhost', 'user': 'user', 'database': 'db', + 'target_session_attrs': 'any', } ) }) @@ -980,6 +1012,7 @@ def test_connect_pgpass_regular(self): 'password': 'password from pgpass for localhost', 'user': 'user', 'database': 'db', + 'target_session_attrs': 'any', } ) }) @@ -997,6 +1030,7 @@ def test_connect_pgpass_regular(self): 'password': 'password from pgpass for cde:5433', 'user': 'user', 'database': 'db', + 'target_session_attrs': 'any', } ) }) @@ -1013,6 +1047,7 @@ def test_connect_pgpass_regular(self): 'password': 'password from pgpass for testuser', 'user': 'testuser', 'database': 'db', + 'target_session_attrs': 'any', } ) }) @@ -1029,6 +1064,7 @@ def test_connect_pgpass_regular(self): 'password': 'password from pgpass for testdb', 'user': 'user', 'database': 'testdb', + 'target_session_attrs': 'any', } ) }) @@ -1045,6 +1081,7 @@ def test_connect_pgpass_regular(self): 'password': 'password from pgpass with escapes', 'user': R'test\\', 'database': R'test\:db', + 'target_session_attrs': 'any', } ) }) @@ -1072,6 +1109,7 @@ def test_connect_pgpass_badness_mode(self): { 'user': 'user', 'database': 'db', + 'target_session_attrs': 'any', } ) }) @@ -1092,6 +1130,7 @@ def test_connect_pgpass_badness_non_file(self): { 'user': 'user', 'database': 'db', + 'target_session_attrs': 'any', } ) 
}) @@ -1108,6 +1147,7 @@ def test_connect_pgpass_nonexistent(self): { 'user': 'user', 'database': 'db', + 'target_session_attrs': 'any', } ) }) @@ -1128,6 +1168,7 @@ def test_connect_pgpass_inaccessible_file(self): { 'user': 'user', 'database': 'db', + 'target_session_attrs': 'any', } ) }) @@ -1150,6 +1191,7 @@ def test_connect_pgpass_inaccessible_directory(self): { 'user': 'user', 'database': 'db', + 'target_session_attrs': 'any', } ) }) @@ -1172,7 +1214,7 @@ async def test_connect_args_validation(self): class TestConnection(tb.ConnectedTestCase): async def test_connection_isinstance(self): - self.assertTrue(isinstance(self.con, connection.Connection)) + self.assertTrue(isinstance(self.con, pg_connection.Connection)) self.assertTrue(isinstance(self.con, object)) self.assertFalse(isinstance(self.con, list)) @@ -1765,8 +1807,96 @@ async def test_no_explicit_close_with_debug(self): r'unclosed connection') as rw: await self._run_no_explicit_close_test() - msg = rw.warning.args[0] + msg = " ".join(rw.warning.args) self.assertIn(' created at:\n', msg) self.assertIn('in test_no_explicit_close_with_debug', msg) finally: self.loop.set_debug(olddebug) + + +class TestConnectionAttributes(tb.HotStandbyTestCase): + + async def _run_connection_test( + self, connect, target_attribute, expected_port + ): + conn = await connect(target_session_attrs=target_attribute) + self.assertTrue(_get_connected_host(conn).endswith(expected_port)) + await conn.close() + + async def test_target_server_attribute_port(self): + master_port = self.master_cluster.get_connection_spec()['port'] + standby_port = self.standby_cluster.get_connection_spec()['port'] + tests = [ + (self.connect_primary, 'primary', master_port), + (self.connect_standby, 'standby', standby_port), + ] + + for connect, target_attr, expected_port in tests: + await self._run_connection_test( + connect, target_attr, expected_port + ) + if self.master_cluster.get_pg_version()[0] < 14: + self.skipTest("PostgreSQL<14 does not support 
these features") + tests = [ + (self.connect_primary, 'read-write', master_port), + (self.connect_standby, 'read-only', standby_port), + ] + + for connect, target_attr, expected_port in tests: + await self._run_connection_test( + connect, target_attr, expected_port + ) + + async def test_target_attribute_not_matched(self): + tests = [ + (self.connect_standby, 'primary'), + (self.connect_primary, 'standby'), + ] + + for connect, target_attr in tests: + with self.assertRaises(exceptions.TargetServerAttributeNotMatched): + await connect(target_session_attrs=target_attr) + + if self.master_cluster.get_pg_version()[0] < 14: + self.skipTest("PostgreSQL<14 does not support these features") + tests = [ + (self.connect_standby, 'read-write'), + (self.connect_primary, 'read-only'), + ] + + for connect, target_attr in tests: + with self.assertRaises(exceptions.TargetServerAttributeNotMatched): + await connect(target_session_attrs=target_attr) + + async def test_prefer_standby_when_standby_is_up(self): + con = await self.connect(target_session_attrs='prefer-standby') + standby_port = self.standby_cluster.get_connection_spec()['port'] + connected_host = _get_connected_host(con) + self.assertTrue(connected_host.endswith(standby_port)) + await con.close() + + async def test_prefer_standby_picks_master_when_standby_is_down(self): + primary_spec = self.get_cluster_connection_spec(self.master_cluster) + connection_spec = { + 'host': [ + primary_spec['host'], + 'unlocalhost', + ], + 'port': [primary_spec['port'], 15345], + 'database': primary_spec['database'], + 'user': primary_spec['user'], + 'target_session_attrs': 'prefer-standby' + } + + con = await self.connect(**connection_spec) + master_port = self.master_cluster.get_connection_spec()['port'] + connected_host = _get_connected_host(con) + self.assertTrue(connected_host.endswith(master_port)) + await con.close() + + +def _get_connected_host(con): + peername = con._transport.get_extra_info('peername') + if isinstance(peername, 
tuple): + peername = "".join((str(s) for s in peername if s)) + return peername diff --git a/tests/test_pool.py b/tests/test_pool.py index 5577632c..540efb08 100644 --- a/tests/test_pool.py +++ b/tests/test_pool.py @@ -17,7 +17,6 @@ import asyncpg from asyncpg import _testbase as tb from asyncpg import connection as pg_connection -from asyncpg import cluster as pg_cluster from asyncpg import pool as pg_pool _system = platform.uname().system @@ -971,52 +970,7 @@ async def worker(): @unittest.skipIf(os.environ.get('PGHOST'), 'using remote cluster for testing') -class TestHotStandby(tb.ClusterTestCase): - @classmethod - def setup_cluster(cls): - cls.master_cluster = cls.new_cluster(pg_cluster.TempCluster) - cls.start_cluster( - cls.master_cluster, - server_settings={ - 'max_wal_senders': 10, - 'wal_level': 'hot_standby' - } - ) - - con = None - - try: - con = cls.loop.run_until_complete( - cls.master_cluster.connect( - database='postgres', user='postgres', loop=cls.loop)) - - cls.loop.run_until_complete( - con.execute(''' - CREATE ROLE replication WITH LOGIN REPLICATION - ''')) - - cls.master_cluster.trust_local_replication_by('replication') - - conn_spec = cls.master_cluster.get_connection_spec() - - cls.standby_cluster = cls.new_cluster( - pg_cluster.HotStandbyCluster, - cluster_kwargs={ - 'master': conn_spec, - 'replication_user': 'replication' - } - ) - cls.start_cluster( - cls.standby_cluster, - server_settings={ - 'hot_standby': True - } - ) - - finally: - if con is not None: - cls.loop.run_until_complete(con.close()) - +class TestHotStandby(tb.HotStandbyTestCase): def create_pool(self, **kwargs): conn_spec = self.standby_cluster.get_connection_spec() conn_spec.update(kwargs) From 2f20bae772d71122e64f424cc4124e2ebdd46a58 Mon Sep 17 00:00:00 2001 From: "Benoit C. 
Sirois" Date: Tue, 30 May 2023 17:18:28 -0400 Subject: [PATCH 107/193] Add support for READ UNCOMMITTED (#1039) --- asyncpg/connection.py | 6 +++--- asyncpg/transaction.py | 10 +++++++++- tests/test_transaction.py | 2 ++ 3 files changed, 14 insertions(+), 4 deletions(-) diff --git a/asyncpg/connection.py b/asyncpg/connection.py index 432fcef6..0b13d356 100644 --- a/asyncpg/connection.py +++ b/asyncpg/connection.py @@ -257,9 +257,9 @@ def transaction(self, *, isolation=None, readonly=False, :param isolation: Transaction isolation mode, can be one of: `'serializable'`, `'repeatable_read'`, - `'read_committed'`. If not specified, the behavior - is up to the server and session, which is usually - ``read_committed``. + `'read_uncommitted'`, `'read_committed'`. If not + specified, the behavior is up to the server and + session, which is usually ``read_committed``. :param readonly: Specifies whether or not this transaction is read-only. diff --git a/asyncpg/transaction.py b/asyncpg/transaction.py index 2d7ba49f..562811e6 100644 --- a/asyncpg/transaction.py +++ b/asyncpg/transaction.py @@ -19,9 +19,15 @@ class TransactionState(enum.Enum): FAILED = 4 -ISOLATION_LEVELS = {'read_committed', 'serializable', 'repeatable_read'} +ISOLATION_LEVELS = { + 'read_committed', + 'read_uncommitted', + 'serializable', + 'repeatable_read', +} ISOLATION_LEVELS_BY_VALUE = { 'read committed': 'read_committed', + 'read uncommitted': 'read_uncommitted', 'serializable': 'serializable', 'repeatable read': 'repeatable_read', } @@ -124,6 +130,8 @@ async def start(self): query = 'BEGIN' if self._isolation == 'read_committed': query += ' ISOLATION LEVEL READ COMMITTED' + elif self._isolation == 'read_uncommitted': + query += ' ISOLATION LEVEL READ UNCOMMITTED' elif self._isolation == 'repeatable_read': query += ' ISOLATION LEVEL REPEATABLE READ' elif self._isolation == 'serializable': diff --git a/tests/test_transaction.py b/tests/test_transaction.py index 8b7ffd95..f84cf7c0 100644 --- 
a/tests/test_transaction.py +++ b/tests/test_transaction.py @@ -188,6 +188,7 @@ async def test_isolation_level(self): isolation_levels = { None: default_isolation, 'read_committed': 'read committed', + 'read_uncommitted': 'read uncommitted', 'repeatable_read': 'repeatable read', 'serializable': 'serializable', } @@ -214,6 +215,7 @@ async def test_nested_isolation_level(self): set_sql = 'SET SESSION CHARACTERISTICS AS TRANSACTION ISOLATION LEVEL ' isolation_levels = { 'read_committed': 'read committed', + 'read_uncommitted': 'read uncommitted', 'repeatable_read': 'repeatable read', 'serializable': 'serializable', } From 7d4fcf04fa98d141d145cf37e893de0fd7264997 Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Thu, 22 Jun 2023 16:31:52 -0700 Subject: [PATCH 108/193] Update benchmarks, add psycopg3 (#1042) Fixes: #924 --- README.rst | 7 +++---- performance.png | Bin 23944 -> 17878 bytes 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/README.rst b/README.rst index e5212156..42c7fc26 100644 --- a/README.rst +++ b/README.rst @@ -28,15 +28,14 @@ The project documentation can be found Performance ----------- -In our testing asyncpg is, on average, **3x** faster than psycopg2 -(and its asyncio variant -- aiopg). +In our testing asyncpg is, on average, **5x** faster than psycopg3. .. image:: https://raw.githubusercontent.com/MagicStack/asyncpg/master/performance.png - :target: https://gistpreview.github.io/?b8eac294ac85da177ff82f784ff2cb60 + :target: https://gistpreview.github.io/?0ed296e93523831ea0918d42dd1258c2 The above results are a geometric mean of benchmarks obtained with PostgreSQL `client driver benchmarking toolbench `_ -in November 2020 (click on the chart to see full details). +in June 2023 (click on the chart to see full details). 
Features diff --git a/performance.png b/performance.png index d5f9d5071ff3fe26994f1057952180794e3cad03..b4a5726f8a2b81b6d22edc109cda5280c1b8fc72 100644 GIT binary patch literal 17878 zcmch92Ut_t*7l(*2si@bfPftZ1Ox%43nCy@x*!A#(xfQ81{Fk6x=625LkU$ni6A1q zgVI8gUIK&`NWy;-aPH{Ly?4I<`TpPMnaAO<&n|1P_g(K=dmmqEsw>jdve5znK(DNH zT?+sx2>?K@e1L-ZY+_;pfj~@6P4C{ld+*-8`}glZc<{i?%*CP;hW?NJvO%XlPhiSa^8&+qZ8cA|l?sdlwlQ83q2yu8)q6j){qh zjg5_qi;ItsPe@2eOiWBlN=i;nPDx35|NebyYHC_qT6%hVMn*& z+S}VZIyyQ#JG;8Ny1To3dU|?$d;9wO`uqF8e*OCG+qZ#%fx*GSp`oGS;o*^y5flnF zIyyQwHa0#!J~1)z{rmUH$;qjysp;wInVFf{+1a_dx%v6|g@uL1#l@wiB{Uknyu7@! zva-6miosyk*4Eb7*EcpcHa9o7wzjZX?DqEd&dv@Fhr{FX1Oj1qcUR3=4s;TESLN&1 zbX*4(hj-<>*TLuPgW&~7PaPh=eqBLMUTfl*|HLH)_gK)t0=3egFR27a`+t#qnD+UW z5XQbm;L|1MVr8a=^+X&lpZ<66Mt3Z%bvRGvlVf|s{G^x1!jsQ6w9|3CP2XQ5ubLC$ z9NKVKr{k~=ZC5eVaSl`MSEpMN9onL0rsIU6ZU3x6UdPr>_Zv@^E7L7p$WBIXp^O(F z=(pz{^9$@isSG#?s}iw@hsMou6~7x!7*EAvc{vG9*6A93ZR`Bt=q!xTkK?o9flpnc z9j4%}TSkZ9n5&;{nsSQ-M-T8vl$NZD1NW_qdn;S6V)P|(XXlu6h6*RvSygns>h_;q z>brGHS*tl;jMzog)^qE`g>TrlG2W4TZPAE(6Ya#F-XeNt>a9m0;FFXI^hVH4%hUFc z@FAp8UnXFU&qP?C7EHZ3Vg@!CAG7lxQ0%zj)8x1RGyH>l*deG|yDMo{Gg64`#?}5~ zEBg!REz$m+a6_K{$IGvshZWez48uoAE5zI40;_%#2+X@8iht-kiLb%iBXH-kHYV1;r~1zb$vbwcfA z7w{-doOB{j)w@uYq`J-oc%%+o4KS&4nUOVM!K9uJzOzAxdn|=XKy|z@8hHRN2Nj68 z?;ul7N5fJfj@OU3iXFWfp-!%-BO!M$&6!-r_nCI8bEf6?mmbgc)GM4kmDrbNh%a1k zHqhU`bwh`ZK)Y{x5LRPrp7*6NX6&Y1cI4PHu#qj2rg*^;>swkvrPMoDxsaTpD?{~2 z6Mg!f6gB8Mm`#7F^_3X+LT(K!D%q4XPyAnLUOAYrW1g%Gp?HLFnNeS)zLMOh$mel~ z5Z13fa-TTQc!bQn=EirA==M87^QWdu4*2kU0ly7XZe3seCf`4E44K))Rj?4bwKH9D z*VM`*$y{db9Oy#$Hr()q{oC%spBgq~4tH~BQi!``3avFtqAm8A7MS}e#ljU+k!45;*@ zZvv~VIpiaK>ZZyiL@z^ceitav+`%HMH#akWZ{6NC>7|sZe>RVVk(?0i^4+db*I1a~ zioRCWepMWQVnp<_j`&Ui!jI8&LBIvjBx3Bi_setSSLp= zhMiD8#XLl2kWOaN10aJu>_R+vy;fDKmq9Q1#HD!jlSvi%^0fu7y;lPf(DTv>7}nC~ z%f~u0^pb8jJheXCd1j%DU6v>BBj^P^Ku4NXu))S_kXKqzTpv-gW#j&{GLJG*jE}TD z!a~_NsJ5B8%RA z^K;;GS>JR16$PzZbl(hkUQC$hHsJ@;n1m~!xWJTD~7%?TX%s69N 
zfn5Q!V_ez16pixJC)zs^oO(2(_Pao|cah>81i!n%Nu*dND3?wm&5`Cac(<;7$}I71cp3=% zrkNYO*JT}K^4_S2?(p}9u5)Oi=@!V!GHVW*N$0gBw;&Qnx^89XKIcsiNPNm#_k$Ew z?4acaJAf50iKOc5Q1&`q0m`FI6VUd2a9k-W0S|&VrWs8*)PdN@A$HS_-nIv4xa&aJ zfbmba?^^LP>e{J7cLpA~NM?@2C?$6G8|aOATTUfU_q|n&goa8>4cxw6VGo%jm<3P4 zZ7~^QRpp=wpa3-7xM>oduY-VTwI3qO(xnwRuZ^cfa%MRPrLdwo#1HG$OU@oZ8bqY5 zgosC1FR*Tfr&7i4C{}+2Uq=W)WlCUY{Sn`Vw`qpl6taCz!#GYBXh}TflKXf;OTrOd z(wK#vQ+_O|vWtQWa6^}#DcSyrX;zzDcn$T}yo7~hi4yivU&FCfJ>bmEQ!Y;;r5K=q%m&}G zST*Qk-lz;|{^@0xQgKHzSB-dklh?pZ7tP^LH6M+h1S!YBptLkT|IK0+pECX_evjjX zBao`{)a$f3H9K$*WI|-5yW^O$HswpiMul^!lN9ZQ@&F&fiZ5-VB2v@SaMv0w}4FJ5Wadwn7MQlJ#88ke)!+INTpjQWb(oWY1svo3?yL~cO`f46&2pq98Ag3X|lJ>aqW7A(bEcca*5;bRdYIznx4^df01R|OX0#khj^-Ur`leKnef}!)QvNz zMZ|9|fldmq)(Ybhm#&(bmL@`l8QQx|n=a1m?rjqXa{yp0yJouKEi~O(6`~Gsb!^IUooFs4l&ti7*!HqT#d& z_+%o^Xs?x@@JJRBer_H)EUfHNej%wuARJv2;spXGvW&1e}s z!if!D$+YFd-^rE0cZP`wIyJ!SZKCgIAg4FQw^5zS@A)nCcl1g=}jcj5V zRH%bg*U%i}4?UW=%s`}aMrfzOwt3(-p6HQ~ODmE1ptOfF(_>)x5F!Y1Etd}Ah%hdNZ za*vX+>X@3(g{vDX!oiZI>5>(=*K4z#ZB1Q%J6c1fEyTfA!w`-9t>b8aKUx6;Y4n*8 z!?fjRLcJQmLa5dBdGQJDEb3ZV6gLOiv23l&oLbyPkznX*ftajrYG)fh!K(20z36P2 z{h5G!E_*$&s&Usk&KNOFDc&pkF@h&PuH=v56y_>ono(pW8VHKq+lgd#0cy2_1LUbz zBKUH?Jsm2b`u0U01y{CBpAa#AH@AC0cRb3%no*~W8x!IV_=N*1PDf+bM3Bzw586kv zh5_y#TJ93^6St0(hga^6vym0td=(csH?kR&*nbMBSR=dc$TZ4SV%~1@{{Ae>@^ z*Gz;tuYRlk4BgKZ>i;W+5{U$^x#giaAbt=%Q_f6N%;`g6*nU=*Om&x%$I)rq+@FE0 zBU%0`NzQ;R2a~s%LtvfXxLwd_4dGRF5QdZ7(B1c9z*Z5%W5?}lRW@(3n7_C!*o{$e z?d(YYF{Pb#HXybn(N0)n%(l-vU8o(mF+aJG9Bu9m=cV=#ZpZO=XtoLMu7VPnmzh8< zWH)c2FY`*%p2jqqI_Vb0EF`G1$iF5+8k=VF1xI5WN!j>^b6mU=5wxW8D%&$FYDyXRz-$N&8@_zM& ztWAr_-hgdHmIGWyj1RYXnNj-u(2d9+x-pBzht8(pOCe#c{JwF-wJP&f0%_wTr$K(Z z33VH4zDFXsP7)-75H60}j|E=8Q>rEYoWMrV>sObWQV8Xl$F=S$Ol)@0PqIx0^iyFd z=u74VVhaB6_1%ALNbG?)B1sB@eGr|W)GMeGiGI{h?6(Ety;7?0w22>f)}`3XL?704 zSHTN%s_Gs0G;l2j(O%5v!&%)}Mh>)DC~qL$<0+K<-kQGNAkWw0e)5iF-jB;-D??R@!Z;2+ z8USt0R0ji0l}f&y&W`YAQC01CRiFV3vTNe@m_ra}(;fL>OCo7K1nZ`(3v|mor8SNx 
zd`p%G9A`kd5w{RT8^14sTw{9#mH6;`{CAMiR0vvu1?=58j(yQX^@MZBXbagukZ!+C za%rksRDVDpSk$U#fn~Grih^0VGyZECaPUcMNZC6d!$a)^dvL9xYh z0LtwIXa5(X0g0Y?sg2yVMuBy?DH;39GfH7aBzB~oc7k>za9zKkIZ4|Jl%Skt9KH=c z(j{&Cc^L`JMO*XQo^M}?43NL^z1c31dLHE(xM2RZ3duOu|??8To1=m6#dei8(!vG|sezztHEkRXh7 z&Nfu@8UB6RM`ZeGxcC#WK=!|a*8SK^Oxyk~RuhRy9ylJD<&l6q5#n-%`83o+R}2$e zx+!`?g?&Sv{6{%y`bRlwVXvH&suAq0_l|!kvH}W15=1|UNicC5Ll5#$t(R!c8=ok- zIYwm$O9Z~2?rpoZHeP+7<}bk-WR4%vdW$`8MjcrmB~AS+oY#{7nlOQ`3-S=?siY)n zNdPfh`Dl0!h*|$#Fz4%;s@#jRFg=u6uw?G`1=Hv5clRqxMIeTVLwb+w45I2H?hv{- zba_>MYn6S{#Oc(w*-R_l3mX=CY<-xl{K5^%Le9S$B1H|*PHsCr4U;r0o5=*)pQ}gr z`$*NJF)16W6M3)8K|%v<3D4iM?mqc*unx{56^s5HtU;@>9EI-c@sauWk|B2nKIQWO z$FyuVlC07oR7~9>CY@wJA54H4M2StAp^elWlnfS954=tORS$BWISg=!zb)dsKk~-Y z=rM68EwRBB7GGzIR zu=w{g^&b8VzPgt*#8^D1)t4^T+a&)5^8V>i&JK3~ z1aPDdl^~uev!i!5kZAZ@72~hOSAIfQhL|7VEjeT(fiOJ+ybib zST|air!TgC3A{0+z{?1kf_TRhVPA`ibSE2iRt`Dy8|@On*9}~@0!;lYe&}e|eH-!~ zCfL%9T}sl(l`RUQ=)akw{54I?N9WFSu*j4}mZajrSpWuRqKhXY=+}3is#$3WX}y0h z`=0>1^26XH&5U#nY+@%bA^W$`Y<|Db%rzJn?a(tw1AVi@Xl1D3$L|TQn%|sGdyRIc zD=nm9fQBAgq~8aF2e+ZyLqAL_If)*Tcl$#p2j1KB)drM=IKC`WLFLR{Vtmt}hh|%6 zaAH!FBAmb$qz4EMlB4d&zyB4G5$*IZHT)k@2@)?c?1KAEe1-VGV5BC6C(I`N!>Z=` z=exkU%86_1eb=|&>}Se95cx-@yeY8fU$vwEykfbxo3zjWU5yex-a+ z(H!d({x_#IKV`?n&~liNXK@gj3A=lWS)H7(_qIh>KQP!EF6EfvOcAHa8`neYb6xqn z`j3Edn6RYh&kvxpnA)s=1B|zUQxyl7Ukw!b=XDkozx{J&4BAcqyoQ%&`c-Z%w7-=d z(%g_KkLGnKG0a(~vtUxp7@hbh)N+VZ%K9@*@-;_^okr>HMFj{XbMm=dvGW5L`9{Qq_#q^*00WxY8eNxD*N!RcSWP2 zp(No%Oq_^V`;*rF|ABQ9toJ8ClB>MO3FkJ%^nIrisG=5vXnKu=QI;e7rxKyRoJs`j z>@(ui<;^-dKUTg(mm^BKTCUfk=eU6lr4k3`YFG4^r};cn%8I~e7;~V1PvgQLn{k%a zy3S&1?YG{b5QD6EBxCX@a~6kHjI@vlMCwA2f0Hfw{#h+4kJ^hBV9m=+L+t0AIKks@ za$-_6vkT2rl{s+ir(l~U-z1-2d{y_f}ETUO%59cz|aJ`fuy@BC!LQI1!oe z>9UKQvA&77XyLf^pHB`9!=#E$rU9*!XTPRY135jfhzTx<0B~MO`U$yrL?Fomqxs^1 zD4Tr?rm%B^Lh`)!0T9LvMzU zPGFmKxyK$^NABYA!6U$EyZEPHBXr_4A4UMN7w*ASn8LJZ|@gUTx^}PhjmxENv3Wo0$-HF@P_~s*T0;zopCfH}>B-Oen)FJu_Y6J^e>P-~9N0 zDCqC)3;7uO(m>jCgj3dDt|J7!80t$ZXP$&+6{?fKeGHAq3kVVA)W0b{{l5k|F-{YC 
zggAk{)QYMe^^Xf(d&v_!Z5mePS0LyA1Y|eVzA2CYPwHF5?tQif;qsP*U(#SkiN8;S z$Cky|Y}oWg{-v4c*WN#q`ZnQgC5Aj$QoApyzY`_w71!F@F}T!aNBLf)+ePvu6)xH9 zVr0^{JT}Ti#Hj^gPh2&O$X?FhBgXMJ&$;=I5!5YcED6%xR)G83xd~d}%KNk5DCYkX zlOHepQq`e8IbgiomR$@ufGW7!p=@%Wlf4gvbM&@#1uq#-uy6pTx!c70m-%tYyC0WJ z)gWPGXrDg$sp-EhR0}gCUSCq13>}LyW5eFUm@%G*Gw4f_b~RKm$bmG zk^BCS%2~ohI~+X${c+F+bt}Gb#A8+xd8y>@Vr{g{#OOh~mk5XjvHqL}>Qmx6zTir( zADL>u;wq@dbxXpExL$jCH7)1shm^fUy%#=wONg5gQxC zz2*ycvfn}&`hVNxGEpiMdub)2k=P3<)Dj0GQ3E*8&rQHpgA*_(NG9+7!i_jHg54J8 z^Qtdta&~+gGsIc{@?eZ<9KA8}*}}4JMEMUX9{1zWYCk#tNs5ELxz|1${T~&)+;suVJ4V56OL9lH?e*V^uCa9SWDP<$2eO}s z+1=Q)L{tMVSAU0DV|gSIiy?06QEKS0p-`yzw%E>3_npv~I5WazKWWagIF8OL_E{I7 z2MM*qIe1mD-nH1@T=c8xCYjPV49Q(jV)~CLN6C8)PhhFpifN*>=(25AB>NqUs@?D z<@BiUTa|x5=BDmD9}{%{y3M}5F0Hui$=`k4s^Yx%ptAq|IRZmAh0kV5;~qp_zd}$L zCSsTfmrw6ENb!K~(h&&g5$`au-6O<37(;iXktpy?f$#x)Zar|1@$LKGH@FX%AgE|%VRcuu^u-?`?}ju*D%xbAj>0Y*2lElQHiaj z-_G>;oukovr=ZV}zU!JIqGz~kyN$_c5YvArdYo{w4z)7bX)vF)wtL2g`iRu)lt~qr z;3@pBx3`noVo~Unz!+~YMaY|(osA>P18rs^LnC?|ErV{{Qme5gALB}jJtAIRyWnXRYInWjCIxQ8n{)$j z88>+xAK`;ryi(aUExe;oR--ww_1XR6aM{ZF$lf95%ObwT45NmD3C6$a&z!fv4Yg&6 zVY|5&`7F-f{(ia1K-uFN%czh#t>IOR^@r=jVsadUC9-t=e4*4n&j)lljifci59D7v z+sr1Pt@m99>gL38lnJ^GoHOh>=$T!3t*k`O4zROYQMuk#!g;UNE-`TTdnhPC3g78gx3CII8)+igXj4s&!n2jwQyn>lufHwJK3 zs6@wi=iK+ZI#wO4o~*HzYzTPs-9mYt9`ev!sDM3v&VxS~b-?<#c-Kxj0@!NKp3jfk znx{T?i$yb+4!AsXIg&Y^@=6|7Hr`vw?o;dU1ZozE+sAO#2apUlCv}r=l_mMLj0#+i zcE%s}OkHcB<osCuhs8OcABQ#iuuK7&}H(b4fx~uT?xg}3lr*5VydfJEz z2{AGD2RWqy5WCEJE^QDzd_9pNBw?+Dc^V>AVxGADmA@p8kCGrRym^8^8M}Vc(Rx#r zV5Nj=kUC&zEdfMccImt7cV%o%En}41(*#1i{G?d<^f(=H~0L zK3vszvDqcRFQzH9yh(b|n04$}g3CimQj z8>)SB_{-4Fjs$ma6qYL*E6CENv1zQY4Y>@%HB?QbBRqC1V_=7d*?J{8YatpB^HpGc z=LwvWW6y2H!TrL_DkjfrqMqMSy0NZdx++arQj($24BcBw9PUxl7tneByD*t`!ZI{GEez(Y#;Vv zne*s4fPx)2{Vq+&>`?HLNXAVI!1L-(P1?QwKz4{%{Nu2}b_yyPi+pFf`f#+VN$Y`~#v zALB)$r@ybd-M6eevu4#sDX7Z<&r~JzOq$j%5QP{pC3)FZNC7Lm ziZeBaj3F}9UBNi(g|NheSF7di+sOmhM=+by<~$}j`pVl*w+2)Dfnx`m?G|H7ylnYz 
z)xV|}Mrmpu#!0!>YNVx4`X{_N_)XxVTS3Z`T%tdS)^9Fln&?8gpvAmZ<9=UWuk4xi z-ej<_fbQEqv@+tU{X%c~HT=oBE;LrE-{iME6YQo$G>2%RllNicwT+9b2?(3WzR#zc zHPUun)tjnrO{zMiR(Wh}l;hQnEwKZ37x)OALSv};XVdDE^h~v#pSxC_`bASw7!TBe z@WE+nOeMG2NIU9#0`{YWL|FTtM{YAgINgkG_*0<^*2254YV9Llb9q4-UbT;)%DWot zaQG=q05!ya3jTGg8QV~k=%%~S?cpTp$Mn#{B*VMABK0kivlCR&^SRIJ?>?SlDb^5x zYK-|t-%x>0#FyPSoB>0SRC3=#lNY)^S3IMzf+ZP|h>XW_A0X7W#ScxEH@1xDI4Jmg zW_B^ej;^z^&oTebt91N@9-rRKP(?=-8=Y*`8H)ttaaKndQ1E=R3l|9 zKxrbn_S2Y5!EzWpp~m|AY+?u@(z^NPgb{R|`!#B$UyI{9`{p?>j+mc5rD0L7Ov&&% zc?G11kc~fQh2cA zBL|g=&7~&>!8&O`4p*u&gckhPMIsZPm*XYc6-RhMr7p>J0U|SyNGMJwzqoGZVOEBl zR1w7g4wn=C{*@dV|3a7!zR^$BYXl`fwiE*~ z>|MRPn%&}&7WNyS^++WvB@1s5c{p0&)%D0t6e7gPh)0Dz=y0M89_p$SnuSA%a)~SG zZ;Q{jdJdXhlbl;NlsvD3>d{h+X?{-!sSY`CvkRSV)hT1Z8gty>_EF<#H_dydtwo`( z{jJ}OHX286ypBhRHS-#dcgX+@Do)Msdt>Qh5Z1|EX9apK+O{r3`>oa%0$exbPY=eN z=4DG$1LSY%=4Bponiht(+X~z+JeBg|;FdJ3qIW%ct}=YGf_B-D?e3%GXD_V04!G#^ zS{NFpj3>z@)^ej1h8Un8st!{O`qcoll-jTDN)E~oZ^LK zc3dO8CG?FX$|W1+Xu}hXO!7<*m?CtB4`)?LST{?L4O$dnGStF5vcGBwa`vK578p{s za%C_HmfxuE##N^}DPJ+va;E23QTaUfD2Z&;&8AGns&qA4rk)>Zac4OHwjIU&>11B~ zx?1cyzhkD^@f)`C@Hxe?opNz$>ywo|f(+cXP0O2@v{E?{eY&RzfAzJrt=ML%yX&H-?UO82MsNpe8$)dTFG0yd|;hs4vJB$@tk!_jW z`WkU{8ecVqW&<1?pC;;`FCIHG3-OTgwOxE6H6Y{_X>YlG-H(5ozr{NgACeB`BcO}6Cycb4{^XKpMkpnkao{4p#fRh-`~x|K}sI*fUrekBdJJC*)Bhi3ldmlY`N(C9c@ z))cSlDXn)8DmOxRj2|e^PFoTlKKjP(#eUJ5U=rHcL+zUBW~o?gC9tWTVp*5mq;^!Gs*Gbud0IYwlAW!qI5*_j3AASQA<# z`y4H)p~dsECy5)R708p7a6T$9`psM-@Ws0nJqD3_tyDB>XTkhwexdV^`I4Q?~=T;tG+X;4r&DlZ&~}zhqNoY_X=6xh25DZb8a2A z!$r$&rKkF9JT8(fbI+Fa%+eH{@>>)63Jpg5b`c2c9AI2u^1|iDdMj(Rz4PiS?M#6V zqh2V_^u`@LcCk0Z-rVaNVhNo(r)__-tbyx?_!uiJc0EK*5l?Y+ex2Df3I*SJ?M)*# zK0t@p_vy4U2GU^@ED;xcj^-*Ob5CAL@q2Hdrp5j6;&*zfL?4G+t^PGNgUCQ#!zdm~ zFqTunt!O+o54WQLdkOI40$YB~-|x(8kW0<7a^iHGoo1y8ZRf9$#l^M^7iw&a8ZO}Q z*ysS(dEF<6OcBN3>jRpec~6Oz=;T5N?^M`OL&YL9$Ln7#R7$h4DjLQJqu*R%z10eH z&E$n+=lO2;8K#>l8)Of#Mk@9&AVtk;1U<@bngy;bi$)D(DmAa#PZ@IwAAMq7b5B5Y zPNda;SZlIyJA{W{3*5^G0hqH6O!lROQg5zpG-_Y$K&a&dBoS6MH`_C~-Zn};jIHC6 
z0PFd@Yuz~)cYKM^M}1JSz8^XnEf66+U|?9YH1GnPvCCR}=a(*NtEH8OMVGmo&Hdi7 z1hsq0FRHEunrk(}UG})3Y_p%j4 zcs#lPyJ}TsDqj@hB>q&JMElGl&2mElYr?CZ#kja?v&01=qkAk85IB`FoWLA>>B+{o z&LNe%4xA_gS8b1ol*eX2f7M#jEG7<-aW_CwKh|4oSKsCvw-fvNJYi|xEuM#;;B2Ws zwz63xiktJ}?!_@L>$_@87M|?IZG((x`+f>tgW^>^$HkX*Ct>nJfSV5M1 zSms-gfkFxqCyAv-k zub_FKy^|4}qWcJ5=l8|)#jmFc4v$vA>=kyXAcw)rvGO|~)VJlnymi-w-8rsn-xTR5 z49zPWsBkptTP6sO`Pznx- zAc|p=b>H=)OY6>^-WE9HEO3BF?>N;t6y(S&el!Alvh;%vSQj}dJrOH7x4N~`(V1}8 z?AkCYJ?e3;$OU{`wM$RqHTpL>yo_uN8H`*@IkX8z^EAToSda72_<$VG$aDC@a<`RV{MbzN70cfa3#}tvS1P;grc~5O!tv;Il4t0v#Ckg7bE=n zP`3s0H=ldyW7kCmojEItSxt*Z{1{5!)P1p;(Z!$ll+kZ6zxW`M?eo}m*UzkE#+qgX z=?#}fiP4;US-*zYiE0ODdK=C~=ulNpHSYxopN}sRdQH#opxk$|@P(TuHXA zLm^;ZL(Za}eJ~2vWtJB~8SUb)p_?Id0-dkdVi*PMKychuY)qNZ!X%x`hhGbJKg z-k(KTbmj8Rskn*Ft$BSL6X?^8p4Q!<%X76Y);0(essTr41{jqOo3URW&VLB} zs4hw-h$?_9hOZ2Y6#=GZ$YtP~7V_KH5V3jrnLgXE!uUpHMmOp#JxfJg<#H+;*|C@@ zyYojpV;i)gsFgQoW-Fun2iC&^m>ZR5DUY{!GI+=H%V!6<`%7RpEeK_0H(*#;2Ur*< zdue_6+@`KltStqFm0wx~AW?GKVP@JeLHd#g>l0Saep(>!A$F%g9FCY@s(jf;z$Z4a z&Q-=)FH+Gzk%NwpWL)W+6}qe@b8Zy;xb+xRu;--LEz{jqFs64y-mPPv-V{kYq5FG< z2+R#B;V$Jc6TOgeLvQBhQf+uNO$Wc(mdfSSMDfOc!)^@|v3X*@J|8f!pq44PLPJ$u zJQn<75d4bn*85s)B$VFXs3My1dS!Y|fLH_w{XpTres8M7IzXL#NNY=dsa7S10@-g# zSjvkzCoIF;tKbuJ2>Q+1hEGS)X{PTHK@|M5%O2dX=U{j|pAW^V`0aR;6V(crKArA@A^8uyt%n>KLPw+ zz<=2#SLWc_j0V+)K=A)0PSR{xC^oX}Sf-$nC~;E;Y+&Lz$fYHfBF%iEl7 zRrB{h%h|T-FVx?tPRA~DT@ZCy8Y*{~{xCJ)kvJc1EEU*}zm1wSv7R`Yme|oa7R>aN z=Us0qdK>%*l@p2jRH@SR49lL!Jb=LU)l?)|8IfNZy!AVeT~n3wbeyBO*O&hknlc}~ z#yHQ_tt=05b)ChNe`c+@!+S3f`o)Z(4+T)%v((0Gb)1&!c^-* z3#1dX={*&JKs+34Y>Cg1TeK~|Q56k3fK&bDTS>dC{;Q=|@YERQbi@N{sVLhF}^WDbP!^+0hH*qvWk{~5u;M_+CduD&(30A5W1 nFBbj$uUh>1*=F&^E(PyIYnAHq+Gp?|Y5|lL)UW5snLhhJ0j1@c literal 23944 zcmdSB2UJsA*EZ@o7E~0DBA`^24pNk&bQ@woKzeV|ODNI_2}ckR6oJtDQ3D7_3lK^u z0@9n(LntBw0trnBB_!M(Kt09te*gE5|BgHE8n6>~S!?aJ=9=?)=A4Vr`>Kj3k24)V zaNxj6(B0b_2M!#@960c6?9qe3nL}4jUjsglx!l!vJ8*#J9p(3zSOFSl;0}}f_p}tW zwY7D0baZuf_4M@g_4N%53=9nojf{+pjg3uAOiWEp&CJZq&CMS?cwk{+VQFb;Wo2b; 
zZEa&?V{2<`XJ=<`Z|~sX;OOWG27{fPoSdDVU0hsTU0okOeCX!p=I-w9;o;%w>FMR= zkENE{QUf&P$&!r^Y{1v{rBG=J$e)n5D*v``1tYTprD}O;NXyukS9-` zgocKOg@rwR`t;eeXW`-D&w&z~77-B<85tQB6$OXGqobo^Vq#)rW8>oD;^X5JfRfvo zn3$N9l$4yDoRX4~nwpvhl){$u^z=Xe_~XTk7cXDFeD&%T0)fcL$jHph%*x7o{rYuw zc6LrqPHt}Qn>TOr^78WY^9u?J3JVL1ii(g(WN~qENl8g*X(U2`rl#iR=9ZS0R-p7ww6(Rh zx3_n6baVn`V6v;LtGm1VJy3=|fB5jB2PnhSy}iACeSQ5v8T~RaFfcecI5aeb!C;1g zGBJk*%BT4epiIt>j*gB21j_8v=g*&~rlzK+r)PjNzx?IP zmsz0Tzs}9g&Ck!{fU>lT$Kw|k78ZfBLRbRI*Y#zftgf#BWo_dtP=F(0a}_9r%{8E` zZxINDb)al)ZES380%dD^3n;{GB9XWa6cU*P6!Ko#0gB66R-OX~4Bmro-_Y_N#*LD0 zX|2CpT&Haal)8xJZGKA=w(QyrKBFG#Mi*$?CIQ0O_IW?MaC>>;^I1&=cSCmm?5xoK zj~Uqv?3LmD=+;KDc!Pyft*50HS#I(>+WvYDP3FgJ0u`HDV72}RTXHa76u(gyLuOU535vR8E84)kGb+BGi>3XPW3 z4Yhqk8sBNOoUirO*0s13Lvjg4VLhYcMyDLPE?Tk|;qFIfVo zE)dUJIrBwwPKooR%Mq4XLGVlz)#Rhty7CL=SXHM`5S)P&vnCZK=@g+1H<4yPQ4j}$ zhYdFgK};XTZAX^id9pHdLwjSNL_!^r{@m4=;TrVK;Uu3++FW^(?`x{{d~Cqd6}Np( zD%KbX_G3}g{OOhiN`k|=!_Tj6J=mNc7lm{*^hUU3Gp}W{*Cnl5o znrY}!O!Gye>56u83CXr*#a~r^NC;nqH`EjxMA0gTI6`v!z(m!1CsCt(&@`VHEh&BT zNyN9F&Y?Mukibl%vQ%&7UiJuWf>+;`yNIT2N&zry7E#cw6(Y-((KMJ#1%E(BVqT9i zRzQ3lMY64wLyo@OhG9Kpr>#tQuVg7pox4Qf9dPE!49ew;N6=ZsLf3lzFG6aBTxw3A zLPjVMn6wWuAJniuCX=sH*OZRPMNQVx^LyqoU`<|S)VUO_1g9;CI(9iV2E@iVXk+NQ@amwcUl(Cto2{7Mc zdleyCSFWvu;TmQ+;U8H;ZF6U)&+f)X#~FO#x-xB`d1+j@HsR4Ze(Bf}e0te*_=-;Y zlcN|#2V2#p>@S7+jjN^jg;kBPbr3E7?5b-(DqyfVg~b~s2MnSO#taYid#h`8+vEWgLNZXTl7VT1TExpL*LD?@Wp zQh|B08uib!L{MmLC^>~UZxCv)q%;+u3O4?e<#OdOw%+qe9aoeKA;3Q zz;%m7MBJx5&JXdgm>!Lg^Ul7zrZ#9ZcHg*AJ(~%etaVyu5V2wFCWV+s;psFYNKZ?; zQBYwuutgjRob5iTQ986fGi$Ls?nSL~C3oG@HuD&+J}un>OalWw0pscAlGRi^(bnE@ zy87yH3i0p+ypumR(@|K%st0A5VQ3Lstyv@-?WFS*??mrbpa{(0|$_OL}PiGx%}W3`^5PJT*Jno+~C{pOHI1K33y`Hg7&EuS!Lox4GG86vWc8kppu z;|oI`29;lNDRrAY9YKvfVIMnpj@ZowXk?dobsz_GNZB}L_o)5l!XDOQ@*)Darj8-u zZ-NXMV8`bSOE=+dfrd|5qmSH4`1*%W65^b6gF;N-BMkibU<4uc)hSEUtXpbU>767D zWyHB5h}$1*YrlS3SU1Knve9^&R?#^v^t_3h;ml8~O@jtDu?riqk$#OoW$CQ$afjfn z{kHKkMZ!3U$LLQ!TR-0gxlm@#a)XdgUBY#$1?FSD)x&%d(9iL>*07Gj=%x|%*b`O1 
z*M>#q`;G`!j~Qh&5j-xdkfJF1lH@a1%ntD%sU3gmZ#?*PBlqmG^lRyro?KFI?XH&h z7s8mDaEu%OF~3O7Wj9Q$PI&{g9evA__#uya#(C-K3b0stq5w5kqk&^ z)DAN9we87uhVvIK(@!AN1>gS)d3_HKYp{6@+*MG6ThGe<65*Nq;G|)u9qX=a!WRVJTVajEWIy`>_R7ECsy4dd>W*H{rY918Q#&P}Qd<>{ zd=tJ>Q)-xTQ|>x&SNb6&v8C!|*d)qOJQ}?LRIhd%)9c|?2(0QBxe803i#I)igQen6 zj=-QRdhQ)=52Gww(H2GlcnLNv{p?sR|l(!t+5?T(^>O#NnM%2i*$)} zKB2dpGl#cEp%&5AU*G>`!vpAouWe%*=Rs`_s@AeYZ1J4c3}bXqvkXqq$zxd0hsyWh zC10}M7+-XIDePK$53bcG!HrqBog*5jqysP2%|*27NzTKg1rT{^>@q*h+fQv^I{66t zOG=CKD!s!n>Uni+PhAlpL=}iIVc5hOik&aR`M2tVdkC*K+jb;LW^*2$P&0HHXSvsc z`D`TEIkpl!9-3W;lFsNB6+V+l2Ms-$^l;Sklc0*f%n=M;Iz2{dQ#>m=B?IW$Wb}8d zR|uH3H+_IvgJeu&QS2&%iYUXFac69PPgMuC!t`yEiC;(FVqM~ga_^sFnmJ%*n|Q>+TfvMZpTy-2%b^g9g==VtH1^=Y%$54w#VSBN0Vj zx?L3huy5wjIFzt+n6li#Ran#1IVh7Uqj3fu~eac5obu1xg_2E;%l6hstYgDQE z+A?M^n&gABdKXC?l*ajGj|Dr3evZ)g#CoB+;~q#~GpA^c3y?siEz!VO{YzId#u)qY zGHRrynm-FuDj-d9moNg&!st}o8v(ucW6D5RJuzOiXiF|dWC7clDY3oMD~&D=F!vou z_I6EzhGM@-Vs`q7%O)!?}R9)4{LNVHNwX)<@C zDDgH$3K;Fu!9HlsG>-;>Cs*qsNnNkUgkjtgy=P}Ma>3(5J@qEUJBijb!!V5( z=NNI1?5B*8qvs;lu z{?zCIBfD>HJlEq-S=H+7_^Bn+98iL;q0s%MSU#gqiwa7C+B-%E#H)q~h}L)9;%YbZsP}${DYd z9Z5^UJ3NnYa#spoe=5i8bZ*D(j?R^qw0v}+f+x^^E0;7N&1y85Zs?k57B3)j2za%& z!4gOp-WvmmQcnv^gc~F}P)h5PNwE|(7+LSk%}yCm5f?h&1xYz|1Oq0LTQhy&Vs0F} zUHBY1idtmlje0jr(INVlwf$RYne52iib-PhpUTNp z`?Pne4fczts+=6>xsv7Ye+xW0n5Z4IZ?h(2s~4BH^x;|>-MepOjUgJemHKVmf*j`r zHD(1&yj23hnNFkdpW_lZ?LMgKqi;`_%L;*f@qQvz2HYTid*%>aAdkG?EqIBGqF31N z96UqH%jwWZUqt%zb#AS`pXLioEyI64JtfYvXyPkbr?oMJBPtb6(#vs~@@%=dyq>B&#_~^q076rc z1t}Q6&i0UYn55RQVa)n_f>LK0SUxE~!m*6Eb{u7#;iG11vymb+ivXh)ieS>;Oj%JI zxiengH4K=X2vY(xNSdFhKdZ47zQKxh1=FUz?tg%F2=H1Vm`qv8oe%H89%Dp0H#LyWswwq#)JA{v_^Nnq}=H|795cwfi|8y*`%x` z%7*cmtFoG;Lf2SO1hY17ybQO-MhzFwu4Dm$I8*#YVh+D7R zbgWy)EXyogO(d-`{#o0!1sS*(^3{^VR0Y7B12)%7Et)enE$J>smv%AHKgX?YM`FZJ zBi`d24aM-760GU{#^N`=5;f5HAKYyHNmn+LF<}$n0ulBdjxMyUDm*CqD=wT8Bh;Hd@JJ=DeVZ6)X zy9J3MNvB3Kr%cia*s_etIy$tL83ZMFBA79DLP9Fb#4XUA-!vUVl>w>S0Oy@uW;X|v zoOQdL?&!DYd#Merb6R06y$fbTudiU0>G?r$Dzg9U_Xl~+xy(_4PV&PMxt 
zq<6O8FOZrC(_uVe6n()%L~d{svo6MZVdT``U~ZwD8STsqps((UQi0{O-#?=!TrXV_ z!Yn*%hNLG>-kH7Lqvm-x4+Z_s%;HGon;`x$P#B40S37-et+_rAW*3gMuF=2L7^rra zc8X6&{{sDy`1;i|%2v({O44AWy9>6k`Gb{yjMg)!7BGO)S*ce&WuVu$R*AD<2jx2T4Ux|Iqs9sL}qS$6$6QS2G>5|^Dsjp z>eu8UEJ$ob4H#QA|2`rmhXPt-NVCbGU#>37Fje~1@ILL++5GE1n8d;7JSYrozEV*(YD`R7`GdY&jr#2oHZ|9+#it{4u2{ z``*OWFW!mQi~&oKJ|jykT5l#v3vCTnM;4G2t#+C}cH!EOfQdaC$0&d#U6~NCdz;|Q z8C#{!SslBQ>c!c)1*a%=QfwTw#zfjie)s~btx#lUnan4Ozs^6ys#fr@ZlOOfNWt*P zHHs_4)D;tvD>JlFda$7s>Jd`c~0T5h@eL3Ayqv+6(_v`3{3{V3=ZUFML zcT9J|=&n=Ie^DxLq^NzeeLLB4mr+r8(sNub8wTwu!@O(paxYP=D98Tcf;8pp_ahzU z>;3__0DS%15qNZ`9qGmDODLk6p1G&q_S`T~cMB}%F29KeW7VXpkh)1jRDq828VVngVP%T)k1SYGOk{oO z)^*p))E#$8z3wD-L13T+%nA|62;3!$MaY6UElB}>h;EH_DNoOL2G1bU)ThPlRYm+Rm16K3&LSo+nJBk+SKwN+ zrC>HTDr;0bELC~&U5C_4l=#rMlLIGcd%R}L$Wf9W2EWOPDoEo1hc~+DGNpr&# zfRd>>K@M~%bgwes7>{hp(;jN!K}7F_&mQfOJ{y@YoSLf1jvIrsE;bO}I)?@J|XZ z&o0whDSbE|+0Rx&U2G|T4j>-8?1qAAvTiK(nNe(5$Re)VkT z%331yekJ3x@qy!(JV~#~4G2+u?H$UK`m=YoLahjs37%99cVaOnIrk?#vxP^vq-@Zc zM;g#Qr-&);l4s)`IFNL}(wH$KP#r$*)SBR@;$KL*v-MOtHm9f>#8pRvB%kR9cuMas z_lfkynIfPlJjBNykbFx;!iYbWN1hVx&*CS}j11#?D3>@HUa~$s&gqsi_giiDPM}E^B-t!2Nj*Bz^6JPg< zSf=x9lt!f<#YD-*BPkrmsaw-=jK%Iw;TVipt*GsIwn+Jz%q%^ANb&t)J8V{lba&Ue z6yZ)Iy-RsVg^DrfRgCZR#}a_~L~=k>cXc;jv^MsPVYG?lrsoSzL2CKKYk9+dpea@q z-lFI2Q8zX*wP-O58}In_J-)N!xc3r|aEYfC632bqiGJ&51EOBFi*wyfe|M~PFzo;w z=lyH{E4u@&_6*0PB_=W!c+ELRIzK{qx2E*pFwykG;$s%xT3iQ(KR&*_JGP-d6zcNX zJHmXYGM~})=5CJxbx&ToNxA;rGqkHLck7{?`}1G>dX#c6pick!vF~8|^CSOt1%xaQ zR;G!%&o#1mAV`)xKSuhjG@YHCo}cR=mCPh3{|(aLnl*oEp$R4}$n7@O{RjsYoHCHB zpESKo9gz1Z7S1Wo`RhvAJ5JD3SX&Zn;v}J*bGGzr-I9GGKqpROLL*r*JbHxY=9~vT zcah^lO6_SzFROw%O!MOAz*u(W_8P~X^QKb~X*G2b9Y2k^?2ltxje*A0nx@+CPuovH;1or0yYw+Usq09 z?(oE3qO~&3-bCRQd)K1y3{&e^RIb?K?EH!JUar&`1xY=^QEV0+KQSC3 z3&(j)N>xh4$mZkg0!s}XA>naH8mimc{jM`578@3sWlSga*_lRlVNjZl!(n?5`hz=; z0-oXF7dWqshPpuPnKI-2lXXieebHpv8W*?sdE7b`|V9&2JWOmN(b*qY#srMKCetzZGQ1VQlQw<8EJ^LEZ7s} zk#?Bs=Ag_3;22_)kJ%js9>6d4cHutZ2YU7j-9yhUs(^nN(BjFf*k*m?`rXtzItb!1 
ztD0%~5C?R!W4cgM+<--nZcqoUa=iABk~_+J;TI@6r4%>nM29G)yI{dHHK+ysf%R%H#8FosO;K0X#&3nJd0aUAS3 z^VXGmkRB~WHTe*{_cliDl0^qr&tzA

MyEei_ZEi4HyE15Z$}3`^`hLB|fSCWg_vb%q$1NMH^%pXY z`~ryjj~WSS53+HcH|?^(>2V7((l#zNVs7R|B^S#_Tm!{CZe(i>KizFrG8S(y>Rb0thQd_us)1Tm5N6spc%&d#B8vo%}S%>>qx%k-j$)DR>!gmOC{7h#TZ=_spV;yW}+*?>r{_N0k`KIxzY~%^B zhgHpml-R+w3%j}>`oPlPXIEmsgSlO>w_6Vd;QjfpeH=)Ee1N_G=f^%8{PQFKbp^!U zN*V>WFPKdeeAB^$iOZXv9*eMtT%uK*vz3*Z#n}^puW)tI`89akmsj?lyJJq?_-M%9 zut!!GWEi!F$gccLXu4GGpD+>-{BJn2!AeaVGq`VQa({+`z;__ZC!{65vdF*Jp-J?U_l0UUbtaa9B8qt>(%>;=eoLpY7==eqvi< z=U|mzIj&th%DrXvC(&kEXzIkzP$gi|0k0_lD_xT68VPB z7vtHEG}sI7;hlJHVrfDRYBbeEX<2Ueg>j6DcLjGUP1c>6H%bLV<$9f-xvQLK^E)FW z_NFwRWF7=dM~?m{^d3}#S1L;MZ2h%zC1;`5i3oDWv%J7>O{ zFW&-DW~mOj2(LMT@#@LJ-a}D=AEZH0hy$@0>fx^uEC>u3be+*0B5^ z%T5*v% zhgxFaCq9#Oh8?UL;y#Lk-kdA)k0M}Di&NQqw%bjZ*!dt2w&Gr{5ClbyvW{dgJC?7k z6y9D8mzaUwD7P$D%HYF3)k-YE>&!CjPVI~a!D?BBFsdO~v%}j#z+V)^l{&}RP5*)C zp-*zHV|%SL#)VB?_Ffcq8{uWWyVXjQ-H(b;I(dtbeNmfvPs8&Of6Xf!*VU27$i-yo zSp{jGH9Fp_c`3G9pBG<(@9jyonK{uFZ!L5wPUny_`RZLw#u%lH>OE7Wi4jpSG&;iiI+E0g;W(c1fKuZM?Tw+Ob&Dou21yH2~~OoC08dHz;`9#Kwowf7NkO@wTn->V0|5#pQ-H({fSN%ez(+@sz0_ppWOyZaT|8maM=5(9Wr` zw144PKD4A-QRy-5u;;#)^tQ>%zv8_L+*%T|BZ5MK4W-zgd1b#PI6}{!5LPm#@cgLw zYrLb>P0kL7zr7JYiOF#!_W2M6<~?KnuGsd*D&-r%ONu0qw+{H<@wD`hLVDdD1Y>yu z3NBUyC@*vm<$WYqtAyDQDAM{)xJM34sFo?cAgywHryW7GBQF9g&lG`Mr>Rv!!FP)^m$7d`S|F)T)#qz6`DSX;*Um+@{iv1q>wLyQCrQWnpV$O?&oKDq!BvU z8e>FIH6i>;Crb$laZ^-i=wN>v+7d0wwg|OWUFCZIUAG;htYf!bZl%DNd^(LUsKITndSwQeE8Qt;1!2n54W(xs2Fc`ZFA0 zvjE!jJ&%NW>iPU6^_OA5@FHf18Lll~MjM*_x>abccTInlA*4;roV60%+8aC9<9tk#utDfI|Rlt=d zFJP=xd#cvrySh_ZQXL!1t?HmcL`kCQsdeo0L8%1#*%Yv}_M5!D*QGf35aTXoN)b48 z&&0=2a2Qr-Fxbpnb?z8IAMN^5%r^Awi?JDFW>vSv_a7m85Ndnvv8o-~v3Ta5b6FW` z-;)F>l=JT}Z+AhCm@>QZtE~+l5f;?l4*q*)V7a3E-xCA(+q(5F+{Eg~w59d4cvI+} zNDAK|jP`PZGP1Ze2>lI#+`FwFi>cN>oW9OQ-md9$B5I(913k^-a6FVySI(VysNCKH z0ep4sJ-+}MvY9kPvKlr^@7Vavn|m3B2zSS?6-zp*hBJh8zbox`imn&t&HHfLl855> z0gx`c{T-L$0*KGB^LX5IDmC1Hb|(((%Z3o~C&43t90P^`;(#o(}e+ z^0Jp<&aaZW^Z`O)91Dz(l_heul91JRV?kJ_Sf{-OLvhGv-<>i>shx2kME;+o>;b71 z?6FM2I8B8diEqQ{`Co>Dg!d|_J%st+27j3MUfVPnM|eN=RW|g^@=s$uJjG4>N;G6& 
zi3$>jlD`q=J!NSrp9BA8Z;sRNA=eA$0Y4+7p^qpT|qq#Gl3KRS3nx-%=@#(f>EOfH7d^qc#ZrE zK)mJtpR^`0dB&Sb{b_e2X#SR*y`VmgFc}=7LP3`K!LwlW_F(KS)TWn=&DwQ4D zjoz1ton6o#d(Z3UDZuci&nQ|7FVG{2^~}*Tj7p_of7$i_(>Tk%a`+Kxam3bKNIjNj z8>{gxSic)>_#UedsVg*SkKAiz25A~C!FU%w!2OT#JP@Jy9-j9-ePYnMMR5;0jI_dT zYJ3R6o03fIPsp8w=KR!cNMIb@S(aBtk$0uA))un^N`i`-U0a-f*IMK2eSkDVQj1=Y zA+uB7mZZ==mSnw^xtjea-V9(s?N{w`?7Jig07*hb%@6@?FvXwf@`E*}U|1xw?mTU< z`7~@_2AW`u8L&N;dwCB#0H%#yR2SK28$|(_C;kR70q}W3b91Xh?iuI6K+T@RoB~tca892+Xs>LfSb^1n$^F z>x*EGeYFXAO;moQL;rZNS|@{+Wy7zD@b~QJcnsH-zYyvZ@+s9pyLjh0t|FdfT@$xD z%&06TZ%6cuVjv7(Z4+~QPl!I=S#CzFC9nv`cX?N>CrUcS&~TD0L;CEoKXT0WTCa#7 z#ruSHCUbNi9l-u9gkIu4gOU86fo3JajL4=|%IGoK3q)A@pF0|H;)j zHYK|sG-uwQOw%wN0snP0P`}}av(a_yO0zykH^@lVUp7&kyj(x^LVf4jep^5sxp|~Y zeW6^gh9WQ&nbGAS0vXHacU2t+(Gp(mZyZm#0Q^8T@|vm=_HSg}Teka`_HOvMI7lL;ktpngsNXQ4IjV} z3i4K+&pTWpM)1|)O*8M^T;Sk+Q4&@mU={2z(aJG{E}wJzHLAfx3p-3ap8a#~_J`i9 z8&Q%r2&Ax-XWZ+L!Ql08*}pfw34uORSl58?_n@_nwc@|%(7houeU95fz^7i|{<)VW zQ}Jh*bJwO3m7_>bMv8;MfP+!iyHMvTxZp#NS-GxHBJEv{pQzHk86#!1nB>}rAje~A zsA!!uM?5Pe4mG%GsBOs~ijo8f_z8ApuqV2Y`C0tyBj4mR7}qRbdR35ka`xVmZEg%w zxkU0T`8<@dE21k~*;Hrp&wj<-kstt$w72*09tMQJ?LsLBtkB;!p?q8X%D}doYl-UR zKTm85{jt`|ig*(Q&KkN)v+OH~*n86PXj;YR?-X^)q^6@ZXbi_vNh;SN61| zaIi)_GcgdAu4UVF81jdKPrRd7(`0pbhwFFdF2^r^0+SV{1^y=Avq<9ovD8s;v1 z_tc8C2-Yu3dQoc1UTLYK-x>Qt`HiH?KeYJq3j1Se`UpqSkc zTb^}0=R?+`1rjIznS1)DK=q!8`tu>>{S@JdYC56no>{9tlsvkh zZ3NsyBNRGjnjeO^mAN=<^7kmUFolr)6LEgWG3646nf>AUluZvJ`IMZ|ebm_t4etDN z&S*eT+5<2&3b{kv@kFk%?{%8zCZVg#Fp(?Hl)}`_s#ILDQ^UWV=myDBymNn5o6`zy zeVRH^H2fL##ProLxSNO z_7EV7(*dt;b=vh@?oS{b*UHqm^XXU&bZu!L5kOyB?c6VNn-!o>&-u2Sfn`76XWwK@ zwbcA|L!PkF_YHY&lAr$+y57^CfNZhDJ%f^gkhLA2+Oc~eq;BbDpLko!)<^Qqh=8sP-*;tJa5pL@ z|0K0%&m;j+`(nM^bQxABb#qhi4_h~|`p#4<9!43xN^RM3*a8uta+g}kmL8`me{mIh7P6Qt=1#lUZUO}*0TimFMnIHB<)xz z`DqMEej7Wo>P+seZT*nG>Mz%aU!3;&~B}F zyPhG}g1pXp^!GM6kyr}(x$xsAPHFj0mc!3~ z&2&8znVN3>{8zz!ohY7<2Qh>!Tt?)zLtm)WuqYBDU!L@G)=>}M7L&L9H z{lmlTEifx(@Mu1ImL%kqZHV=ADml2x&-Mg5Kj62KCtSit-Bc!LSe98tQol`T$Ik22 
z`g3}h6t9&N4aTxDeq1D`@vHq+XtX@=ezT=Bd(l6&eYuMnUaJflFY28hMD1< zqzf*-X4#x}V_WwLV~61gJAWkJx)ChBY*N@hVp#CyMmY!TrC3hVLd~dvI2Pp}hsviV z_xD-E+c&TW`foZ1ik3-v&*h~n%0w2JjTV65a}q}uTQv+V;D}nHoFPGRKENL@DO-m{ zjhCv$a+V%(KE%WYYE;a48ok2%7)dLC{6p=w?hbE^J@z4}=M5Xy%2(8Tqs-tT?{?^r zMZw^F19>HcJx97H9VQIg!XMh1If+q>la*TwJSc5FyW_mL?VnUhmM4EH{WXr}@Tmlg z*Bhyqv+83SU+^~Z9r=*$ynO}t8-1#UtLlf2#Lef6=TC{hD-;pukK%QGclOqUC%11Z zF|BMD$jPS|+A4pTmFq>NH4FDZ_y}uK>oUFz6*JU|T-nVLi`ZTsoq8$IalycDTK3K1 zx`b+f2J)6@2=y}gvN=Jpa3Lhlp7wA9aD|g9 z^dbVheC0>GgVR;PiA~LcuBQ_puM%A)*rc@rEK+o+xjJicofoSG>Ty%v5-C!e^Hu}Y zAnlH&jxop^?vvyggsjN=b+^23kU>^8H+4SdRq=0z8?EUK)|tqUO7x4R}UJ+Bec&bMUimZitxD!Wb1oLkiFXmx9Y{{ zNPC%h&RJ>R*uLlLo0FI`!A+fHdD}RIj^?oTQlm0R*DL}tX6GkyaaJAHBZU{&7qO0L zy?JjU>pH_jS6DQnp-^$|(Gk$`kE;U6blIFG3yps2v8V7RB^UB2ce*&luQ<6TNM(c5 zqf+*jyZcB#it#?b9alvr&TwM898=IYkYq0v$OO{f=!glH zsfj6}t>gqT!u#zC+5;{_?lC+#F27m)$H-E@Y&;9-X`(PldBaHEI)Z;YL2Ip2@Iq>U zlLKMhj3Ad7_gb;XKS9n?mF1D#Nol9}VrdC$_O!afQG*6+|4%O*uVqKSr_Sn7%T46S z4!~Gw*1d)w$z?BjyDa#?d$RwH$he5-J?E-Yq>j zt;7Yr6Xk=6dZP+4W_ZxllyK?Y8Q6-A`GfO%2iTMpUnu&1x_lB1vanG4lwn_`WBe$0 z0JqqQqq^S16gEnix`bZ1u8hD4Ecc(25qrK~>>T(;&?YT20ONJ*?iPJMXkP!dw{`rj z>b^q@ULW<2pecQkoP|YKh_9<1Eubq7Q#fT!OY*YuY3<3d(^Ak`&`{#+%)HW?N~pZ_ zxRWh5D4|}BeCeJMF8&V{U#alG9#GXm*G?0q*3VkugE=(i4?4+dohJ24>+f!D=ntyI zuasky6U}zYG0o3Ae$^XG+P0QI(w*@~x1npiXPX-5rjmwnMcGJeHO}G!m-c&= zw0Uy@rW~E*b3P}{rJ6d04h!RX3D^w`D*D> zz1<-98!ga8Ifi|6zH=qq)Wn=Hq@`cX1IOgH z^lljIg={s2ES)RXU2^@LJb#g$f#4j_^!Z->V~oQ+S2BFe{J*%qVSKOT0mLwb1As4wIw>_(D6Nt#WM$2jEe#!(?O+!Bo%|Zi; z;3TY9v?3Bn8zfXNg@@@67NxmoR(MUB%uOa3E02WgziRv(q)~@UQoL~=zU3FAG#u`x z2-kX=#Z!;-xd5{N`c9O%xx}$viSQV9K>j8H(W0!y!^p_?myw*Z?Jmg<;K+sNP6Nli zww>=yG$VF9lz(JM^wOx{#aFr-&P)nr2t}&}mOj=(tNTpDi-;Y138IQ#uRyp2mz(p6 zlN=g0G>La+)*tmRU|8g+n=V5_8rooXW~?RrTph3@HNN~FUE*%;N05A9-~qqfD=WH8 zlpET*Da@4BkklG0t@U`ZJ|UO@FHwc4Fg(LpAb-=`;EN10iVkXSZ{S#Kw!W1~WXU`2 z@6S@a+qoAVY&nmgeX0-{eP*_GDj{|L#rp(Vy(7I(84;5#%+AHRzwqis9#g&pAHaz& z3-TTqQTU_eD9qD;K2=^LI(6!l}!XnBPwpekGy}TH*)L;f}-!RCMe{{9X{NaRR 
z7BFrvI`ZEy%e>vZDC2+|lsQif5nTu~d-2NcK%D7^#~6?Nqi<_@^k)RtZnl%}onO4U zai7j^!No(?_?Hqj3G^_-!8574ISEkrTH1thleXg^4YeS~gnqLm`-RudWO25oHw&0& zqSq=O&JSpu&m&whQf|Fe{)+8Yu}Jn(v>bJXR=VJUE49=G&g(YiYeGTxtV4`hORqGn zI@4TazLcLUIyN>mN|08k?j?A-R}{m!o0?Wlookb;&@QX>d_hj7|niya{C@m%I`nlYBUEH_C{2q7e7dv~y_|t9ba`5NQCRYkMe47M zLYcH!N)_DBqF=2(IaLALZKqTBbLMNzLCXj~m&Q$-X%OhR&Zi0z%HLx06AV-tb;_sP zqBwHfx_08Sz?wmEmJF?sgs*;dm~}0Al3T4u2uY^34Ymr;BKV(>9wVroVECA8C#f^_ z(ci2%@}2$@31VQn7mZr*0SI;EpmQhL+@;k0##CqDQL8`l^94RlMSbeY{6LGm7t%HG2J@TuMz^mh(?G!} z*`dYyLUyrB;ew;3v1ygN?ym(?_MJ{edY7_;#nFuqiu8&NFEKoAOd18gUK+pwUmp}% zm+6tgX}D45%YtNL^$b`ExDdDAvFwq$vn0$Sy}h04KUbtIJ(!ngr}BiA$~kdVm>oPs zr%{zX&G1wlJ;f^e1TFg3&@HygtLBY*x1T7Qt5({z!^CD_y%?yw*ld zs!on+e9-9xGJqXjG~He}3$n=Fw7bmC^Bi*$cnJrBmbu7pXV1#%Yu8kWS8XV~F0J&N z%rwlbvB^1a7INovV|s^qA+^}|b+2Nl z#KDzJ*91HwocnZdS=T32OaFG(wG(D%xViO;+b#ar+Sv%_LBG=4tK0Ls4z=4-@8wyJ zu4Y*pC$KC9Lg^jW$3D!y^_ca3`-YlH>a?vYSr1Lni2;)(t8%uV(lNK!$ZfpUV)yE= zl74i(z=?`!ZN*X>6GuHkrBNLs5d+?G-GG)iB#~6U1VvXIxFiZ??q)c6f9OfHcRB$g zeBhk*qK21gSPFL$gs@ z!G5;m!5OC?HgnD%8iD6ST65%~Uo5T({uR6rFE(Smarcis$ABb9DqBOh#_8VSmV7>b zITG^b7m&8Mp5JSqr;Ia6oZyg6m0Dakw?`)%>k=0&IkkD6{-euvCV>kM_kELEkqFbR z3YP9*O&j~_4&MveJmD#es{W=2crpZBfOkRabCmurY+>jIrLpD-ypu4v@s*8TbfFsJ zrB_`00#gj(49^d0hr=ZDG>n(I(|{DgB2~C=ufDuF!SmTi_U_F0&)Ph9!S;;yFNcf2 zJeMHa+=8buM9Q6)^?2sv9}wL`>tYPoQRfY(kJ{1dSvrjj=z(410fA}8-+^q!)0E+> ztm@Xz)&^?7CRKyru%J|Uj#)gX@jCK$k1?%406eWz&A3?cu#C>z4n_E#wvWG`G7HyI z7$#ZLuKK^f1j=jrgas`|iy7U`7n8usK<6IoWFLOGM@2MDnH+AyeMvF3`>(SH>Lt;Y$pil>+N+wu>7F-=nL%KcSJt>|T^lV^?GNgjl1`FRjO zAK8Q{rHcl1%Y;S=JfV4Y0vSx2*eN*Tyn}dLb1$nS#pE|T0(RE9mN;E&OQ4@#x`Mo^ zL66CFtvtM-JpKtrM~gM@&(`+R;1q|G!%83GF)AWLjxSt<*z3o@U7yKbWL-cre&0nc=c4I^b0LFWsNs$WA0S`! 
zOE8)44u|Fh*^(HpbmX-l)I?JI9S}BQK*AMAvwc20axJR#C0hhSArq@GH;VT9xZ7cT zM-B5$U$ZcA^UksA+L!LLJ^I$|VENLXbe9jZ6=4cdu*ySVO{&#+QId~QvBTwdE--~a z)Ju14A0I@lU5e3as(-RBUKhD(cPY>~`n2z9U+zTG)J1Dq)yM9tfFN$j-gM`&mLl91fmpUs+ zs(rL|IPQ-R85$FeArn}wCnabRqo^FtS=hPjI(c?ru}?CZ9ORFj#UUR*Lfk{diP%Kh z1Gg#<=N)q(yuaudz8=sM2XrcsX5Eh~C<-?B_^Bq9)^7{96DwNJ0^u;LxZKd>vya^P*lV+hoN!v1$4LX02wE zRT8IW(IK~T%ugm|`;pwra_ekOq}X7BvF}g`H?_5qgjaFO`-dpohsmSn~NWvAS!-xT%*6asDr)0+yqK}be%p43dp#5*{^hl}Qo*w%+2LOo zP9Fv+iK@~U%#vI4vEs!h$@uf9EucS{V(wv)MQ8dJ~@Xwxf<^R2`d5OnBAf zW1AfFp6%%EWKxT_mt}oYHNz2(+g?mU%JdVV#@WuEbW5$v*Nlt3q{P@7-2`V33LX>A zy&TVle&*oK~h0<=XuUF;~aR!oU3#VrI)%!0l^BkE}$M9yah*1u9nNPAG7uJ z<;s>{IhNx&IpPZaJR23WM>))`{A6eEL8iy@ zlGtXb-}4E+Fw=wIdD=>?qWVSVcmTX_M$}q;#vo4p7x~1drlsmnzkoP`&V&auWM22bcWjpm-RC0FJNX&8cgn@wsf3(r zP9R;{9t5uu0~Sa2EFaSyL>J4IbdYbtoezT91t|b>mA*9J|7Fu?Ni6|-J~=>$@2rci zT1QYpno>l3@{982gHor~&h%9WLm3f8=`2c|qmRX_du;!o1y}s}RLf!Rne{=2*Hiqj z&h#q}H*AWRib&rjnJip&tY*%+ooUO0W>24$IQ{$npU0NW^qODh*}vbQ|In%hFSeP) zFn^uYRQxe^np?k>rcJP>Q9^EOn_z3*NrSZeyw{iLssH?){O#a1@v4g!ve!JK&h{8D z4m!W*L&cXqcpHV2#B=llxk z^1zSI6>UcQcOKWT-m_?L-?i=@ zH-G8LrBSbT-jc0xbts*@eQySEB&@W|rfB+IJ z`&J5DeR4nUymCg@k{t?GhZ;jC>VJ*jEoiuK_4>!pPK(qxNy&aV(EIwliOZLgry&Us zetlg&F?U(?TXQxwzp9~Hl+-wrO) zb@a4)rpG@0OWOb4jC;?$Vcg_?Pi)5izV^mb^h1B7x$4UQ!9`8-lZ4sQ zW~){Up9QAZ|3}`$WOcvd^H)Fr#1#NI=uWEOT z-2W3f5u2@VUgP-l;H=!8ylo#&*?eZ?e5g2~OjQw7mbsP3m6jTI2e;n(R6!m_Q>1MtAa-E>vzZoT+Ho?~gzW>;}YuCOhYq%#f*VQbl^8I>J zTz-mxmsr?cFCSpo0(*Yms}jMzK}63F*c*g&4AI|N)elF{r5}E+W%F(X? From 83c1159563b243faf30cfad9286153a63448ef6c Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Thu, 22 Jun 2023 16:40:18 -0700 Subject: [PATCH 109/193] Bust README cache --- README.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.rst b/README.rst index 42c7fc26..22a086aa 100644 --- a/README.rst +++ b/README.rst @@ -30,7 +30,7 @@ Performance In our testing asyncpg is, on average, **5x** faster than psycopg3. -.. 
image:: https://raw.githubusercontent.com/MagicStack/asyncpg/master/performance.png +.. image:: https://raw.githubusercontent.com/MagicStack/asyncpg/master/performance.png?fddca40ab0 :target: https://gistpreview.github.io/?0ed296e93523831ea0918d42dd1258c2 The above results are a geometric mean of benchmarks obtained with PostgreSQL From faa94d1e3ce103c458dc0b5c5f16dc15f3d3a1a5 Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Thu, 6 Jul 2023 16:24:06 -0700 Subject: [PATCH 110/193] Move most metadata to pyproject.toml (#1045) Catch up with the times. --- .github/workflows/release.yml | 2 +- Makefile | 15 ++++--- asyncpg/connection.py | 17 ++++---- docs/conf.py | 14 +------ docs/index.rst | 5 ++- pyproject.toml | 63 +++++++++++++++++++++++++++- setup.py | 78 +++++++---------------------------- 7 files changed, 99 insertions(+), 95 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 1eba94a5..406c3a3c 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -152,7 +152,7 @@ jobs: - name: Build docs run: | - pip install -e .[dev] + pip install -e .[docs] make htmldocs - name: Checkout gh-pages diff --git a/Makefile b/Makefile index 7a09181c..67417a3f 100644 --- a/Makefile +++ b/Makefile @@ -20,17 +20,16 @@ clean: compile: - $(PYTHON) setup.py build_ext --inplace --cython-always + env ASYNCPG_BUILD_CYTHON_ALWAYS=1 $(PYTHON) -m pip install -e . debug: - ASYNCPG_DEBUG=1 $(PYTHON) setup.py build_ext --inplace - + env ASYNCPG_DEBUG=1 $(PYTHON) -m pip install -e . 
test: - PYTHONASYNCIODEBUG=1 $(PYTHON) setup.py test - $(PYTHON) setup.py test - USE_UVLOOP=1 $(PYTHON) setup.py test + PYTHONASYNCIODEBUG=1 $(PYTHON) -m unittest -v tests.suite + $(PYTHON) -m unittest -v tests.suite + USE_UVLOOP=1 $(PYTHON) -m unittest -v tests.suite testinstalled: @@ -38,9 +37,9 @@ testinstalled: quicktest: - $(PYTHON) setup.py test + $(PYTHON) -m unittest -v tests.suite htmldocs: - $(PYTHON) setup.py build_ext --inplace + $(PYTHON) -m pip install -e .[docs] $(MAKE) -C docs html diff --git a/asyncpg/connection.py b/asyncpg/connection.py index 0b13d356..d54927cf 100644 --- a/asyncpg/connection.py +++ b/asyncpg/connection.py @@ -2007,14 +2007,15 @@ async def connect(dsn=None, *, :param SessionAttribute target_session_attrs: If specified, check that the host has the correct attribute. Can be one of: - "any": the first successfully connected host - "primary": the host must NOT be in hot standby mode - "standby": the host must be in hot standby mode - "read-write": the host must allow writes - "read-only": the host most NOT allow writes - "prefer-standby": first try to find a standby host, but if - none of the listed hosts is a standby server, - return any of them. + + "any": the first successfully connected host + "primary": the host must NOT be in hot standby mode + "standby": the host must be in hot standby mode + "read-write": the host must allow writes + "read-only": the host most NOT allow writes + "prefer-standby": first try to find a standby host, but if + none of the listed hosts is a standby server, + return any of them. If not specified will try to use PGTARGETSESSIONATTRS from the environment. 
diff --git a/docs/conf.py b/docs/conf.py index 0ea293b8..cb371299 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -1,6 +1,5 @@ #!/usr/bin/env python3 -import alabaster import os import sys @@ -39,7 +38,7 @@ copyright = '2016-present, the asyncpg authors and contributors' author = '' release = version -language = None +language = "en" exclude_patterns = ['_build'] pygments_style = 'sphinx' todo_include_todos = False @@ -48,12 +47,6 @@ # -- Options for HTML output ---------------------------------------------- html_theme = 'sphinx_rtd_theme' -# html_theme_options = { -# 'description': 'asyncpg is a fast PostgreSQL client library for the ' -# 'Python asyncio framework', -# 'show_powered_by': False, -# } -html_theme_path = [alabaster.get_path()] html_title = 'asyncpg Documentation' html_short_title = 'asyncpg' html_static_path = ['_static'] @@ -66,11 +59,6 @@ html_show_sourcelink = False html_show_sphinx = False html_show_copyright = True -html_context = { - 'css_files': [ - '_static/theme_overrides.css', - ], -} htmlhelp_basename = 'asyncpgdoc' diff --git a/docs/index.rst b/docs/index.rst index 93671abc..833a7039 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -1,5 +1,6 @@ -.. image:: https://travis-ci.org/MagicStack/asyncpg.svg?branch=master - :target: https://travis-ci.org/MagicStack/asyncpg +.. image:: https://github.com/MagicStack/asyncpg/workflows/Tests/badge.svg + :target: https://github.com/MagicStack/asyncpg/actions?query=workflow%3ATests+branch%3Amaster + :alt: GitHub Actions status .. 
image:: https://img.shields.io/pypi/status/asyncpg.svg?maxAge=2592000?style=plastic :target: https://pypi.python.org/pypi/asyncpg diff --git a/pyproject.toml b/pyproject.toml index 71beae87..36540bd0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,7 +1,68 @@ +[project] +name = "asyncpg" +description = "An asyncio PostgreSQL driver" +authors = [{name = "MagicStack Inc", email = "hello@magic.io"}] +requires-python = '>=3.7.0' +readme = "README.rst" +license = {text = "Apache License, Version 2.0"} +dynamic = ["version"] +keywords = [ + "database", + "postgres", +] +classifiers = [ + "Development Status :: 5 - Production/Stable", + "Framework :: AsyncIO", + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Operating System :: POSIX", + "Operating System :: MacOS :: MacOS X", + "Operating System :: Microsoft :: Windows", + "Programming Language :: Python :: 3 :: Only", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: Implementation :: CPython", + "Topic :: Database :: Front-Ends", +] +dependencies = [ + 'typing-extensions>=3.7.4.3;python_version<"3.8"', +] + +[project.urls] +github = "https://github.com/MagicStack/asyncpg" + +[project.optional-dependencies] +test = [ + 'flake8~=5.0.4', + 'uvloop>=0.15.3; platform_system != "Windows"', +] +docs = [ + 'Sphinx~=5.3.0', + 'sphinxcontrib-asyncio~=0.3.0', + 'sphinx_rtd_theme>=1.2.2', +] + [build-system] -requires = ["setuptools>=42", "wheel"] +requires = [ + "setuptools>=60", + "wheel", + + "Cython(>=0.29.24,<0.30.0)" +] build-backend = "setuptools.build_meta" +[tool.setuptools] +zip-safe = false + +[tool.setuptools.packages.find] +include = ["asyncpg", "asyncpg.*"] + +[tool.setuptools.exclude-package-data] +"*" = ["*.c", "*.h"] + [tool.cibuildwheel] build-frontend = 
"build" test-extras = "test" diff --git a/setup.py b/setup.py index af0bcdc3..269e2fd7 100644 --- a/setup.py +++ b/setup.py @@ -27,30 +27,6 @@ CYTHON_DEPENDENCY = 'Cython(>=0.29.24,<0.30.0)' -# Minimal dependencies required to test asyncpg. -TEST_DEPENDENCIES = [ - 'flake8~=5.0.4', - 'uvloop>=0.15.3; platform_system != "Windows"', -] - -# Dependencies required to build documentation. -DOC_DEPENDENCIES = [ - 'Sphinx~=4.1.2', - 'sphinxcontrib-asyncio~=0.3.0', - 'sphinx_rtd_theme~=0.5.2', -] - -EXTRA_DEPENDENCIES = { - 'docs': DOC_DEPENDENCIES, - 'test': TEST_DEPENDENCIES, - # Dependencies required to develop asyncpg. - 'dev': [ - CYTHON_DEPENDENCY, - 'pytest>=6.0', - ] + DOC_DEPENDENCIES + TEST_DEPENDENCIES -} - - CFLAGS = ['-O2'] LDFLAGS = [] @@ -170,6 +146,18 @@ def finalize_options(self): if getattr(self, '_initialized', False): return + if not self.cython_always: + self.cython_always = bool(os.environ.get( + "ASYNCPG_BUILD_CYTHON_ALWAYS")) + + if self.cython_annotate is None: + self.cython_annotate = os.environ.get( + "ASYNCPG_BUILD_CYTHON_ANNOTATE") + + if self.cython_directives is None: + self.cython_directives = os.environ.get( + "ASYNCPG_BUILD_CYTHON_DIRECTIVES") + need_cythonize = self.cython_always cfiles = {} @@ -235,47 +223,16 @@ def finalize_options(self): setup_requires = [] -if (not (_ROOT / 'asyncpg' / 'protocol' / 'protocol.c').exists() or - '--cython-always' in sys.argv): +if ( + not (_ROOT / 'asyncpg' / 'protocol' / 'protocol.c').exists() + or os.environ.get("ASYNCPG_BUILD_CYTHON_ALWAYS") +): # No Cython output, require Cython to build. 
setup_requires.append(CYTHON_DEPENDENCY) setuptools.setup( - name='asyncpg', version=VERSION, - description='An asyncio PostgreSQL driver', - long_description=readme, - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Framework :: AsyncIO', - 'Intended Audience :: Developers', - 'License :: OSI Approved :: Apache Software License', - 'Operating System :: POSIX', - 'Operating System :: MacOS :: MacOS X', - 'Operating System :: Microsoft :: Windows', - 'Programming Language :: Python :: 3 :: Only', - 'Programming Language :: Python :: 3.7', - 'Programming Language :: Python :: 3.8', - 'Programming Language :: Python :: 3.9', - 'Programming Language :: Python :: 3.10', - 'Programming Language :: Python :: Implementation :: CPython', - 'Topic :: Database :: Front-Ends', - ], - platforms=['macOS', 'POSIX', 'Windows'], - python_requires='>=3.7.0', - zip_safe=False, - author='MagicStack Inc', - author_email='hello@magic.io', - url='https://github.com/MagicStack/asyncpg', - license='Apache License, Version 2.0', - packages=setuptools.find_packages( - exclude=['tests', 'tools'], - ), - package_data={ - # Cython sources needed for tracebacks - "": ["*.pyx", "*.pxd", "*.pxi"], - }, ext_modules=[ setuptools.extension.Extension( "asyncpg.pgproto.pgproto", @@ -291,9 +248,6 @@ def finalize_options(self): extra_compile_args=CFLAGS, extra_link_args=LDFLAGS), ], - install_requires=['typing-extensions>=3.7.4.3;python_version<"3.8"'], cmdclass={'build_ext': build_ext, 'build_py': build_py, 'sdist': sdist}, - test_suite='tests.suite', - extras_require=EXTRA_DEPENDENCIES, setup_requires=setup_requires, ) From 6043d913651148a216b5022708ad1bee236ca0a6 Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Thu, 6 Jul 2023 16:53:47 -0700 Subject: [PATCH 111/193] Fix closing of connection discarded by session target attrs (#1046) `asyncio.gather` takes a variadic, not an iterable and `return_exceptions` was masking the issue. 
--- asyncpg/connect_utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/asyncpg/connect_utils.py b/asyncpg/connect_utils.py index 8b29c0fc..6d788f60 100644 --- a/asyncpg/connect_utils.py +++ b/asyncpg/connect_utils.py @@ -1003,7 +1003,7 @@ async def _connect(*, loop, timeout, connection_class, record_class, **kwargs): chosen_connection = random.choice(candidates) await asyncio.gather( - (c.close() for c in candidates if c is not chosen_connection), + *(c.close() for c in candidates if c is not chosen_connection), return_exceptions=True ) From bc79953ee478bd2526d66343fa53f61b67fe9234 Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Thu, 6 Jul 2023 16:54:02 -0700 Subject: [PATCH 112/193] Minor edits to the `target_session_attrs` patch. --- asyncpg/connect_utils.py | 6 ++---- asyncpg/connection.py | 27 +++++++++++++++------------ 2 files changed, 17 insertions(+), 16 deletions(-) diff --git a/asyncpg/connect_utils.py b/asyncpg/connect_utils.py index 6d788f60..b5beb4e8 100644 --- a/asyncpg/connect_utils.py +++ b/asyncpg/connect_utils.py @@ -611,21 +611,19 @@ def _parse_connect_dsn_and_args(*, dsn, host, port, user, 'a Dict[str, str]') if target_session_attrs is None: - target_session_attrs = os.getenv( "PGTARGETSESSIONATTRS", SessionAttribute.any ) try: - target_session_attrs = SessionAttribute(target_session_attrs) - except ValueError as exc: + except ValueError: raise exceptions.InterfaceError( "target_session_attrs is expected to be one of " "{!r}" ", got {!r}".format( SessionAttribute.__members__.values, target_session_attrs ) - ) from exc + ) from None params = _ConnectionParameters( user=user, password=password, database=database, ssl=ssl, diff --git a/asyncpg/connection.py b/asyncpg/connection.py index d54927cf..a7f249ba 100644 --- a/asyncpg/connection.py +++ b/asyncpg/connection.py @@ -2008,18 +2008,18 @@ async def connect(dsn=None, *, If specified, check that the host has the correct attribute. 
Can be one of: - "any": the first successfully connected host - "primary": the host must NOT be in hot standby mode - "standby": the host must be in hot standby mode - "read-write": the host must allow writes - "read-only": the host most NOT allow writes - "prefer-standby": first try to find a standby host, but if - none of the listed hosts is a standby server, - return any of them. - - If not specified will try to use PGTARGETSESSIONATTRS - from the environment. - Defaults to "any" if no value is set. + - ``"any"`` - the first successfully connected host + - ``"primary"`` - the host must NOT be in hot standby mode + - ``"standby"`` - the host must be in hot standby mode + - ``"read-write"`` - the host must allow writes + - ``"read-only"`` - the host most NOT allow writes + - ``"prefer-standby"`` - first try to find a standby host, but if + none of the listed hosts is a standby server, + return any of them. + + If not specified, the value parsed from the *dsn* argument is used, + or the value of the ``PGTARGETSESSIONATTRS`` environment variable, + or ``"any"`` if neither is specified. :return: A :class:`~asyncpg.connection.Connection` instance. @@ -2086,6 +2086,9 @@ async def connect(dsn=None, *, .. versionchanged:: 0.26.0 Added the *direct_tls* parameter. + .. versionchanged:: 0.28.0 + Added the *target_session_attrs* parameter. + .. _SSLContext: https://docs.python.org/3/library/ssl.html#ssl.SSLContext .. 
_create_default_context: https://docs.python.org/3/library/ssl.html#ssl.create_default_context From fdf559f6a75423da40fd605281e9b62335871553 Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Thu, 6 Jul 2023 17:03:39 -0700 Subject: [PATCH 113/193] Move pytest and coverage config to pyproject.toml --- .coveragerc | 12 ------------ pyproject.toml | 24 +++++++++++++++++++++++- pytest.ini | 4 ---- 3 files changed, 23 insertions(+), 17 deletions(-) delete mode 100644 .coveragerc delete mode 100644 pytest.ini diff --git a/.coveragerc b/.coveragerc deleted file mode 100644 index 081835d3..00000000 --- a/.coveragerc +++ /dev/null @@ -1,12 +0,0 @@ -[run] -branch = True -plugins = Cython.Coverage -source = - asyncpg/ - tests/ -omit = - *.pxd - -[paths] -source = - asyncpg diff --git a/pyproject.toml b/pyproject.toml index 36540bd0..b4c82d6f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -36,7 +36,7 @@ github = "https://github.com/MagicStack/asyncpg" [project.optional-dependencies] test = [ - 'flake8~=5.0.4', + 'flake8~=6.0.0', 'uvloop>=0.15.3; platform_system != "Windows"', ] docs = [ @@ -80,3 +80,25 @@ test-command = """\ && chmod -R go+rX "$(dirname $(dirname $(dirname $PY)))" \ && su -l apgtest -c "$PY {project}/tests/__init__.py" \ """ + +[tool.pytest.ini_options] +addopts = "--capture=no --assert=plain --strict-markers --tb=native --import-mode=importlib" +testpaths = "tests" +filterwarnings = "default" + +[tool.coverage.run] +branch = true +plugins = ["Cython.Coverage"] +parallel = true +source = ["asyncpg/", "tests/"] +omit = ["*.pxd"] + +[tool.coverage.report] +exclude_lines = [ + "pragma: no cover", + "def __repr__", + "if debug", + "raise NotImplementedError", + "if __name__ == .__main__.", +] +show_missing = true diff --git a/pytest.ini b/pytest.ini deleted file mode 100644 index b8a25b5f..00000000 --- a/pytest.ini +++ /dev/null @@ -1,4 +0,0 @@ -[pytest] -addopts = --capture=no --assert=plain --strict-markers --tb=native --import-mode=importlib 
-testpaths = tests -filterwarnings = default From a1a7b1a596c81c93575f16d3ea6f50a1286a52f9 Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Thu, 6 Jul 2023 17:21:07 -0700 Subject: [PATCH 114/193] Scale flake8 back to 5.0 (#1048) Looks like later versions dropped Python 3.7 support --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index b4c82d6f..3010936d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -36,7 +36,7 @@ github = "https://github.com/MagicStack/asyncpg" [project.optional-dependencies] test = [ - 'flake8~=6.0.0', + 'flake8~=5.0', 'uvloop>=0.15.3; platform_system != "Windows"', ] docs = [ From 9a12d149a8ff5f1a0d252a70a7015dc2876c0148 Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Thu, 6 Jul 2023 16:21:56 -0700 Subject: [PATCH 115/193] asyncpg v0.28.0 Minor fixes and improvements. Changes ======= * Do not try to cleanup statements (#981) (by @fvannee in d2e710fe for #981) * Add Pool.is_closing() method (#973) (by @singingwolfboy in 9cb2c1ce for #973) * Fix test_tls_version for LibreSSL (#974) (by @CyberTailor in 7df9812a for #974) * Handle environments without home dir (#1011) (by @LeonardBesson in 172b8f69 for #1011) * fix: salt and iterations parsing for scram (#1026) (by @trigonometr in 7443a9e7 for #1026) * Add support for target_session_attrs (#987) (by @JesseDeLoore in bf74e88b for #987) * Add support for READ UNCOMMITTED (#1039) (by @benwah in 2f20bae7 for #1039) * Update benchmarks, add psycopg3 (#1042) (by @elprans in 7d4fcf04 for #1042) --- .github/workflows/release.yml | 6 ++---- asyncpg/_version.py | 2 +- 2 files changed, 3 insertions(+), 5 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 406c3a3c..a294f9c3 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -80,7 +80,7 @@ jobs: - uses: actions/setup-python@v4 with: python-version: "3.x" - - run: pip install cibuildwheel==2.10.2 + - 
run: pip install cibuildwheel==2.13.1 - id: set-matrix run: | MATRIX_INCLUDE=$( @@ -118,13 +118,11 @@ jobs: if: runner.os == 'Linux' uses: docker/setup-qemu-action@v2 - - uses: pypa/cibuildwheel@v2.10.2 + - uses: pypa/cibuildwheel@v2.13.1 with: only: ${{ matrix.only }} env: CIBW_BUILD_VERBOSITY: 1 - CIBW_MANYLINUX_X86_64_IMAGE: manylinux_2_28 - CIBW_MANYLINUX_AARCH64_IMAGE: manylinux_2_28 - uses: actions/upload-artifact@v3 with: diff --git a/asyncpg/_version.py b/asyncpg/_version.py index 693e3bed..32a68a56 100644 --- a/asyncpg/_version.py +++ b/asyncpg/_version.py @@ -10,4 +10,4 @@ # supported platforms, publish the packages on PyPI, merge the PR # to the target branch, create a Git tag pointing to the commit. -__version__ = '0.28.0.dev0' +__version__ = '0.28.0' From 511aeb2368f6a6e03b2bd5f58127d98e91c3eb54 Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Thu, 6 Jul 2023 19:29:37 -0700 Subject: [PATCH 116/193] Post-release version bump --- asyncpg/_version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/asyncpg/_version.py b/asyncpg/_version.py index 32a68a56..ddc3a79a 100644 --- a/asyncpg/_version.py +++ b/asyncpg/_version.py @@ -10,4 +10,4 @@ # supported platforms, publish the packages on PyPI, merge the PR # to the target branch, create a Git tag pointing to the commit. 
-__version__ = '0.28.0' +__version__ = '0.29.0.dev0' From 87ab14315ef867df68861f35b7e2cc3158d65afd Mon Sep 17 00:00:00 2001 From: Bryan Forbes Date: Tue, 15 Aug 2023 18:07:11 -0500 Subject: [PATCH 117/193] Drop support for Python 3.7 (#1064) --- .flake8 | 2 +- .github/workflows/tests.yml | 2 +- README.rst | 2 +- docs/index.rst | 2 +- pyproject.toml | 6 +----- setup.py | 4 ++-- tests/test_adversity.py | 6 ++---- tests/test_listeners.py | 6 ++---- 8 files changed, 11 insertions(+), 19 deletions(-) diff --git a/.flake8 b/.flake8 index 3a8b87a8..decf40da 100644 --- a/.flake8 +++ b/.flake8 @@ -1,3 +1,3 @@ [flake8] ignore = E402,E731,W503,W504,E252 -exclude = .git,__pycache__,build,dist,.eggs,.github,.local,.venv +exclude = .git,__pycache__,build,dist,.eggs,.github,.local,.venv,.tox diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index a120e9a6..8c069709 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -17,7 +17,7 @@ jobs: # job. strategy: matrix: - python-version: ["3.7", "3.8", "3.9", "3.10", "3.11"] + python-version: ["3.8", "3.9", "3.10", "3.11"] os: [ubuntu-latest, macos-latest, windows-latest] loop: [asyncio, uvloop] exclude: diff --git a/README.rst b/README.rst index 22a086aa..ef2678db 100644 --- a/README.rst +++ b/README.rst @@ -13,7 +13,7 @@ of PostgreSQL server binary protocol for use with Python's ``asyncio`` framework. You can read more about asyncpg in an introductory `blog post `_. -asyncpg requires Python 3.7 or later and is supported for PostgreSQL +asyncpg requires Python 3.8 or later and is supported for PostgreSQL versions 9.5 to 15. Older PostgreSQL versions or other databases implementing the PostgreSQL protocol *may* work, but are not being actively tested. diff --git a/docs/index.rst b/docs/index.rst index 833a7039..dd4768b4 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -15,7 +15,7 @@ PostgreSQL and Python/asyncio. 
asyncpg is an efficient, clean implementation of PostgreSQL server binary protocol for use with Python's ``asyncio`` framework. -**asyncpg** requires Python 3.7 or later and is supported for PostgreSQL +**asyncpg** requires Python 3.8 or later and is supported for PostgreSQL versions 9.5 to 15. Older PostgreSQL versions or other databases implementing the PostgreSQL protocol *may* work, but are not being actively tested. diff --git a/pyproject.toml b/pyproject.toml index 3010936d..c9e4bdd9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -2,7 +2,7 @@ name = "asyncpg" description = "An asyncio PostgreSQL driver" authors = [{name = "MagicStack Inc", email = "hello@magic.io"}] -requires-python = '>=3.7.0' +requires-python = '>=3.8.0' readme = "README.rst" license = {text = "Apache License, Version 2.0"} dynamic = ["version"] @@ -19,7 +19,6 @@ classifiers = [ "Operating System :: MacOS :: MacOS X", "Operating System :: Microsoft :: Windows", "Programming Language :: Python :: 3 :: Only", - "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", @@ -27,9 +26,6 @@ classifiers = [ "Programming Language :: Python :: Implementation :: CPython", "Topic :: Database :: Front-Ends", ] -dependencies = [ - 'typing-extensions>=3.7.4.3;python_version<"3.8"', -] [project.urls] github = "https://github.com/MagicStack/asyncpg" diff --git a/setup.py b/setup.py index 269e2fd7..c4d42d82 100644 --- a/setup.py +++ b/setup.py @@ -7,8 +7,8 @@ import sys -if sys.version_info < (3, 7): - raise RuntimeError('asyncpg requires Python 3.7 or greater') +if sys.version_info < (3, 8): + raise RuntimeError('asyncpg requires Python 3.8 or greater') import os import os.path diff --git a/tests/test_adversity.py b/tests/test_adversity.py index fa0f6ab1..71532317 100644 --- a/tests/test_adversity.py +++ b/tests/test_adversity.py @@ -10,16 +10,14 @@ import os import platform import unittest 
-import sys from asyncpg import _testbase as tb @unittest.skipIf(os.environ.get('PGHOST'), 'using remote cluster for testing') @unittest.skipIf( - platform.system() == 'Windows' and - sys.version_info >= (3, 8), - 'not compatible with ProactorEventLoop which is default in Python 3.8') + platform.system() == 'Windows', + 'not compatible with ProactorEventLoop which is default in Python 3.8+') class TestConnectionLoss(tb.ProxiedClusterTestCase): @tb.with_timeout(30.0) async def test_connection_close_timeout(self): diff --git a/tests/test_listeners.py b/tests/test_listeners.py index 7fdf0312..e398e0f6 100644 --- a/tests/test_listeners.py +++ b/tests/test_listeners.py @@ -8,7 +8,6 @@ import asyncio import os import platform -import sys import unittest from asyncpg import _testbase as tb @@ -314,9 +313,8 @@ def listener1(*args): @unittest.skipIf(os.environ.get('PGHOST'), 'using remote cluster for testing') @unittest.skipIf( - platform.system() == 'Windows' and - sys.version_info >= (3, 8), - 'not compatible with ProactorEventLoop which is default in Python 3.8') + platform.system() == 'Windows', + 'not compatible with ProactorEventLoop which is default in Python 3.8+') class TestConnectionTerminationListener(tb.ProxiedClusterTestCase): async def test_connection_termination_callback_called_on_remote(self): From cbf64e18a03d69b712bab1790584fc1a4a5b2bb6 Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Thu, 17 Aug 2023 09:30:06 -0700 Subject: [PATCH 118/193] When prepared statements are disabled, avoid relying on them harder (#1065) It appears that PgBouncer's `transaction` pooling mode does not consider implicit transactions properly, and so in a [`Parse`, `Flush`, `Bind`, `Execute`, `Sync`] sequence, `Flush` would be (incorrectly) considered by PgBouncer as a transaction boundary and it will happily send the following `Bind` / `Execute` messages to a different backend process. 
This makes it so that when `statement_cache_size` is set to `0`, asyncpg assumes a pessimistic stance on prepared statement persistence and does not rely on them even in implicit transactions. The above message sequence thus becomes `Parse`, `Flush`, `Parse` (a second time), `Bind`, `Execute`, `Sync`. This obviously has negative performance impact due to the extraneous `Parse`. Fixes: #1058 Fixes: #1041 --- asyncpg/connection.py | 46 ++++++++++++++++++++---------- asyncpg/protocol/coreproto.pxd | 3 +- asyncpg/protocol/coreproto.pyx | 18 +++++++++++- asyncpg/protocol/prepared_stmt.pxd | 1 + asyncpg/protocol/prepared_stmt.pyx | 7 +++++ asyncpg/protocol/protocol.pyx | 31 ++++++++++++++------ 6 files changed, 81 insertions(+), 25 deletions(-) diff --git a/asyncpg/connection.py b/asyncpg/connection.py index a7f249ba..2d689512 100644 --- a/asyncpg/connection.py +++ b/asyncpg/connection.py @@ -47,6 +47,7 @@ class Connection(metaclass=ConnectionMeta): __slots__ = ('_protocol', '_transport', '_loop', '_top_xact', '_aborted', '_pool_release_ctr', '_stmt_cache', '_stmts_to_close', + '_stmt_cache_enabled', '_listeners', '_server_version', '_server_caps', '_intro_query', '_reset_query', '_proxy', '_stmt_exclusive_section', '_config', '_params', '_addr', @@ -79,6 +80,7 @@ def __init__(self, protocol, transport, loop, max_lifetime=config.max_cached_statement_lifetime) self._stmts_to_close = set() + self._stmt_cache_enabled = config.statement_cache_size > 0 self._listeners = {} self._log_listeners = set() @@ -381,11 +383,13 @@ async def _get_statement( # Only use the cache when: # * `statement_cache_size` is greater than 0; # * query size is less than `max_cacheable_statement_size`. 
- use_cache = self._stmt_cache.get_max_size() > 0 - if (use_cache and - self._config.max_cacheable_statement_size and - len(query) > self._config.max_cacheable_statement_size): - use_cache = False + use_cache = ( + self._stmt_cache_enabled + and ( + not self._config.max_cacheable_statement_size + or len(query) <= self._config.max_cacheable_statement_size + ) + ) if isinstance(named, str): stmt_name = named @@ -434,14 +438,16 @@ async def _get_statement( # for the statement. statement._init_codecs() - if need_reprepare: - await self._protocol.prepare( - stmt_name, - query, - timeout, - state=statement, - record_class=record_class, - ) + if ( + need_reprepare + or (not statement.name and not self._stmt_cache_enabled) + ): + # Mark this anonymous prepared statement as "unprepared", + # causing it to get re-Parsed in next bind_execute. + # We always do this when stmt_cache_size is set to 0 assuming + # people are running PgBouncer which is mishandling implicit + # transactions. + statement.mark_unprepared() if use_cache: self._stmt_cache.put( @@ -1679,7 +1685,13 @@ async def __execute( record_class=None ): executor = lambda stmt, timeout: self._protocol.bind_execute( - stmt, args, '', limit, return_status, timeout) + state=stmt, + args=args, + portal_name='', + limit=limit, + return_extra=return_status, + timeout=timeout, + ) timeout = self._protocol._get_timeout(timeout) return await self._do_execute( query, @@ -1691,7 +1703,11 @@ async def __execute( async def _executemany(self, query, args, timeout): executor = lambda stmt, timeout: self._protocol.bind_execute_many( - stmt, args, '', timeout) + state=stmt, + args=args, + portal_name='', + timeout=timeout, + ) timeout = self._protocol._get_timeout(timeout) with self._stmt_exclusive_section: result, _ = await self._do_execute(query, executor, timeout) diff --git a/asyncpg/protocol/coreproto.pxd b/asyncpg/protocol/coreproto.pxd index f21559b4..7ce4f574 100644 --- a/asyncpg/protocol/coreproto.pxd +++ 
b/asyncpg/protocol/coreproto.pxd @@ -167,7 +167,8 @@ cdef class CoreProtocol: cdef _connect(self) - cdef _prepare(self, str stmt_name, str query) + cdef _prepare_and_describe(self, str stmt_name, str query) + cdef _send_parse_message(self, str stmt_name, str query) cdef _send_bind_message(self, str portal_name, str stmt_name, WriteBuffer bind_data, int32_t limit) cdef _bind_execute(self, str portal_name, str stmt_name, diff --git a/asyncpg/protocol/coreproto.pyx b/asyncpg/protocol/coreproto.pyx index 6bf1adc4..92754484 100644 --- a/asyncpg/protocol/coreproto.pyx +++ b/asyncpg/protocol/coreproto.pyx @@ -237,6 +237,10 @@ cdef class CoreProtocol: # ErrorResponse self._parse_msg_error_response(True) + elif mtype == b'1': + # ParseComplete, in case `_bind_execute()` is reparsing + self.buffer.discard_message() + elif mtype == b'2': # BindComplete self.buffer.discard_message() @@ -269,6 +273,10 @@ cdef class CoreProtocol: # ErrorResponse self._parse_msg_error_response(True) + elif mtype == b'1': + # ParseComplete, in case `_bind_execute_many()` is reparsing + self.buffer.discard_message() + elif mtype == b'2': # BindComplete self.buffer.discard_message() @@ -874,7 +882,15 @@ cdef class CoreProtocol: outbuf.write_buffer(buf) self._write(outbuf) - cdef _prepare(self, str stmt_name, str query): + cdef _send_parse_message(self, str stmt_name, str query): + cdef: + WriteBuffer msg + + self._ensure_connected() + msg = self._build_parse_message(stmt_name, query) + self._write(msg) + + cdef _prepare_and_describe(self, str stmt_name, str query): cdef: WriteBuffer packet WriteBuffer buf diff --git a/asyncpg/protocol/prepared_stmt.pxd b/asyncpg/protocol/prepared_stmt.pxd index 3906af25..369db733 100644 --- a/asyncpg/protocol/prepared_stmt.pxd +++ b/asyncpg/protocol/prepared_stmt.pxd @@ -10,6 +10,7 @@ cdef class PreparedStatementState: readonly str name readonly str query readonly bint closed + readonly bint prepared readonly int refs readonly type record_class readonly bint 
ignore_custom_codec diff --git a/asyncpg/protocol/prepared_stmt.pyx b/asyncpg/protocol/prepared_stmt.pyx index b1f2a66d..7335825c 100644 --- a/asyncpg/protocol/prepared_stmt.pyx +++ b/asyncpg/protocol/prepared_stmt.pyx @@ -27,6 +27,7 @@ cdef class PreparedStatementState: self.args_num = self.cols_num = 0 self.cols_desc = None self.closed = False + self.prepared = True self.refs = 0 self.record_class = record_class self.ignore_custom_codec = ignore_custom_codec @@ -101,6 +102,12 @@ cdef class PreparedStatementState: def mark_closed(self): self.closed = True + def mark_unprepared(self): + if self.name: + raise exceptions.InternalClientError( + "named prepared statements cannot be marked unprepared") + self.prepared = False + cdef _encode_bind_msg(self, args, int seqno = -1): cdef: int idx diff --git a/asyncpg/protocol/protocol.pyx b/asyncpg/protocol/protocol.pyx index 3f512a81..f504d9d0 100644 --- a/asyncpg/protocol/protocol.pyx +++ b/asyncpg/protocol/protocol.pyx @@ -155,7 +155,7 @@ cdef class BaseProtocol(CoreProtocol): waiter = self._new_waiter(timeout) try: - self._prepare(stmt_name, query) # network op + self._prepare_and_describe(stmt_name, query) # network op self.last_query = query if state is None: state = PreparedStatementState( @@ -168,10 +168,15 @@ cdef class BaseProtocol(CoreProtocol): return await waiter @cython.iterable_coroutine - async def bind_execute(self, PreparedStatementState state, args, - str portal_name, int limit, return_extra, - timeout): - + async def bind_execute( + self, + state: PreparedStatementState, + args, + portal_name: str, + limit: int, + return_extra: bool, + timeout, + ): if self.cancel_waiter is not None: await self.cancel_waiter if self.cancel_sent_waiter is not None: @@ -184,6 +189,9 @@ cdef class BaseProtocol(CoreProtocol): waiter = self._new_waiter(timeout) try: + if not state.prepared: + self._send_parse_message(state.name, state.query) + self._bind_execute( portal_name, state.name, @@ -201,9 +209,13 @@ cdef class 
BaseProtocol(CoreProtocol): return await waiter @cython.iterable_coroutine - async def bind_execute_many(self, PreparedStatementState state, args, - str portal_name, timeout): - + async def bind_execute_many( + self, + state: PreparedStatementState, + args, + portal_name: str, + timeout, + ): if self.cancel_waiter is not None: await self.cancel_waiter if self.cancel_sent_waiter is not None: @@ -222,6 +234,9 @@ cdef class BaseProtocol(CoreProtocol): waiter = self._new_waiter(timeout) try: + if not state.prepared: + self._send_parse_message(state.name, state.query) + more = self._bind_execute_many( portal_name, state.name, From af922bcf79a0f831892b1173f3409384e0958a83 Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Thu, 17 Aug 2023 11:31:04 -0700 Subject: [PATCH 119/193] Handle environments with HOME set to a not-a-directory (#1063) If `HOME` points to a regular file (or `/dev/null`), make sure we don't crash unnecessarily, and if we do need to crash, so so informatively. Fixes: #1014 --- asyncpg/connect_utils.py | 64 ++++++++++++++++++++++++------------- asyncpg/exceptions/_base.py | 7 +++- tests/test_connect.py | 32 +++++++++++++++++-- 3 files changed, 78 insertions(+), 25 deletions(-) diff --git a/asyncpg/connect_utils.py b/asyncpg/connect_utils.py index b5beb4e8..b91da671 100644 --- a/asyncpg/connect_utils.py +++ b/asyncpg/connect_utils.py @@ -165,7 +165,7 @@ def _validate_port_spec(hosts, port): # If there is a list of ports, its length must # match that of the host list. 
if len(port) != len(hosts): - raise exceptions.InterfaceError( + raise exceptions.ClientConfigurationError( 'could not match {} port numbers to {} hosts'.format( len(port), len(hosts))) else: @@ -211,7 +211,7 @@ def _parse_hostlist(hostlist, port, *, unquote=False): addr = m.group(1) hostspec_port = m.group(2) else: - raise ValueError( + raise exceptions.ClientConfigurationError( 'invalid IPv6 address in the connection URI: {!r}'.format( hostspec ) @@ -240,13 +240,13 @@ def _parse_hostlist(hostlist, port, *, unquote=False): def _parse_tls_version(tls_version): if tls_version.startswith('SSL'): - raise ValueError( + raise exceptions.ClientConfigurationError( f"Unsupported TLS version: {tls_version}" ) try: return ssl_module.TLSVersion[tls_version.replace('.', '_')] except KeyError: - raise ValueError( + raise exceptions.ClientConfigurationError( f"No such TLS version: {tls_version}" ) @@ -274,7 +274,7 @@ def _parse_connect_dsn_and_args(*, dsn, host, port, user, parsed = urllib.parse.urlparse(dsn) if parsed.scheme not in {'postgresql', 'postgres'}: - raise ValueError( + raise exceptions.ClientConfigurationError( 'invalid DSN: scheme is expected to be either ' '"postgresql" or "postgres", got {!r}'.format(parsed.scheme)) @@ -437,11 +437,11 @@ def _parse_connect_dsn_and_args(*, dsn, host, port, user, database = user if user is None: - raise exceptions.InterfaceError( + raise exceptions.ClientConfigurationError( 'could not determine user name to connect with') if database is None: - raise exceptions.InterfaceError( + raise exceptions.ClientConfigurationError( 'could not determine database name to connect to') if password is None: @@ -477,7 +477,7 @@ def _parse_connect_dsn_and_args(*, dsn, host, port, user, have_tcp_addrs = True if not addrs: - raise ValueError( + raise exceptions.InternalClientError( 'could not determine the database address to connect to') if ssl is None: @@ -491,7 +491,7 @@ def _parse_connect_dsn_and_args(*, dsn, host, port, user, sslmode = 
SSLMode.parse(ssl) except AttributeError: modes = ', '.join(m.name.replace('_', '-') for m in SSLMode) - raise exceptions.InterfaceError( + raise exceptions.ClientConfigurationError( '`sslmode` parameter must be one of: {}'.format(modes)) # docs at https://www.postgresql.org/docs/10/static/libpq-connect.html @@ -511,19 +511,36 @@ def _parse_connect_dsn_and_args(*, dsn, host, port, user, else: try: sslrootcert = _dot_postgresql_path('root.crt') - assert sslrootcert is not None - ssl.load_verify_locations(cafile=sslrootcert) - except (AssertionError, FileNotFoundError): + if sslrootcert is not None: + ssl.load_verify_locations(cafile=sslrootcert) + else: + raise exceptions.ClientConfigurationError( + 'cannot determine location of user ' + 'PostgreSQL configuration directory' + ) + except ( + exceptions.ClientConfigurationError, + FileNotFoundError, + NotADirectoryError, + ): if sslmode > SSLMode.require: if sslrootcert is None: - raise RuntimeError( - 'Cannot determine home directory' + sslrootcert = '~/.postgresql/root.crt' + detail = ( + 'Could not determine location of user ' + 'home directory (HOME is either unset, ' + 'inaccessible, or does not point to a ' + 'valid directory)' ) - raise ValueError( + else: + detail = None + raise exceptions.ClientConfigurationError( f'root certificate file "{sslrootcert}" does ' - f'not exist\nEither provide the file or ' - f'change sslmode to disable server ' - f'certificate verification.' 
+ f'not exist or cannot be accessed', + hint='Provide the certificate file directly ' + f'or make sure "{sslrootcert}" ' + 'exists and is readable.', + detail=detail, ) elif sslmode == SSLMode.require: ssl.verify_mode = ssl_module.CERT_NONE @@ -542,7 +559,10 @@ def _parse_connect_dsn_and_args(*, dsn, host, port, user, if sslcrl is not None: try: ssl.load_verify_locations(cafile=sslcrl) - except FileNotFoundError: + except ( + FileNotFoundError, + NotADirectoryError, + ): pass else: ssl.verify_flags |= \ @@ -571,7 +591,7 @@ def _parse_connect_dsn_and_args(*, dsn, host, port, user, keyfile=sslkey, password=lambda: sslpassword ) - except FileNotFoundError: + except (FileNotFoundError, NotADirectoryError): pass # OpenSSL 1.1.1 keylog file, copied from create_default_context() @@ -606,7 +626,7 @@ def _parse_connect_dsn_and_args(*, dsn, host, port, user, not isinstance(server_settings, dict) or not all(isinstance(k, str) for k in server_settings) or not all(isinstance(v, str) for v in server_settings.values())): - raise ValueError( + raise exceptions.ClientConfigurationError( 'server_settings is expected to be None or ' 'a Dict[str, str]') @@ -617,7 +637,7 @@ def _parse_connect_dsn_and_args(*, dsn, host, port, user, try: target_session_attrs = SessionAttribute(target_session_attrs) except ValueError: - raise exceptions.InterfaceError( + raise exceptions.ClientConfigurationError( "target_session_attrs is expected to be one of " "{!r}" ", got {!r}".format( diff --git a/asyncpg/exceptions/_base.py b/asyncpg/exceptions/_base.py index de981d25..e2da6bd8 100644 --- a/asyncpg/exceptions/_base.py +++ b/asyncpg/exceptions/_base.py @@ -13,7 +13,8 @@ __all__ = ('PostgresError', 'FatalPostgresError', 'UnknownPostgresError', 'InterfaceError', 'InterfaceWarning', 'PostgresLogMessage', 'InternalClientError', 'OutdatedSchemaCacheError', 'ProtocolError', - 'UnsupportedClientFeatureError', 'TargetServerAttributeNotMatched') + 'UnsupportedClientFeatureError', 
'TargetServerAttributeNotMatched', + 'ClientConfigurationError') def _is_asyncpg_class(cls): @@ -220,6 +221,10 @@ def with_msg(self, msg): ) +class ClientConfigurationError(InterfaceError, ValueError): + """An error caused by improper client configuration.""" + + class DataError(InterfaceError, ValueError): """An error caused by invalid query input.""" diff --git a/tests/test_connect.py b/tests/test_connect.py index e3cfb372..0e600066 100644 --- a/tests/test_connect.py +++ b/tests/test_connect.py @@ -79,6 +79,15 @@ def mock_no_home_dir(): yield +@contextlib.contextmanager +def mock_dev_null_home_dir(): + with unittest.mock.patch( + 'pathlib.Path.home', + unittest.mock.Mock(return_value=pathlib.Path('/dev/null')), + ): + yield + + class TestSettings(tb.ConnectedTestCase): async def test_get_settings_01(self): @@ -1318,9 +1327,18 @@ async def test_connection_no_home_dir(self): await con.fetchval('SELECT 42') await con.close() + with mock_dev_null_home_dir(): + con = await self.connect( + dsn='postgresql://foo/', + user='postgres', + database='postgres', + host='localhost') + await con.fetchval('SELECT 42') + await con.close() + with self.assertRaisesRegex( - RuntimeError, - 'Cannot determine home directory' + exceptions.ClientConfigurationError, + r'root certificate file "~/\.postgresql/root\.crt" does not exist' ): with mock_no_home_dir(): await self.connect( @@ -1328,6 +1346,16 @@ async def test_connection_no_home_dir(self): user='ssl_user', ssl='verify-full') + with self.assertRaisesRegex( + exceptions.ClientConfigurationError, + r'root certificate file ".*" does not exist' + ): + with mock_dev_null_home_dir(): + await self.connect( + host='localhost', + user='ssl_user', + ssl='verify-full') + class BaseTestSSLConnection(tb.ConnectedTestCase): @classmethod From 922fcd105502a07c7e550dcabceac9284de29307 Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Thu, 17 Aug 2023 11:50:47 -0700 Subject: [PATCH 120/193] Add support for tuple-format custom codecs on 
composite types (#1061) It is now possible to `set_type_codec('mycomposite', ... format='tuple')`, which is useful for types that are represented by a composite type in Postgres, but are an integral type in Python, e.g. `complex`. Fixes: #1060 --- asyncpg/connection.py | 34 ++++++++++-- asyncpg/introspection.py | 4 ++ asyncpg/protocol/codecs/base.pxd | 3 ++ asyncpg/protocol/codecs/base.pyx | 90 ++++++++++++++++++++++---------- asyncpg/protocol/settings.pxd | 2 +- asyncpg/protocol/settings.pyx | 6 ++- docs/usage.rst | 43 ++++++++++++++- tests/test_codecs.py | 66 ++++++++++++++++------- 8 files changed, 192 insertions(+), 56 deletions(-) diff --git a/asyncpg/connection.py b/asyncpg/connection.py index 2d689512..45cf99b1 100644 --- a/asyncpg/connection.py +++ b/asyncpg/connection.py @@ -1160,6 +1160,9 @@ async def set_type_codec(self, typename, *, | ``time with | (``microseconds``, | | time zone`` | ``time zone offset in seconds``) | +-----------------+---------------------------------------------+ + | any composite | Composite value elements | + | type | | + +-----------------+---------------------------------------------+ :param encoder: Callable accepting a Python object as a single argument and @@ -1214,6 +1217,10 @@ async def set_type_codec(self, typename, *, The ``binary`` keyword argument was removed in favor of ``format``. + .. versionchanged:: 0.29.0 + Custom codecs for composite types are now supported with + ``format='tuple'``. + .. note:: It is recommended to use the ``'binary'`` or ``'tuple'`` *format* @@ -1224,11 +1231,28 @@ async def set_type_codec(self, typename, *, codecs. 
""" self._check_open() + settings = self._protocol.get_settings() typeinfo = await self._introspect_type(typename, schema) - if not introspection.is_scalar_type(typeinfo): + full_typeinfos = [] + if introspection.is_scalar_type(typeinfo): + kind = 'scalar' + elif introspection.is_composite_type(typeinfo): + if format != 'tuple': + raise exceptions.UnsupportedClientFeatureError( + 'only tuple-format codecs can be used on composite types', + hint="Use `set_type_codec(..., format='tuple')` and " + "pass/interpret data as a Python tuple. See an " + "example at https://magicstack.github.io/asyncpg/" + "current/usage.html#example-decoding-complex-types", + ) + kind = 'composite' + full_typeinfos, _ = await self._introspect_types( + (typeinfo['oid'],), 10) + else: raise exceptions.InterfaceError( - 'cannot use custom codec on non-scalar type {}.{}'.format( - schema, typename)) + f'cannot use custom codec on type {schema}.{typename}: ' + f'it is neither a scalar type nor a composite type' + ) if introspection.is_domain_type(typeinfo): raise exceptions.UnsupportedClientFeatureError( 'custom codecs on domain types are not supported', @@ -1240,8 +1264,8 @@ async def set_type_codec(self, typename, *, ) oid = typeinfo['oid'] - self._protocol.get_settings().add_python_codec( - oid, typename, schema, 'scalar', + settings.add_python_codec( + oid, typename, schema, full_typeinfos, kind, encoder, decoder, format) # Statement cache is no longer valid due to codec changes. 
diff --git a/asyncpg/introspection.py b/asyncpg/introspection.py index d62f39a0..6c2caf03 100644 --- a/asyncpg/introspection.py +++ b/asyncpg/introspection.py @@ -286,3 +286,7 @@ def is_scalar_type(typeinfo) -> bool: def is_domain_type(typeinfo) -> bool: return typeinfo['kind'] == b'd' + + +def is_composite_type(typeinfo) -> bool: + return typeinfo['kind'] == b'c' diff --git a/asyncpg/protocol/codecs/base.pxd b/asyncpg/protocol/codecs/base.pxd index 16928b88..1cfed833 100644 --- a/asyncpg/protocol/codecs/base.pxd +++ b/asyncpg/protocol/codecs/base.pxd @@ -57,6 +57,7 @@ cdef class Codec: encode_func c_encoder decode_func c_decoder + Codec base_codec object py_encoder object py_decoder @@ -79,6 +80,7 @@ cdef class Codec: CodecType type, ServerDataFormat format, ClientExchangeFormat xformat, encode_func c_encoder, decode_func c_decoder, + Codec base_codec, object py_encoder, object py_decoder, Codec element_codec, tuple element_type_oids, object element_names, list element_codecs, @@ -169,6 +171,7 @@ cdef class Codec: object decoder, encode_func c_encoder, decode_func c_decoder, + Codec base_codec, ServerDataFormat format, ClientExchangeFormat xformat) diff --git a/asyncpg/protocol/codecs/base.pyx b/asyncpg/protocol/codecs/base.pyx index 273b27aa..c269e374 100644 --- a/asyncpg/protocol/codecs/base.pyx +++ b/asyncpg/protocol/codecs/base.pyx @@ -23,14 +23,25 @@ cdef class Codec: self.oid = oid self.type = CODEC_UNDEFINED - cdef init(self, str name, str schema, str kind, - CodecType type, ServerDataFormat format, - ClientExchangeFormat xformat, - encode_func c_encoder, decode_func c_decoder, - object py_encoder, object py_decoder, - Codec element_codec, tuple element_type_oids, - object element_names, list element_codecs, - Py_UCS4 element_delimiter): + cdef init( + self, + str name, + str schema, + str kind, + CodecType type, + ServerDataFormat format, + ClientExchangeFormat xformat, + encode_func c_encoder, + decode_func c_decoder, + Codec base_codec, + object 
py_encoder, + object py_decoder, + Codec element_codec, + tuple element_type_oids, + object element_names, + list element_codecs, + Py_UCS4 element_delimiter, + ): self.name = name self.schema = schema @@ -40,6 +51,7 @@ cdef class Codec: self.xformat = xformat self.c_encoder = c_encoder self.c_decoder = c_decoder + self.base_codec = base_codec self.py_encoder = py_encoder self.py_decoder = py_decoder self.element_codec = element_codec @@ -48,6 +60,12 @@ cdef class Codec: self.element_delimiter = element_delimiter self.element_names = element_names + if base_codec is not None: + if c_encoder != NULL or c_decoder != NULL: + raise exceptions.InternalClientError( + 'base_codec is mutually exclusive with c_encoder/c_decoder' + ) + if element_names is not None: self.record_desc = record.ApgRecordDesc_New( element_names, tuple(element_names)) @@ -98,7 +116,7 @@ cdef class Codec: codec = Codec(self.oid) codec.init(self.name, self.schema, self.kind, self.type, self.format, self.xformat, - self.c_encoder, self.c_decoder, + self.c_encoder, self.c_decoder, self.base_codec, self.py_encoder, self.py_decoder, self.element_codec, self.element_type_oids, self.element_names, @@ -196,7 +214,10 @@ cdef class Codec: raise exceptions.InternalClientError( 'unexpected data format: {}'.format(self.format)) elif self.xformat == PG_XFORMAT_TUPLE: - self.c_encoder(settings, buf, data) + if self.base_codec is not None: + self.base_codec.encode(settings, buf, data) + else: + self.c_encoder(settings, buf, data) else: raise exceptions.InternalClientError( 'unexpected exchange format: {}'.format(self.xformat)) @@ -295,7 +316,10 @@ cdef class Codec: raise exceptions.InternalClientError( 'unexpected data format: {}'.format(self.format)) elif self.xformat == PG_XFORMAT_TUPLE: - data = self.c_decoder(settings, buf) + if self.base_codec is not None: + data = self.base_codec.decode(settings, buf) + else: + data = self.c_decoder(settings, buf) else: raise exceptions.InternalClientError( 'unexpected 
exchange format: {}'.format(self.xformat)) @@ -367,8 +391,8 @@ cdef class Codec: cdef Codec codec codec = Codec(oid) codec.init(name, schema, 'array', CODEC_ARRAY, element_codec.format, - PG_XFORMAT_OBJECT, NULL, NULL, None, None, element_codec, - None, None, None, element_delimiter) + PG_XFORMAT_OBJECT, NULL, NULL, None, None, None, + element_codec, None, None, None, element_delimiter) return codec @staticmethod @@ -379,8 +403,8 @@ cdef class Codec: cdef Codec codec codec = Codec(oid) codec.init(name, schema, 'range', CODEC_RANGE, element_codec.format, - PG_XFORMAT_OBJECT, NULL, NULL, None, None, element_codec, - None, None, None, 0) + PG_XFORMAT_OBJECT, NULL, NULL, None, None, None, + element_codec, None, None, None, 0) return codec @staticmethod @@ -391,7 +415,7 @@ cdef class Codec: cdef Codec codec codec = Codec(oid) codec.init(name, schema, 'multirange', CODEC_MULTIRANGE, - element_codec.format, PG_XFORMAT_OBJECT, NULL, NULL, + element_codec.format, PG_XFORMAT_OBJECT, NULL, NULL, None, None, None, element_codec, None, None, None, 0) return codec @@ -407,7 +431,7 @@ cdef class Codec: codec = Codec(oid) codec.init(name, schema, 'composite', CODEC_COMPOSITE, format, PG_XFORMAT_OBJECT, NULL, NULL, None, None, None, - element_type_oids, element_names, element_codecs, 0) + None, element_type_oids, element_names, element_codecs, 0) return codec @staticmethod @@ -419,12 +443,13 @@ cdef class Codec: object decoder, encode_func c_encoder, decode_func c_decoder, + Codec base_codec, ServerDataFormat format, ClientExchangeFormat xformat): cdef Codec codec codec = Codec(oid) codec.init(name, schema, kind, CODEC_PY, format, xformat, - c_encoder, c_decoder, encoder, decoder, + c_encoder, c_decoder, base_codec, encoder, decoder, None, None, None, None, 0) return codec @@ -596,17 +621,21 @@ cdef class DataCodecConfig: self.declare_fallback_codec(oid, name, schema) def add_python_codec(self, typeoid, typename, typeschema, typekind, - encoder, decoder, format, xformat): + 
typeinfos, encoder, decoder, format, xformat): cdef: - Codec core_codec + Codec core_codec = None encode_func c_encoder = NULL decode_func c_decoder = NULL + Codec base_codec = None uint32_t oid = pylong_as_oid(typeoid) bint codec_set = False # Clear all previous overrides (this also clears type cache). self.remove_python_codec(typeoid, typename, typeschema) + if typeinfos: + self.add_types(typeinfos) + if format == PG_FORMAT_ANY: formats = (PG_FORMAT_TEXT, PG_FORMAT_BINARY) else: @@ -614,16 +643,21 @@ cdef class DataCodecConfig: for fmt in formats: if xformat == PG_XFORMAT_TUPLE: - core_codec = get_core_codec(oid, fmt, xformat) - if core_codec is None: - continue - c_encoder = core_codec.c_encoder - c_decoder = core_codec.c_decoder + if typekind == "scalar": + core_codec = get_core_codec(oid, fmt, xformat) + if core_codec is None: + continue + c_encoder = core_codec.c_encoder + c_decoder = core_codec.c_decoder + elif typekind == "composite": + base_codec = self.get_codec(oid, fmt) + if base_codec is None: + continue self._custom_type_codecs[typeoid, fmt] = \ Codec.new_python_codec(oid, typename, typeschema, typekind, encoder, decoder, c_encoder, c_decoder, - fmt, xformat) + base_codec, fmt, xformat) codec_set = True if not codec_set: @@ -829,7 +863,7 @@ cdef register_core_codec(uint32_t oid, codec = Codec(oid) codec.init(name, 'pg_catalog', kind, CODEC_C, format, xformat, - encode, decode, None, None, None, None, None, None, 0) + encode, decode, None, None, None, None, None, None, None, 0) cpython.Py_INCREF(codec) # immortalize if format == PG_FORMAT_BINARY: @@ -853,7 +887,7 @@ cdef register_extra_codec(str name, codec = Codec(INVALIDOID) codec.init(name, None, kind, CODEC_C, format, PG_XFORMAT_OBJECT, - encode, decode, None, None, None, None, None, None, 0) + encode, decode, None, None, None, None, None, None, None, 0) EXTRA_CODECS[name, format] = codec diff --git a/asyncpg/protocol/settings.pxd b/asyncpg/protocol/settings.pxd index 41131cdc..0a1a5f6f 100644 --- 
a/asyncpg/protocol/settings.pxd +++ b/asyncpg/protocol/settings.pxd @@ -18,7 +18,7 @@ cdef class ConnectionSettings(pgproto.CodecContext): cpdef get_text_codec(self) cpdef inline register_data_types(self, types) cpdef inline add_python_codec( - self, typeoid, typename, typeschema, typekind, encoder, + self, typeoid, typename, typeschema, typeinfos, typekind, encoder, decoder, format) cpdef inline remove_python_codec( self, typeoid, typename, typeschema) diff --git a/asyncpg/protocol/settings.pyx b/asyncpg/protocol/settings.pyx index b4cfa399..8e6591b9 100644 --- a/asyncpg/protocol/settings.pyx +++ b/asyncpg/protocol/settings.pyx @@ -36,7 +36,8 @@ cdef class ConnectionSettings(pgproto.CodecContext): self._data_codecs.add_types(types) cpdef inline add_python_codec(self, typeoid, typename, typeschema, - typekind, encoder, decoder, format): + typeinfos, typekind, encoder, decoder, + format): cdef: ServerDataFormat _format ClientExchangeFormat xformat @@ -57,7 +58,8 @@ cdef class ConnectionSettings(pgproto.CodecContext): )) self._data_codecs.add_python_codec(typeoid, typename, typeschema, - typekind, encoder, decoder, + typekind, typeinfos, + encoder, decoder, _format, xformat) cpdef inline remove_python_codec(self, typeoid, typename, typeschema): diff --git a/docs/usage.rst b/docs/usage.rst index a6c62b41..82a7a370 100644 --- a/docs/usage.rst +++ b/docs/usage.rst @@ -216,7 +216,46 @@ JSON values using the :mod:`json ` module. finally: await conn.close() - asyncio.get_event_loop().run_until_complete(main()) + asyncio.run(main()) + + +Example: complex types +~~~~~~~~~~~~~~~~~~~~~~ + +The example below shows how to configure asyncpg to encode and decode +Python :class:`complex ` values to a custom composite +type in PostgreSQL. + +.. 
code-block:: python + + import asyncio + import asyncpg + + + async def main(): + conn = await asyncpg.connect() + + try: + await conn.execute( + ''' + CREATE TYPE mycomplex AS ( + r float, + i float + );''' + ) + await conn.set_type_codec( + 'mycomplex', + encoder=lambda x: (x.real, x.imag), + decoder=lambda t: complex(t[0], t[1]), + format='tuple', + ) + + res = await conn.fetchval('SELECT $1::mycomplex', (1+2j)) + + finally: + await conn.close() + + asyncio.run(main()) Example: automatic conversion of PostGIS types @@ -274,7 +313,7 @@ will work. finally: await conn.close() - asyncio.get_event_loop().run_until_complete(main()) + asyncio.run(main()) Example: decoding numeric columns as floats diff --git a/tests/test_codecs.py b/tests/test_codecs.py index 918e01d5..bffb2f1a 100644 --- a/tests/test_codecs.py +++ b/tests/test_codecs.py @@ -1212,28 +1212,11 @@ def hstore_encoder(obj): self.assertEqual(at[0].name, 'result') self.assertEqual(at[0].type, pt[0]) - err = 'cannot use custom codec on non-scalar type public._hstore' + err = 'cannot use custom codec on type public._hstore' with self.assertRaisesRegex(asyncpg.InterfaceError, err): await self.con.set_type_codec('_hstore', encoder=hstore_encoder, decoder=hstore_decoder) - - await self.con.execute(''' - CREATE TYPE mytype AS (a int); - ''') - - try: - err = 'cannot use custom codec on non-scalar type ' + \ - 'public.mytype' - with self.assertRaisesRegex(asyncpg.InterfaceError, err): - await self.con.set_type_codec( - 'mytype', encoder=hstore_encoder, - decoder=hstore_decoder) - finally: - await self.con.execute(''' - DROP TYPE mytype; - ''') - finally: await self.con.execute(''' DROP EXTENSION hstore @@ -1546,6 +1529,53 @@ def _decoder(value): finally: await conn.close() + async def test_custom_codec_composite_tuple(self): + await self.con.execute(''' + CREATE TYPE mycomplex AS (r float, i float); + ''') + + try: + await self.con.set_type_codec( + 'mycomplex', + encoder=lambda x: (x.real, x.imag), + decoder=lambda 
t: complex(t[0], t[1]), + format='tuple', + ) + + num = complex('1+2j') + + res = await self.con.fetchval( + 'SELECT $1::mycomplex', + num, + ) + + self.assertEqual(num, res) + + finally: + await self.con.execute(''' + DROP TYPE mycomplex; + ''') + + async def test_custom_codec_composite_non_tuple(self): + await self.con.execute(''' + CREATE TYPE mycomplex AS (r float, i float); + ''') + + try: + with self.assertRaisesRegex( + asyncpg.UnsupportedClientFeatureError, + "only tuple-format codecs can be used on composite types", + ): + await self.con.set_type_codec( + 'mycomplex', + encoder=lambda x: (x.real, x.imag), + decoder=lambda t: complex(t[0], t[1]), + ) + finally: + await self.con.execute(''' + DROP TYPE mycomplex; + ''') + async def test_timetz_encoding(self): try: async with self.con.transaction(): From 89d5bd032335fd39cb24f972bb669ab91de57394 Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Thu, 17 Aug 2023 13:50:14 -0700 Subject: [PATCH 121/193] Fix handling of non-ASCII passwords (#1062) Fixes: #1018 --- asyncpg/protocol/coreproto.pyx | 10 +++++----- tests/test_connect.py | 27 +++++++++++++++------------ 2 files changed, 20 insertions(+), 17 deletions(-) diff --git a/asyncpg/protocol/coreproto.pyx b/asyncpg/protocol/coreproto.pyx index 92754484..64afe934 100644 --- a/asyncpg/protocol/coreproto.pyx +++ b/asyncpg/protocol/coreproto.pyx @@ -642,7 +642,7 @@ cdef class CoreProtocol: WriteBuffer msg msg = WriteBuffer.new_message(b'p') - msg.write_bytestring(self.password.encode('ascii')) + msg.write_bytestring(self.password.encode(self.encoding)) msg.end_message() return msg @@ -654,11 +654,11 @@ cdef class CoreProtocol: msg = WriteBuffer.new_message(b'p') # 'md5' + md5(md5(password + username) + salt)) - userpass = ((self.password or '') + (self.user or '')).encode('ascii') - hash = hashlib.md5(hashlib.md5(userpass).hexdigest().\ - encode('ascii') + salt).hexdigest().encode('ascii') + userpass = (self.password or '') + (self.user or '') + md5_1 = 
hashlib.md5(userpass.encode(self.encoding)).hexdigest() + md5_2 = hashlib.md5(md5_1.encode('ascii') + salt).hexdigest() - msg.write_bytestring(b'md5' + hash) + msg.write_bytestring(b'md5' + md5_2.encode('ascii')) msg.end_message() return msg diff --git a/tests/test_connect.py b/tests/test_connect.py index 0e600066..4c6fa4bd 100644 --- a/tests/test_connect.py +++ b/tests/test_connect.py @@ -126,6 +126,9 @@ def test_server_version_02(self): self.assertEqual(expected, result) +CORRECT_PASSWORD = 'correct\u1680password' + + class TestAuthentication(tb.ConnectedTestCase): def setUp(self): super().setUp() @@ -136,9 +139,9 @@ def setUp(self): methods = [ ('trust', None), ('reject', None), - ('scram-sha-256', 'correctpassword'), - ('md5', 'correctpassword'), - ('password', 'correctpassword'), + ('scram-sha-256', CORRECT_PASSWORD), + ('md5', CORRECT_PASSWORD), + ('password', CORRECT_PASSWORD), ] self.cluster.reset_hba() @@ -160,7 +163,7 @@ def setUp(self): create_script.append( 'CREATE ROLE {}_user WITH LOGIN{};'.format( username, - ' PASSWORD {!r}'.format(password) if password else '' + f' PASSWORD E{(password or "")!r}' ) ) @@ -250,7 +253,7 @@ async def test_auth_reject(self): async def test_auth_password_cleartext(self): conn = await self.connect( user='password_user', - password='correctpassword') + password=CORRECT_PASSWORD) await conn.close() with self.assertRaisesRegex( @@ -262,7 +265,7 @@ async def test_auth_password_cleartext(self): async def test_auth_password_cleartext_callable(self): def get_correctpassword(): - return 'correctpassword' + return CORRECT_PASSWORD def get_wrongpassword(): return 'wrongpassword' @@ -281,7 +284,7 @@ def get_wrongpassword(): async def test_auth_password_cleartext_callable_coroutine(self): async def get_correctpassword(): - return 'correctpassword' + return CORRECT_PASSWORD async def get_wrongpassword(): return 'wrongpassword' @@ -300,7 +303,7 @@ async def get_wrongpassword(): async def 
test_auth_password_cleartext_callable_awaitable(self): async def get_correctpassword(): - return 'correctpassword' + return CORRECT_PASSWORD async def get_wrongpassword(): return 'wrongpassword' @@ -319,7 +322,7 @@ async def get_wrongpassword(): async def test_auth_password_md5(self): conn = await self.connect( - user='md5_user', password='correctpassword') + user='md5_user', password=CORRECT_PASSWORD) await conn.close() with self.assertRaisesRegex( @@ -334,7 +337,7 @@ async def test_auth_password_scram_sha_256(self): return conn = await self.connect( - user='scram_sha_256_user', password='correctpassword') + user='scram_sha_256_user', password=CORRECT_PASSWORD) await conn.close() with self.assertRaisesRegex( @@ -371,7 +374,7 @@ async def test_auth_password_scram_sha_256(self): await conn.close() alter_password = \ - "ALTER ROLE scram_sha_256_user PASSWORD 'correctpassword';" + f"ALTER ROLE scram_sha_256_user PASSWORD E{CORRECT_PASSWORD!r};" await self.con.execute(alter_password) await self.con.execute("SET password_encryption = 'md5';") @@ -381,7 +384,7 @@ async def test_auth_md5_unsupported(self, _): exceptions.InternalClientError, ".*no md5.*", ): - await self.connect(user='md5_user', password='correctpassword') + await self.connect(user='md5_user', password=CORRECT_PASSWORD) class TestConnectParams(tb.TestCase): From 7cb4e70d88d165273997d914280c6d109fbbc8f6 Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Thu, 24 Aug 2023 11:35:16 -0700 Subject: [PATCH 122/193] Support `target_session_attrs` in URL format, add tests (#1073) --- asyncpg/connect_utils.py | 7 +++++++ tests/test_connect.py | 36 ++++++++++++++++++++++++++++++++++++ 2 files changed, 43 insertions(+) diff --git a/asyncpg/connect_utils.py b/asyncpg/connect_utils.py index b91da671..9feef139 100644 --- a/asyncpg/connect_utils.py +++ b/asyncpg/connect_utils.py @@ -378,6 +378,13 @@ def _parse_connect_dsn_and_args(*, dsn, host, port, user, 'ssl_max_protocol_version' ) + if 'target_session_attrs' in 
query: + dsn_target_session_attrs = query.pop( + 'target_session_attrs' + ) + if target_session_attrs is None: + target_session_attrs = dsn_target_session_attrs + if query: if server_settings is None: server_settings = query diff --git a/tests/test_connect.py b/tests/test_connect.py index 4c6fa4bd..171c2644 100644 --- a/tests/test_connect.py +++ b/tests/test_connect.py @@ -563,6 +563,42 @@ class TestConnectParams(tb.TestCase): }) }, + { + 'name': 'target_session_attrs', + 'dsn': 'postgresql://user@host1:1111,host2:2222/db' + '?target_session_attrs=read-only', + 'result': ([('host1', 1111), ('host2', 2222)], { + 'database': 'db', + 'user': 'user', + 'target_session_attrs': 'read-only', + }) + }, + + { + 'name': 'target_session_attrs_2', + 'dsn': 'postgresql://user@host1:1111,host2:2222/db' + '?target_session_attrs=read-only', + 'target_session_attrs': 'read-write', + 'result': ([('host1', 1111), ('host2', 2222)], { + 'database': 'db', + 'user': 'user', + 'target_session_attrs': 'read-write', + }) + }, + + { + 'name': 'target_session_attrs_3', + 'dsn': 'postgresql://user@host1:1111,host2:2222/db', + 'env': { + 'PGTARGETSESSIONATTRS': 'read-only', + }, + 'result': ([('host1', 1111), ('host2', 2222)], { + 'database': 'db', + 'user': 'user', + 'target_session_attrs': 'read-only', + }) + }, + { 'name': 'dsn_ipv6_multi_host', 'dsn': 'postgresql://user@[2001:db8::1234%25eth0],[::1]/db', From f21ebf6474fa58cc9133667063683cae05175a09 Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Sat, 7 Oct 2023 14:06:02 -0700 Subject: [PATCH 123/193] Disable JIT while doing type introspection (#1082) The misapplication of JIT to asyncpg introspection queries has been a constant source of user complaints. Closes: #530 Closes: #1078 Previously: #875, #794, #782, #741, #727 (and probably more). 
--- asyncpg/connection.py | 44 ++++++++++++++++++++++++++++++++++--- tests/test_introspection.py | 8 ++++++- 2 files changed, 48 insertions(+), 4 deletions(-) diff --git a/asyncpg/connection.py b/asyncpg/connection.py index 45cf99b1..06e4ce23 100644 --- a/asyncpg/connection.py +++ b/asyncpg/connection.py @@ -461,7 +461,26 @@ async def _get_statement( return statement async def _introspect_types(self, typeoids, timeout): - return await self.__execute( + if self._server_caps.jit: + try: + cfgrow, _ = await self.__execute( + """ + SELECT + current_setting('jit') AS cur, + set_config('jit', 'off', false) AS new + """, + (), + 0, + timeout, + ignore_custom_codec=True, + ) + jit_state = cfgrow[0]['cur'] + except exceptions.UndefinedObjectError: + jit_state = 'off' + else: + jit_state = 'off' + + result = await self.__execute( self._intro_query, (list(typeoids),), 0, @@ -469,6 +488,20 @@ async def _introspect_types(self, typeoids, timeout): ignore_custom_codec=True, ) + if jit_state != 'off': + await self.__execute( + """ + SELECT + set_config('jit', $1, false) + """, + (jit_state,), + 0, + timeout, + ignore_custom_codec=True, + ) + + return result + async def _introspect_type(self, typename, schema): if ( schema == 'pg_catalog' @@ -2370,7 +2403,7 @@ class _ConnectionProxy: ServerCapabilities = collections.namedtuple( 'ServerCapabilities', ['advisory_locks', 'notifications', 'plpgsql', 'sql_reset', - 'sql_close_all']) + 'sql_close_all', 'jit']) ServerCapabilities.__doc__ = 'PostgreSQL server capabilities.' @@ -2382,6 +2415,7 @@ def _detect_server_capabilities(server_version, connection_settings): plpgsql = False sql_reset = True sql_close_all = False + jit = False elif hasattr(connection_settings, 'crdb_version'): # CockroachDB detected. 
advisory_locks = False @@ -2389,6 +2423,7 @@ def _detect_server_capabilities(server_version, connection_settings): plpgsql = False sql_reset = False sql_close_all = False + jit = False elif hasattr(connection_settings, 'crate_version'): # CrateDB detected. advisory_locks = False @@ -2396,6 +2431,7 @@ def _detect_server_capabilities(server_version, connection_settings): plpgsql = False sql_reset = False sql_close_all = False + jit = False else: # Standard PostgreSQL server assumed. advisory_locks = True @@ -2403,13 +2439,15 @@ def _detect_server_capabilities(server_version, connection_settings): plpgsql = True sql_reset = True sql_close_all = True + jit = server_version >= (11, 0) return ServerCapabilities( advisory_locks=advisory_locks, notifications=notifications, plpgsql=plpgsql, sql_reset=sql_reset, - sql_close_all=sql_close_all + sql_close_all=sql_close_all, + jit=jit, ) diff --git a/tests/test_introspection.py b/tests/test_introspection.py index 78561dd0..bf95537a 100644 --- a/tests/test_introspection.py +++ b/tests/test_introspection.py @@ -43,6 +43,12 @@ def tearDownClass(cls): super().tearDownClass() + @classmethod + def get_server_settings(cls): + settings = super().get_server_settings() + settings.pop('jit', None) + return settings + def setUp(self): super().setUp() self.loop.run_until_complete(self._add_custom_codec(self.con)) @@ -124,7 +130,7 @@ async def test_introspection_no_stmt_cache_03(self): await self.con.fetchval( "SELECT $1::int[], '{foo}'".format(foo='a' * 10000), [1, 2]) - self.assertEqual(apg_con._uid, old_uid + 1) + self.assertGreater(apg_con._uid, old_uid) async def test_introspection_sticks_for_ps(self): # Test that the introspected codec pipeline for a prepared From 0c3bf600694065119a7a109eed1081f56e4f05a6 Mon Sep 17 00:00:00 2001 From: Vitaly Kirsanov Date: Mon, 9 Oct 2023 00:33:34 +0300 Subject: [PATCH 124/193] Issue #1020 Infinity numeric support (#1067) --- asyncpg/pgproto | 2 +- tests/test_codecs.py | 35 
+++++++++++++++++++++++++---------- 2 files changed, 26 insertions(+), 11 deletions(-) diff --git a/asyncpg/pgproto b/asyncpg/pgproto index a4178145..1c3cad14 160000 --- a/asyncpg/pgproto +++ b/asyncpg/pgproto @@ -1 +1 @@ -Subproject commit a4178145cd7cc3a44eee20cfc9e8b94a7fed2053 +Subproject commit 1c3cad14d53c8f3088106f4eab8f612b7293569b diff --git a/tests/test_codecs.py b/tests/test_codecs.py index bffb2f1a..f466349a 100644 --- a/tests/test_codecs.py +++ b/tests/test_codecs.py @@ -633,17 +633,32 @@ async def test_numeric(self): "SELECT $1::numeric", decimal.Decimal('sNaN')) self.assertTrue(res.is_nan()) - with self.assertRaisesRegex(asyncpg.DataError, - 'numeric type does not ' - 'support infinite values'): - await self.con.fetchval( - "SELECT $1::numeric", decimal.Decimal('-Inf')) + if self.server_version < (14, 0): + with self.assertRaisesRegex( + asyncpg.DataError, + 'invalid sign in external "numeric" value' + ): + await self.con.fetchval( + "SELECT $1::numeric", decimal.Decimal('-Inf')) - with self.assertRaisesRegex(asyncpg.DataError, - 'numeric type does not ' - 'support infinite values'): - await self.con.fetchval( - "SELECT $1::numeric", decimal.Decimal('+Inf')) + with self.assertRaisesRegex( + asyncpg.DataError, + 'invalid sign in external "numeric" value' + ): + await self.con.fetchval( + "SELECT $1::numeric", decimal.Decimal('+Inf')) + + with self.assertRaisesRegex(asyncpg.DataError, 'invalid'): + await self.con.fetchval( + "SELECT $1::numeric", 'invalid') + else: + res = await self.con.fetchval( + "SELECT $1::numeric", decimal.Decimal("-Inf")) + self.assertTrue(res.is_infinite()) + + res = await self.con.fetchval( + "SELECT $1::numeric", decimal.Decimal("+Inf")) + self.assertTrue(res.is_infinite()) with self.assertRaisesRegex(asyncpg.DataError, 'invalid'): await self.con.fetchval( From deea86cebf69a2b489d5e0bfa8ba5b5a562b5839 Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Sun, 8 Oct 2023 14:56:39 -0700 Subject: [PATCH 125/193] Test on 
Python 3.12 and PostgreSQL 16 (#1084) --- .github/workflows/release.yml | 16 ++++++++-------- .github/workflows/tests.yml | 11 +++++++---- README.rst | 2 +- asyncpg/exceptions/__init__.py | 5 +++++ docs/index.rst | 2 +- pyproject.toml | 7 ++++--- 6 files changed, 26 insertions(+), 17 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index a294f9c3..52daf01a 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -50,7 +50,7 @@ jobs: PIP_DISABLE_PIP_VERSION_CHECK: 1 steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: fetch-depth: 50 submodules: true @@ -76,11 +76,11 @@ jobs: outputs: include: ${{ steps.set-matrix.outputs.include }} steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - uses: actions/setup-python@v4 with: python-version: "3.x" - - run: pip install cibuildwheel==2.13.1 + - run: pip install cibuildwheel==2.16.2 - id: set-matrix run: | MATRIX_INCLUDE=$( @@ -109,7 +109,7 @@ jobs: PIP_DISABLE_PIP_VERSION_CHECK: 1 steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: fetch-depth: 50 submodules: true @@ -118,7 +118,7 @@ jobs: if: runner.os == 'Linux' uses: docker/setup-qemu-action@v2 - - uses: pypa/cibuildwheel@v2.13.1 + - uses: pypa/cibuildwheel@fff9ec32ed25a9c576750c91e06b410ed0c15db7 # v2.16.2 with: only: ${{ matrix.only }} env: @@ -138,7 +138,7 @@ jobs: steps: - name: Checkout source - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: fetch-depth: 5 submodules: true @@ -154,7 +154,7 @@ jobs: make htmldocs - name: Checkout gh-pages - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: fetch-depth: 5 ref: gh-pages @@ -180,7 +180,7 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: fetch-depth: 5 submodules: false diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 8c069709..7fc77b38 100644 --- a/.github/workflows/tests.yml +++ 
b/.github/workflows/tests.yml @@ -17,13 +17,16 @@ jobs: # job. strategy: matrix: - python-version: ["3.8", "3.9", "3.10", "3.11"] + python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"] os: [ubuntu-latest, macos-latest, windows-latest] loop: [asyncio, uvloop] exclude: # uvloop does not support windows - loop: uvloop os: windows-latest + # No 3.12 release yet + - loop: uvloop + python-version: "3.12" runs-on: ${{ matrix.os }} @@ -35,7 +38,7 @@ jobs: PIP_DISABLE_PIP_VERSION_CHECK: 1 steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: fetch-depth: 50 submodules: true @@ -76,7 +79,7 @@ jobs: test-postgres: strategy: matrix: - postgres-version: ["9.5", "9.6", "10", "11", "12", "13", "14", "15"] + postgres-version: ["9.5", "9.6", "10", "11", "12", "13", "14", "15", "16"] runs-on: ubuntu-latest @@ -84,7 +87,7 @@ jobs: PIP_DISABLE_PIP_VERSION_CHECK: 1 steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: fetch-depth: 50 submodules: true diff --git a/README.rst b/README.rst index ef2678db..438b4c44 100644 --- a/README.rst +++ b/README.rst @@ -14,7 +14,7 @@ framework. You can read more about asyncpg in an introductory `blog post `_. asyncpg requires Python 3.8 or later and is supported for PostgreSQL -versions 9.5 to 15. Older PostgreSQL versions or other databases implementing +versions 9.5 to 16. Older PostgreSQL versions or other databases implementing the PostgreSQL protocol *may* work, but are not being actively tested. 
diff --git a/asyncpg/exceptions/__init__.py b/asyncpg/exceptions/__init__.py index c5b5ccc4..8c97d5a0 100644 --- a/asyncpg/exceptions/__init__.py +++ b/asyncpg/exceptions/__init__.py @@ -397,6 +397,10 @@ class SQLJsonScalarRequiredError(DataError): sqlstate = '2203F' +class SQLJsonItemCannotBeCastToTargetTypeError(DataError): + sqlstate = '2203G' + + class IntegrityConstraintViolationError(_base.PostgresError): sqlstate = '23000' @@ -1163,6 +1167,7 @@ class IndexCorruptedError(InternalServerError): 'ReadingExternalRoutineSQLDataNotPermittedError', 'ReadingSQLDataNotPermittedError', 'ReservedNameError', 'RestrictViolationError', 'SQLJsonArrayNotFoundError', + 'SQLJsonItemCannotBeCastToTargetTypeError', 'SQLJsonMemberNotFoundError', 'SQLJsonNumberNotFoundError', 'SQLJsonObjectNotFoundError', 'SQLJsonScalarRequiredError', 'SQLRoutineError', 'SQLStatementNotYetCompleteError', diff --git a/docs/index.rst b/docs/index.rst index dd4768b4..02a8457d 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -16,7 +16,7 @@ of PostgreSQL server binary protocol for use with Python's ``asyncio`` framework. **asyncpg** requires Python 3.8 or later and is supported for PostgreSQL -versions 9.5 to 15. Older PostgreSQL versions or other databases implementing +versions 9.5 to 16. Older PostgreSQL versions or other databases implementing the PostgreSQL protocol *may* work, but are not being actively tested. 
Contents diff --git a/pyproject.toml b/pyproject.toml index c9e4bdd9..72812da1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -23,6 +23,7 @@ classifiers = [ "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Programming Language :: Python :: Implementation :: CPython", "Topic :: Database :: Front-Ends", ] @@ -32,8 +33,8 @@ github = "https://github.com/MagicStack/asyncpg" [project.optional-dependencies] test = [ - 'flake8~=5.0', - 'uvloop>=0.15.3; platform_system != "Windows"', + 'flake8~=6.1', + 'uvloop>=0.15.3; platform_system != "Windows" and python_version < "3.12.0"', ] docs = [ 'Sphinx~=5.3.0', @@ -46,7 +47,7 @@ requires = [ "setuptools>=60", "wheel", - "Cython(>=0.29.24,<0.30.0)" + "Cython(>=0.29.24,<3.0.0)" ] build-backend = "setuptools.build_meta" From 4ddb039746a99b31cc4724329dd600b87a3f9469 Mon Sep 17 00:00:00 2001 From: ermakov-oleg Date: Mon, 9 Oct 2023 08:49:54 +0400 Subject: [PATCH 126/193] Remove connection parameter caching in `Pool` (#1053) Currently, `asyncpg.Pool` will cache various aspects of the connection, like the selected host and connection parameters in an attempt to make subsequent connection attempts somewhat faster. This behavior is dubious because server host availability and role may change, such as when a primary becomes a standby and vice-versa or when a host becomes unavailable permanently, but another host from the DSN can be picked up. Just remove it. --- asyncpg/pool.py | 46 +++++++------------------------- tests/test_pool.py | 66 ++++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 75 insertions(+), 37 deletions(-) diff --git a/asyncpg/pool.py b/asyncpg/pool.py index eaf501f4..b02fe597 100644 --- a/asyncpg/pool.py +++ b/asyncpg/pool.py @@ -14,7 +14,6 @@ from . import compat from . import connection -from . import connect_utils from . import exceptions from . 
import protocol @@ -311,7 +310,6 @@ class Pool: __slots__ = ( '_queue', '_loop', '_minsize', '_maxsize', '_init', '_connect_args', '_connect_kwargs', - '_working_addr', '_working_config', '_working_params', '_holders', '_initialized', '_initializing', '_closing', '_closed', '_connection_class', '_record_class', '_generation', '_setup', '_max_queries', '_max_inactive_connection_lifetime' @@ -377,10 +375,6 @@ def __init__(self, *connect_args, self._initializing = False self._queue = None - self._working_addr = None - self._working_config = None - self._working_params = None - self._connection_class = connection_class self._record_class = record_class @@ -430,9 +424,8 @@ async def _initialize(self): # first few connections in the queue, therefore we want to walk # `self._holders` in reverse. - # Connect the first connection holder in the queue so that it - # can record `_working_addr` and `_working_opts`, which will - # speed up successive connection attempts. + # Connect the first connection holder in the queue so that + # any connection issues are visible early. first_ch = self._holders[-1] # type: PoolConnectionHolder await first_ch.connect() @@ -504,36 +497,15 @@ def set_connect_args(self, dsn=None, **connect_kwargs): self._connect_args = [dsn] self._connect_kwargs = connect_kwargs - self._working_addr = None - self._working_config = None - self._working_params = None async def _get_new_connection(self): - if self._working_addr is None: - # First connection attempt on this pool. - con = await connection.connect( - *self._connect_args, - loop=self._loop, - connection_class=self._connection_class, - record_class=self._record_class, - **self._connect_kwargs) - - self._working_addr = con._addr - self._working_config = con._config - self._working_params = con._params - - else: - # We've connected before and have a resolved address, - # and parsed options and config. 
- con = await connect_utils._connect_addr( - loop=self._loop, - addr=self._working_addr, - timeout=self._working_params.connect_timeout, - config=self._working_config, - params=self._working_params, - connection_class=self._connection_class, - record_class=self._record_class, - ) + con = await connection.connect( + *self._connect_args, + loop=self._loop, + connection_class=self._connection_class, + record_class=self._record_class, + **self._connect_kwargs, + ) if self._init is not None: try: diff --git a/tests/test_pool.py b/tests/test_pool.py index 540efb08..2407b817 100644 --- a/tests/test_pool.py +++ b/tests/test_pool.py @@ -8,6 +8,7 @@ import asyncio import inspect import os +import pathlib import platform import random import textwrap @@ -18,6 +19,7 @@ from asyncpg import _testbase as tb from asyncpg import connection as pg_connection from asyncpg import pool as pg_pool +from asyncpg import cluster as pg_cluster _system = platform.uname().system @@ -969,6 +971,70 @@ async def worker(): await pool.release(conn) +@unittest.skipIf(os.environ.get('PGHOST'), 'unmanaged cluster') +class TestPoolReconnectWithTargetSessionAttrs(tb.ClusterTestCase): + + @classmethod + def setup_cluster(cls): + cls.cluster = cls.new_cluster(pg_cluster.TempCluster) + cls.start_cluster(cls.cluster) + + async def simulate_cluster_recovery_mode(self): + port = self.cluster.get_connection_spec()['port'] + await self.loop.run_in_executor( + None, + lambda: self.cluster.stop() + ) + + # Simulate recovery mode + (pathlib.Path(self.cluster._data_dir) / 'standby.signal').touch() + + await self.loop.run_in_executor( + None, + lambda: self.cluster.start( + port=port, + server_settings=self.get_server_settings(), + ) + ) + + async def test_full_reconnect_on_node_change_role(self): + if self.cluster.get_pg_version() < (12, 0): + self.skipTest("PostgreSQL < 12 cannot support standby.signal") + return + + pool = await self.create_pool( + min_size=1, + max_size=1, + target_session_attrs='primary' + ) + 
+ # Force a new connection to be created + await pool.fetchval('SELECT 1') + + await self.simulate_cluster_recovery_mode() + + # current pool connection info cache is expired, + # but we don't know it yet + with self.assertRaises(asyncpg.TargetServerAttributeNotMatched) as cm: + await pool.execute('SELECT 1') + + self.assertEqual( + cm.exception.args[0], + "None of the hosts match the target attribute requirement " + "" + ) + + # force reconnect + with self.assertRaises(asyncpg.TargetServerAttributeNotMatched) as cm: + await pool.execute('SELECT 1') + + self.assertEqual( + cm.exception.args[0], + "None of the hosts match the target attribute requirement " + "" + ) + + @unittest.skipIf(os.environ.get('PGHOST'), 'using remote cluster for testing') class TestHotStandby(tb.HotStandbyTestCase): def create_pool(self, **kwargs): From 4bdd8a7e73b45751a4cde02070518a9e7c4246b2 Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Mon, 9 Oct 2023 10:07:50 -0700 Subject: [PATCH 127/193] Switch to Python 3.12-style `wait_for` (#1086) `wait_for` has been a mess with respect to cancellations consistently in `asyncio`. Hopefully the approach taken in Python 3.12 solves the issues, so adopt that instead of trying to "fix" `wait_for` with wrappers on older Pythons. Use `async_timeout` as a polyfill on pre-3.11 Python. 
Closes: #1056 Closes: #1052 Fixes: #955 --- asyncpg/_asyncio_compat.py | 87 +++++++++++++++++++++++++++++++++++ asyncpg/compat.py | 20 ++------ asyncpg/protocol/protocol.pyx | 6 +-- pyproject.toml | 3 ++ 4 files changed, 98 insertions(+), 18 deletions(-) create mode 100644 asyncpg/_asyncio_compat.py diff --git a/asyncpg/_asyncio_compat.py b/asyncpg/_asyncio_compat.py new file mode 100644 index 00000000..ad7dfd8c --- /dev/null +++ b/asyncpg/_asyncio_compat.py @@ -0,0 +1,87 @@ +# Backports from Python/Lib/asyncio for older Pythons +# +# Copyright (c) 2001-2023 Python Software Foundation; All Rights Reserved +# +# SPDX-License-Identifier: PSF-2.0 + + +import asyncio +import functools +import sys + +if sys.version_info < (3, 11): + from async_timeout import timeout as timeout_ctx +else: + from asyncio import timeout as timeout_ctx + + +async def wait_for(fut, timeout): + """Wait for the single Future or coroutine to complete, with timeout. + + Coroutine will be wrapped in Task. + + Returns result of the Future or coroutine. When a timeout occurs, + it cancels the task and raises TimeoutError. To avoid the task + cancellation, wrap it in shield(). + + If the wait is cancelled, the task is also cancelled. + + If the task supresses the cancellation and returns a value instead, + that value is returned. + + This function is a coroutine. 
+ """ + # The special case for timeout <= 0 is for the following case: + # + # async def test_waitfor(): + # func_started = False + # + # async def func(): + # nonlocal func_started + # func_started = True + # + # try: + # await asyncio.wait_for(func(), 0) + # except asyncio.TimeoutError: + # assert not func_started + # else: + # assert False + # + # asyncio.run(test_waitfor()) + + if timeout is not None and timeout <= 0: + fut = asyncio.ensure_future(fut) + + if fut.done(): + return fut.result() + + await _cancel_and_wait(fut) + try: + return fut.result() + except asyncio.CancelledError as exc: + raise TimeoutError from exc + + async with timeout_ctx(timeout): + return await fut + + +async def _cancel_and_wait(fut): + """Cancel the *fut* future or task and wait until it completes.""" + + loop = asyncio.get_running_loop() + waiter = loop.create_future() + cb = functools.partial(_release_waiter, waiter) + fut.add_done_callback(cb) + + try: + fut.cancel() + # We cannot wait on *fut* directly to make + # sure _cancel_and_wait itself is reliably cancellable. 
+ await waiter + finally: + fut.remove_done_callback(cb) + + +def _release_waiter(waiter, *args): + if not waiter.done(): + waiter.set_result(None) diff --git a/asyncpg/compat.py b/asyncpg/compat.py index b9b13fa5..532c197a 100644 --- a/asyncpg/compat.py +++ b/asyncpg/compat.py @@ -5,10 +5,10 @@ # the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 -import asyncio import pathlib import platform import typing +import sys SYSTEM = platform.uname().system @@ -49,17 +49,7 @@ async def wait_closed(stream): pass -# Workaround for https://bugs.python.org/issue37658 -async def wait_for(fut, timeout): - if timeout is None: - return await fut - - fut = asyncio.ensure_future(fut) - - try: - return await asyncio.wait_for(fut, timeout) - except asyncio.CancelledError: - if fut.done(): - return fut.result() - else: - raise +if sys.version_info < (3, 12): + from ._asyncio_compat import wait_for as wait_for # noqa: F401 +else: + from asyncio import wait_for as wait_for # noqa: F401 diff --git a/asyncpg/protocol/protocol.pyx b/asyncpg/protocol/protocol.pyx index f504d9d0..1f739cc2 100644 --- a/asyncpg/protocol/protocol.pyx +++ b/asyncpg/protocol/protocol.pyx @@ -249,7 +249,7 @@ cdef class BaseProtocol(CoreProtocol): while more: with timer: - await asyncio.wait_for( + await compat.wait_for( self.writing_allowed.wait(), timeout=timer.get_remaining_budget()) # On Windows the above event somehow won't allow context @@ -383,7 +383,7 @@ cdef class BaseProtocol(CoreProtocol): if buffer: try: with timer: - await asyncio.wait_for( + await compat.wait_for( sink(buffer), timeout=timer.get_remaining_budget()) except (Exception, asyncio.CancelledError) as ex: @@ -511,7 +511,7 @@ cdef class BaseProtocol(CoreProtocol): with timer: await self.writing_allowed.wait() with timer: - chunk = await asyncio.wait_for( + chunk = await compat.wait_for( iterator.__anext__(), timeout=timer.get_remaining_budget()) self._write_copy_data_msg(chunk) diff --git a/pyproject.toml b/pyproject.toml 
index 72812da1..ed2340a7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -27,6 +27,9 @@ classifiers = [ "Programming Language :: Python :: Implementation :: CPython", "Topic :: Database :: Front-Ends", ] +dependencies = [ + 'async_timeout>=4.0.3; python_version < "3.12.0"' +] [project.urls] github = "https://github.com/MagicStack/asyncpg" From 8b45beb4b60e30ad6ccd19d1e925ffdad6477aa2 Mon Sep 17 00:00:00 2001 From: ChimneySwift <28179563+ChimneySwift@users.noreply.github.com> Date: Tue, 10 Oct 2023 03:22:46 +1000 Subject: [PATCH 128/193] Update automatic PostGIS type conversion for Shapely 2.0 (#1085) Shapely 1.8 -> 2.0 deprecates the shapely.geometry.asShape() method in favor of shapely.geometry.shape(). source: https://shapely.readthedocs.io/en/stable/migration.html#other-deprecated-functionality --- docs/usage.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/usage.rst b/docs/usage.rst index 82a7a370..91897790 100644 --- a/docs/usage.rst +++ b/docs/usage.rst @@ -288,7 +288,7 @@ will work. if not hasattr(geometry, '__geo_interface__'): raise TypeError('{g} does not conform to ' 'the geo interface'.format(g=geometry)) - shape = shapely.geometry.asShape(geometry) + shape = shapely.geometry.shape(geometry) return shapely.wkb.dumps(shape) def decode_geometry(wkb): From 313b2b2bedcc10baf5871124ee915fdc48f5c4b7 Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Mon, 9 Oct 2023 10:27:56 -0700 Subject: [PATCH 129/193] Use the `timeout` context manager in the connection path (#1087) Drop timeout management gymnastics from the `connect()` path and use the `timeout` context manager instead. 
--- asyncpg/compat.py | 6 ++++++ asyncpg/connect_utils.py | 45 +++++++++++----------------------------- asyncpg/connection.py | 43 +++++++++++++++++++------------------- tests/test_adversity.py | 17 +++++++++++++++ tests/test_connect.py | 2 +- 5 files changed, 58 insertions(+), 55 deletions(-) diff --git a/asyncpg/compat.py b/asyncpg/compat.py index 532c197a..3eec9eb7 100644 --- a/asyncpg/compat.py +++ b/asyncpg/compat.py @@ -53,3 +53,9 @@ async def wait_closed(stream): from ._asyncio_compat import wait_for as wait_for # noqa: F401 else: from asyncio import wait_for as wait_for # noqa: F401 + + +if sys.version_info < (3, 11): + from ._asyncio_compat import timeout_ctx as timeout # noqa: F401 +else: + from asyncio import timeout as timeout # noqa: F401 diff --git a/asyncpg/connect_utils.py b/asyncpg/connect_utils.py index 9feef139..760e1297 100644 --- a/asyncpg/connect_utils.py +++ b/asyncpg/connect_utils.py @@ -20,7 +20,6 @@ import stat import struct import sys -import time import typing import urllib.parse import warnings @@ -55,7 +54,6 @@ def parse(cls, sslmode): 'ssl', 'sslmode', 'direct_tls', - 'connect_timeout', 'server_settings', 'target_session_attrs', ]) @@ -262,7 +260,7 @@ def _dot_postgresql_path(filename) -> typing.Optional[pathlib.Path]: def _parse_connect_dsn_and_args(*, dsn, host, port, user, password, passfile, database, ssl, - direct_tls, connect_timeout, server_settings, + direct_tls, server_settings, target_session_attrs): # `auth_hosts` is the version of host information for the purposes # of reading the pgpass file. 
@@ -655,14 +653,14 @@ def _parse_connect_dsn_and_args(*, dsn, host, port, user, params = _ConnectionParameters( user=user, password=password, database=database, ssl=ssl, sslmode=sslmode, direct_tls=direct_tls, - connect_timeout=connect_timeout, server_settings=server_settings, + server_settings=server_settings, target_session_attrs=target_session_attrs) return addrs, params def _parse_connect_arguments(*, dsn, host, port, user, password, passfile, - database, timeout, command_timeout, + database, command_timeout, statement_cache_size, max_cached_statement_lifetime, max_cacheable_statement_size, @@ -695,7 +693,7 @@ def _parse_connect_arguments(*, dsn, host, port, user, password, passfile, dsn=dsn, host=host, port=port, user=user, password=password, passfile=passfile, ssl=ssl, direct_tls=direct_tls, database=database, - connect_timeout=timeout, server_settings=server_settings, + server_settings=server_settings, target_session_attrs=target_session_attrs) config = _ClientConfiguration( @@ -799,7 +797,6 @@ async def _connect_addr( *, addr, loop, - timeout, params, config, connection_class, @@ -807,9 +804,6 @@ async def _connect_addr( ): assert loop is not None - if timeout <= 0: - raise asyncio.TimeoutError - params_input = params if callable(params.password): password = params.password() @@ -827,21 +821,16 @@ async def _connect_addr( params_retry = params._replace(ssl=None) else: # skip retry if we don't have to - return await __connect_addr(params, timeout, False, *args) + return await __connect_addr(params, False, *args) # first attempt - before = time.monotonic() try: - return await __connect_addr(params, timeout, True, *args) + return await __connect_addr(params, True, *args) except _RetryConnectSignal: pass # second attempt - timeout -= time.monotonic() - before - if timeout <= 0: - raise asyncio.TimeoutError - else: - return await __connect_addr(params_retry, timeout, False, *args) + return await __connect_addr(params_retry, False, *args) class 
_RetryConnectSignal(Exception): @@ -850,7 +839,6 @@ class _RetryConnectSignal(Exception): async def __connect_addr( params, - timeout, retry, addr, loop, @@ -882,15 +870,10 @@ async def __connect_addr( else: connector = loop.create_connection(proto_factory, *addr) - connector = asyncio.ensure_future(connector) - before = time.monotonic() - tr, pr = await compat.wait_for(connector, timeout=timeout) - timeout -= time.monotonic() - before + tr, pr = await connector try: - if timeout <= 0: - raise asyncio.TimeoutError - await compat.wait_for(connected, timeout=timeout) + await connected except ( exceptions.InvalidAuthorizationSpecificationError, exceptions.ConnectionDoesNotExistError, # seen on Windows @@ -993,23 +976,21 @@ async def _can_use_connection(connection, attr: SessionAttribute): return await can_use(connection) -async def _connect(*, loop, timeout, connection_class, record_class, **kwargs): +async def _connect(*, loop, connection_class, record_class, **kwargs): if loop is None: loop = asyncio.get_event_loop() - addrs, params, config = _parse_connect_arguments(timeout=timeout, **kwargs) + addrs, params, config = _parse_connect_arguments(**kwargs) target_attr = params.target_session_attrs candidates = [] chosen_connection = None last_error = None for addr in addrs: - before = time.monotonic() try: conn = await _connect_addr( addr=addr, loop=loop, - timeout=timeout, params=params, config=config, connection_class=connection_class, @@ -1019,10 +1000,8 @@ async def _connect(*, loop, timeout, connection_class, record_class, **kwargs): if await _can_use_connection(conn, target_attr): chosen_connection = conn break - except (OSError, asyncio.TimeoutError, ConnectionError) as ex: + except OSError as ex: last_error = ex - finally: - timeout -= time.monotonic() - before else: if target_attr == SessionAttribute.prefer_standby and candidates: chosen_connection = random.choice(candidates) diff --git a/asyncpg/connection.py b/asyncpg/connection.py index 06e4ce23..810227c7 
100644 --- a/asyncpg/connection.py +++ b/asyncpg/connection.py @@ -20,6 +20,7 @@ import warnings import weakref +from . import compat from . import connect_utils from . import cursor from . import exceptions @@ -2184,27 +2185,27 @@ async def connect(dsn=None, *, if loop is None: loop = asyncio.get_event_loop() - return await connect_utils._connect( - loop=loop, - timeout=timeout, - connection_class=connection_class, - record_class=record_class, - dsn=dsn, - host=host, - port=port, - user=user, - password=password, - passfile=passfile, - ssl=ssl, - direct_tls=direct_tls, - database=database, - server_settings=server_settings, - command_timeout=command_timeout, - statement_cache_size=statement_cache_size, - max_cached_statement_lifetime=max_cached_statement_lifetime, - max_cacheable_statement_size=max_cacheable_statement_size, - target_session_attrs=target_session_attrs - ) + async with compat.timeout(timeout): + return await connect_utils._connect( + loop=loop, + connection_class=connection_class, + record_class=record_class, + dsn=dsn, + host=host, + port=port, + user=user, + password=password, + passfile=passfile, + ssl=ssl, + direct_tls=direct_tls, + database=database, + server_settings=server_settings, + command_timeout=command_timeout, + statement_cache_size=statement_cache_size, + max_cached_statement_lifetime=max_cached_statement_lifetime, + max_cacheable_statement_size=max_cacheable_statement_size, + target_session_attrs=target_session_attrs + ) class _StatementCacheEntry: diff --git a/tests/test_adversity.py b/tests/test_adversity.py index 71532317..a6e03feb 100644 --- a/tests/test_adversity.py +++ b/tests/test_adversity.py @@ -26,6 +26,23 @@ async def test_connection_close_timeout(self): with self.assertRaises(asyncio.TimeoutError): await con.close(timeout=0.5) + @tb.with_timeout(30.0) + async def test_pool_acquire_timeout(self): + pool = await self.create_pool( + database='postgres', min_size=2, max_size=2) + try: + self.proxy.trigger_connectivity_loss() 
+ for _ in range(2): + with self.assertRaises(asyncio.TimeoutError): + async with pool.acquire(timeout=0.5): + pass + self.proxy.restore_connectivity() + async with pool.acquire(timeout=0.5): + pass + finally: + self.proxy.restore_connectivity() + pool.terminate() + @tb.with_timeout(30.0) async def test_pool_release_timeout(self): pool = await self.create_pool( diff --git a/tests/test_connect.py b/tests/test_connect.py index 171c2644..f61db61a 100644 --- a/tests/test_connect.py +++ b/tests/test_connect.py @@ -891,7 +891,7 @@ def run_testcase(self, testcase): addrs, params = connect_utils._parse_connect_dsn_and_args( dsn=dsn, host=host, port=port, user=user, password=password, passfile=passfile, database=database, ssl=sslmode, - direct_tls=False, connect_timeout=None, + direct_tls=False, server_settings=server_settings, target_session_attrs=target_session_attrs) From ccc7baf94cd45fc0155ae522361a6f2b2f551c44 Mon Sep 17 00:00:00 2001 From: Scott Fredericksen Date: Mon, 9 Oct 2023 11:38:54 -0600 Subject: [PATCH 130/193] Small fix for documentation on using SSL in Connection (#995) The sample code shows asyncio.run(run()), but the function to be run is named "main()". So this sample does not run. --- asyncpg/connection.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/asyncpg/connection.py b/asyncpg/connection.py index 810227c7..c0053733 100644 --- a/asyncpg/connection.py +++ b/asyncpg/connection.py @@ -2041,7 +2041,7 @@ async def connect(dsn=None, *, ... ) ... con = await asyncpg.connect(user='postgres', ssl=sslctx) ... await con.close() - >>> asyncio.run(run()) + >>> asyncio.run(main()) Example of programmatic SSL context configuration that is equivalent to ``sslmode=require`` (no server certificate or host verification): @@ -2058,7 +2058,7 @@ async def connect(dsn=None, *, ... sslctx.verify_mode = ssl.CERT_NONE ... con = await asyncpg.connect(user='postgres', ssl=sslctx) ... 
await con.close() - >>> asyncio.run(run()) + >>> asyncio.run(main()) :param bool direct_tls: Pass ``True`` to skip PostgreSQL STARTTLS mode and perform a direct From 70c8bd814f9d289741845fa8dc761737f7ea4e78 Mon Sep 17 00:00:00 2001 From: Ilya Beda Date: Tue, 10 Oct 2023 04:44:33 +1100 Subject: [PATCH 131/193] Use cleanup_ctx in pool usage doc (#878) Co-authored-by: Elvis Pranskevichus --- docs/usage.rst | 18 ++++++++++++------ 1 file changed, 12 insertions(+), 6 deletions(-) diff --git a/docs/usage.rst b/docs/usage.rst index 91897790..21281b6b 100644 --- a/docs/usage.rst +++ b/docs/usage.rst @@ -438,20 +438,26 @@ Web service that computes the requested power of two. text="2 ^ {} is {}".format(power, result)) - async def init_app(): + async def init_db(app): + """Initialize a connection pool.""" + app['pool'] = await asyncpg.create_pool(database='postgres', + user='postgres') + yield + app['pool'].close() + + + def init_app(): """Initialize the application server.""" app = web.Application() - # Create a database connection pool - app['pool'] = await asyncpg.create_pool(database='postgres', - user='postgres') + # Create a database context + app.cleanup_ctx.append(init_db) # Configure service routes app.router.add_route('GET', '/{power:\d+}', handle) app.router.add_route('GET', '/', handle) return app - loop = asyncio.get_event_loop() - app = loop.run_until_complete(init_app()) + app = init_app() web.run_app(app) See :ref:`asyncpg-api-pool` API documentation for more information. 
From b7ffab6cb3f9bcc5d4f9c5b22398ca9c9e450af2 Mon Sep 17 00:00:00 2001 From: Kaylynn Morgan <51037748+kaylynn234@users.noreply.github.com> Date: Tue, 10 Oct 2023 05:46:05 +1100 Subject: [PATCH 132/193] Add support for the `WHERE` clause in `copy_to` methods (#941) --- asyncpg/connection.py | 58 ++++++++++++++++++++++++++++++++----- asyncpg/exceptions/_base.py | 7 ++++- asyncpg/pool.py | 12 +++++--- tests/test_copy.py | 35 ++++++++++++++++++++-- 4 files changed, 98 insertions(+), 14 deletions(-) diff --git a/asyncpg/connection.py b/asyncpg/connection.py index c0053733..ba0e14fe 100644 --- a/asyncpg/connection.py +++ b/asyncpg/connection.py @@ -866,7 +866,7 @@ async def copy_to_table(self, table_name, *, source, delimiter=None, null=None, header=None, quote=None, escape=None, force_quote=None, force_not_null=None, force_null=None, - encoding=None): + encoding=None, where=None): """Copy data to the specified table. :param str table_name: @@ -885,6 +885,15 @@ async def copy_to_table(self, table_name, *, source, :param str schema_name: An optional schema name to qualify the table. + :param str where: + An optional SQL expression used to filter rows when copying. + + .. note:: + + Usage of this parameter requires support for the + ``COPY FROM ... WHERE`` syntax, introduced in + PostgreSQL version 12. + :param float timeout: Optional timeout value in seconds. @@ -912,6 +921,9 @@ async def copy_to_table(self, table_name, *, source, https://www.postgresql.org/docs/current/static/sql-copy.html .. versionadded:: 0.11.0 + + .. versionadded:: 0.29.0 + Added the *where* parameter. 
""" tabname = utils._quote_ident(table_name) if schema_name: @@ -923,6 +935,7 @@ async def copy_to_table(self, table_name, *, source, else: cols = '' + cond = self._format_copy_where(where) opts = self._format_copy_opts( format=format, oids=oids, freeze=freeze, delimiter=delimiter, null=null, header=header, quote=quote, escape=escape, @@ -930,14 +943,14 @@ async def copy_to_table(self, table_name, *, source, encoding=encoding ) - copy_stmt = 'COPY {tab}{cols} FROM STDIN {opts}'.format( - tab=tabname, cols=cols, opts=opts) + copy_stmt = 'COPY {tab}{cols} FROM STDIN {opts} {cond}'.format( + tab=tabname, cols=cols, opts=opts, cond=cond) return await self._copy_in(copy_stmt, source, timeout) async def copy_records_to_table(self, table_name, *, records, columns=None, schema_name=None, - timeout=None): + timeout=None, where=None): """Copy a list of records to the specified table using binary COPY. :param str table_name: @@ -954,6 +967,16 @@ async def copy_records_to_table(self, table_name, *, records, :param str schema_name: An optional schema name to qualify the table. + :param str where: + An optional SQL expression used to filter rows when copying. + + .. note:: + + Usage of this parameter requires support for the + ``COPY FROM ... WHERE`` syntax, introduced in + PostgreSQL version 12. + + :param float timeout: Optional timeout value in seconds. @@ -998,6 +1021,9 @@ async def copy_records_to_table(self, table_name, *, records, .. versionchanged:: 0.24.0 The ``records`` argument may be an asynchronous iterable. + + .. versionadded:: 0.29.0 + Added the *where* parameter. 
""" tabname = utils._quote_ident(table_name) if schema_name: @@ -1015,14 +1041,27 @@ async def copy_records_to_table(self, table_name, *, records, intro_ps = await self._prepare(intro_query, use_cache=True) + cond = self._format_copy_where(where) opts = '(FORMAT binary)' - copy_stmt = 'COPY {tab}{cols} FROM STDIN {opts}'.format( - tab=tabname, cols=cols, opts=opts) + copy_stmt = 'COPY {tab}{cols} FROM STDIN {opts} {cond}'.format( + tab=tabname, cols=cols, opts=opts, cond=cond) return await self._protocol.copy_in( copy_stmt, None, None, records, intro_ps._state, timeout) + def _format_copy_where(self, where): + if where and not self._server_caps.sql_copy_from_where: + raise exceptions.UnsupportedServerFeatureError( + 'the `where` parameter requires PostgreSQL 12 or later') + + if where: + where_clause = 'WHERE ' + where + else: + where_clause = '' + + return where_clause + def _format_copy_opts(self, *, format=None, oids=None, freeze=None, delimiter=None, null=None, header=None, quote=None, escape=None, force_quote=None, force_not_null=None, @@ -2404,7 +2443,7 @@ class _ConnectionProxy: ServerCapabilities = collections.namedtuple( 'ServerCapabilities', ['advisory_locks', 'notifications', 'plpgsql', 'sql_reset', - 'sql_close_all', 'jit']) + 'sql_close_all', 'sql_copy_from_where', 'jit']) ServerCapabilities.__doc__ = 'PostgreSQL server capabilities.' @@ -2417,6 +2456,7 @@ def _detect_server_capabilities(server_version, connection_settings): sql_reset = True sql_close_all = False jit = False + sql_copy_from_where = False elif hasattr(connection_settings, 'crdb_version'): # CockroachDB detected. advisory_locks = False @@ -2425,6 +2465,7 @@ def _detect_server_capabilities(server_version, connection_settings): sql_reset = False sql_close_all = False jit = False + sql_copy_from_where = False elif hasattr(connection_settings, 'crate_version'): # CrateDB detected. 
advisory_locks = False @@ -2433,6 +2474,7 @@ def _detect_server_capabilities(server_version, connection_settings): sql_reset = False sql_close_all = False jit = False + sql_copy_from_where = False else: # Standard PostgreSQL server assumed. advisory_locks = True @@ -2441,6 +2483,7 @@ def _detect_server_capabilities(server_version, connection_settings): sql_reset = True sql_close_all = True jit = server_version >= (11, 0) + sql_copy_from_where = server_version.major >= 12 return ServerCapabilities( advisory_locks=advisory_locks, @@ -2448,6 +2491,7 @@ def _detect_server_capabilities(server_version, connection_settings): plpgsql=plpgsql, sql_reset=sql_reset, sql_close_all=sql_close_all, + sql_copy_from_where=sql_copy_from_where, jit=jit, ) diff --git a/asyncpg/exceptions/_base.py b/asyncpg/exceptions/_base.py index e2da6bd8..00e9699a 100644 --- a/asyncpg/exceptions/_base.py +++ b/asyncpg/exceptions/_base.py @@ -12,9 +12,10 @@ __all__ = ('PostgresError', 'FatalPostgresError', 'UnknownPostgresError', 'InterfaceError', 'InterfaceWarning', 'PostgresLogMessage', + 'ClientConfigurationError', 'InternalClientError', 'OutdatedSchemaCacheError', 'ProtocolError', 'UnsupportedClientFeatureError', 'TargetServerAttributeNotMatched', - 'ClientConfigurationError') + 'UnsupportedServerFeatureError') def _is_asyncpg_class(cls): @@ -233,6 +234,10 @@ class UnsupportedClientFeatureError(InterfaceError): """Requested feature is unsupported by asyncpg.""" +class UnsupportedServerFeatureError(InterfaceError): + """Requested feature is unsupported by PostgreSQL server.""" + + class InterfaceWarning(InterfaceMessage, UserWarning): """A warning caused by an improper use of asyncpg API.""" diff --git a/asyncpg/pool.py b/asyncpg/pool.py index b02fe597..06e698df 100644 --- a/asyncpg/pool.py +++ b/asyncpg/pool.py @@ -711,7 +711,8 @@ async def copy_to_table( force_quote=None, force_not_null=None, force_null=None, - encoding=None + encoding=None, + where=None ): """Copy data to the specified table. 
@@ -740,7 +741,8 @@ async def copy_to_table( force_quote=force_quote, force_not_null=force_not_null, force_null=force_null, - encoding=encoding + encoding=encoding, + where=where ) async def copy_records_to_table( @@ -750,7 +752,8 @@ async def copy_records_to_table( records, columns=None, schema_name=None, - timeout=None + timeout=None, + where=None ): """Copy a list of records to the specified table using binary COPY. @@ -767,7 +770,8 @@ async def copy_records_to_table( records=records, columns=columns, schema_name=schema_name, - timeout=timeout + timeout=timeout, + where=where ) def acquire(self, *, timeout=None): diff --git a/tests/test_copy.py b/tests/test_copy.py index 70c9388e..be2aabaf 100644 --- a/tests/test_copy.py +++ b/tests/test_copy.py @@ -10,6 +10,7 @@ import io import os import tempfile +import unittest import asyncpg from asyncpg import _testbase as tb @@ -414,7 +415,7 @@ async def test_copy_to_table_basics(self): '*a4*|b4', '*a5*|b5', '*!**|*n-u-l-l*', - 'n-u-l-l|bb' + 'n-u-l-l|bb', ]).encode('utf-8') ) f.seek(0) @@ -644,6 +645,35 @@ async def test_copy_records_to_table_1(self): finally: await self.con.execute('DROP TABLE copytab') + async def test_copy_records_to_table_where(self): + if not self.con._server_caps.sql_copy_from_where: + raise unittest.SkipTest( + 'COPY WHERE not supported on server') + + await self.con.execute(''' + CREATE TABLE copytab_where(a text, b int, c timestamptz); + ''') + + try: + date = datetime.datetime.now(tz=datetime.timezone.utc) + delta = datetime.timedelta(days=1) + + records = [ + ('a-{}'.format(i), i, date + delta) + for i in range(100) + ] + + records.append(('a-100', None, None)) + records.append(('b-999', None, None)) + + res = await self.con.copy_records_to_table( + 'copytab_where', records=records, where='a <> \'b-999\'') + + self.assertEqual(res, 'COPY 101') + + finally: + await self.con.execute('DROP TABLE copytab_where') + async def test_copy_records_to_table_async(self): await self.con.execute(''' CREATE 
TABLE copytab_async(a text, b int, c timestamptz); @@ -660,7 +690,8 @@ async def record_generator(): yield ('a-100', None, None) res = await self.con.copy_records_to_table( - 'copytab_async', records=record_generator()) + 'copytab_async', records=record_generator(), + ) self.assertEqual(res, 'COPY 101') From ca9f03be3c64984311dbefbbd9e8ff0806a7f772 Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Mon, 9 Oct 2023 12:34:02 -0700 Subject: [PATCH 133/193] Close cursor portals once the iterator is exhausted (#1088) When iterating on a cursor, make sure to close the portal once iteration is done. This prevents the cursor from holding onto resources until the end of transaction. Fixes: #1008 --- asyncpg/cursor.py | 16 +++++++++++++++- asyncpg/protocol/protocol.pyx | 23 +++++++++++++++++++++++ tests/test_cursor.py | 20 +++++++++++++++----- 3 files changed, 53 insertions(+), 6 deletions(-) diff --git a/asyncpg/cursor.py b/asyncpg/cursor.py index 7ec159ba..b4abeed1 100644 --- a/asyncpg/cursor.py +++ b/asyncpg/cursor.py @@ -158,6 +158,17 @@ async def _exec(self, n, timeout): self._state, self._portal_name, n, True, timeout) return buffer + async def _close_portal(self, timeout): + self._check_ready() + + if not self._portal_name: + raise exceptions.InterfaceError( + 'cursor does not have an open portal') + + protocol = self._connection._protocol + await protocol.close_portal(self._portal_name, timeout) + self._portal_name = None + def __repr__(self): attrs = [] if self._exhausted: @@ -219,7 +230,7 @@ async def __anext__(self): ) self._state.attach() - if not self._portal_name: + if not self._portal_name and not self._exhausted: buffer = await self._bind_exec(self._prefetch, self._timeout) self._buffer.extend(buffer) @@ -227,6 +238,9 @@ async def __anext__(self): buffer = await self._exec(self._prefetch, self._timeout) self._buffer.extend(buffer) + if self._portal_name and self._exhausted: + await self._close_portal(self._timeout) + if self._buffer: return 
self._buffer.popleft() diff --git a/asyncpg/protocol/protocol.pyx b/asyncpg/protocol/protocol.pyx index 1f739cc2..76c62dfc 100644 --- a/asyncpg/protocol/protocol.pyx +++ b/asyncpg/protocol/protocol.pyx @@ -327,6 +327,29 @@ cdef class BaseProtocol(CoreProtocol): finally: return await waiter + @cython.iterable_coroutine + async def close_portal(self, str portal_name, timeout): + + if self.cancel_waiter is not None: + await self.cancel_waiter + if self.cancel_sent_waiter is not None: + await self.cancel_sent_waiter + self.cancel_sent_waiter = None + + self._check_state() + timeout = self._get_timeout_impl(timeout) + + waiter = self._new_waiter(timeout) + try: + self._close( + portal_name, + True) # network op + except Exception as ex: + waiter.set_exception(ex) + self._coreproto_error() + finally: + return await waiter + @cython.iterable_coroutine async def query(self, query, timeout): if self.cancel_waiter is not None: diff --git a/tests/test_cursor.py b/tests/test_cursor.py index 565def85..ad446bc3 100644 --- a/tests/test_cursor.py +++ b/tests/test_cursor.py @@ -84,11 +84,21 @@ async def test_cursor_iterable_06(self): recs = [] async with self.con.transaction(): - async for rec in self.con.cursor( - 'SELECT generate_series(0, $1::int)', 10): - recs.append(rec) - - self.assertEqual(recs, [(i,) for i in range(11)]) + await self.con.execute(''' + CREATE TABLE cursor_iterable_06 (id int); + INSERT INTO cursor_iterable_06 VALUES (0), (1); + ''') + try: + cur = self.con.cursor('SELECT * FROM cursor_iterable_06') + async for rec in cur: + recs.append(rec) + finally: + # Check that after iteration has exhausted the cursor, + # its associated portal is closed properly, unlocking + # the table. 
+ await self.con.execute('DROP TABLE cursor_iterable_06') + + self.assertEqual(recs, [(i,) for i in range(2)]) class TestCursor(tb.ConnectedTestCase): From 93a6f79afb4251fa48edf6959aa87532eee91ef5 Mon Sep 17 00:00:00 2001 From: Andrew Geng Date: Mon, 9 Oct 2023 15:51:22 -0400 Subject: [PATCH 134/193] Cut BaseProtocol circular reference on close. (#1049) A bound method contains a reference to the instance it's bound to. Most of the time, bound methods are created lazily at access time by the descriptor protocol and discarded after calling. But saving a bound method as another attribute on the instance creates a long-lived cycle, here `.timeout_callback.__self__`, that needs to be explicitly broken if we don't want to wake up python's garbage collector to do it. Without this change, the new assertion in the tests would fail, and `pytest --pdb` would show the bound methods `_on_timeout` and `_on_waiter_completed` at the end of `p gc.get_referrers(protoref())`. [Also, unset `transport` in `Protocol.abort()` to break another cycle] Co-authored-by: Elvis Pranskevichus --- asyncpg/protocol/protocol.pxd | 2 -- asyncpg/protocol/protocol.pyx | 7 +++---- tests/test_connect.py | 30 ++++++++++++++++++++++-------- 3 files changed, 25 insertions(+), 14 deletions(-) diff --git a/asyncpg/protocol/protocol.pxd b/asyncpg/protocol/protocol.pxd index 5f144e55..a9ac8d5f 100644 --- a/asyncpg/protocol/protocol.pxd +++ b/asyncpg/protocol/protocol.pxd @@ -39,8 +39,6 @@ cdef class BaseProtocol(CoreProtocol): bint return_extra object create_future object timeout_handle - object timeout_callback - object completed_callback object conref type record_class bint is_reading diff --git a/asyncpg/protocol/protocol.pyx b/asyncpg/protocol/protocol.pyx index 76c62dfc..b43b0e9c 100644 --- a/asyncpg/protocol/protocol.pyx +++ b/asyncpg/protocol/protocol.pyx @@ -98,8 +98,6 @@ cdef class BaseProtocol(CoreProtocol): self.writing_allowed.set() self.timeout_handle = None - self.timeout_callback = 
self._on_timeout - self.completed_callback = self._on_waiter_completed self.queries_count = 0 @@ -607,6 +605,7 @@ cdef class BaseProtocol(CoreProtocol): self._handle_waiter_on_connection_lost(None) self._terminate() self.transport.abort() + self.transport = None @cython.iterable_coroutine async def close(self, timeout): @@ -777,8 +776,8 @@ cdef class BaseProtocol(CoreProtocol): self.waiter = self.create_future() if timeout is not None: self.timeout_handle = self.loop.call_later( - timeout, self.timeout_callback, self.waiter) - self.waiter.add_done_callback(self.completed_callback) + timeout, self._on_timeout, self.waiter) + self.waiter.add_done_callback(self._on_waiter_completed) return self.waiter cdef _on_result__connect(self, object waiter): diff --git a/tests/test_connect.py b/tests/test_connect.py index f61db61a..1af074f1 100644 --- a/tests/test_connect.py +++ b/tests/test_connect.py @@ -7,6 +7,7 @@ import asyncio import contextlib +import gc import ipaddress import os import pathlib @@ -1846,14 +1847,27 @@ async def worker(): class TestConnectionGC(tb.ClusterTestCase): async def _run_no_explicit_close_test(self): - con = await self.connect() - await con.fetchval("select 123") - proto = con._protocol - conref = weakref.ref(con) - del con - - self.assertIsNone(conref()) - self.assertTrue(proto.is_closed()) + gc_was_enabled = gc.isenabled() + gc.disable() + try: + con = await self.connect() + await con.fetchval("select 123") + proto = con._protocol + conref = weakref.ref(con) + del con + + self.assertIsNone(conref()) + self.assertTrue(proto.is_closed()) + + # tick event loop; asyncio.selector_events._SelectorSocketTransport + # needs a chance to close itself and remove its reference to proto + await asyncio.sleep(0) + protoref = weakref.ref(proto) + del proto + self.assertIsNone(protoref()) + finally: + if gc_was_enabled: + gc.enable() async def test_no_explicit_close_no_debug(self): olddebug = self.loop.get_debug() From b2697ffdf18f7acd88a35e9a0a252c3b6fb25070 
Mon Sep 17 00:00:00 2001 From: Dan Watson Date: Mon, 9 Oct 2023 16:15:30 -0400 Subject: [PATCH 135/193] Add query logging callbacks and context manager (#1043) --- asyncpg/connection.py | 128 ++++++++++++++++++++++++++++++++++++++---- tests/test_logging.py | 51 +++++++++++++++++ 2 files changed, 169 insertions(+), 10 deletions(-) create mode 100644 tests/test_logging.py diff --git a/asyncpg/connection.py b/asyncpg/connection.py index ba0e14fe..0367e365 100644 --- a/asyncpg/connection.py +++ b/asyncpg/connection.py @@ -9,6 +9,7 @@ import asyncpg import collections import collections.abc +import contextlib import functools import itertools import inspect @@ -53,7 +54,7 @@ class Connection(metaclass=ConnectionMeta): '_intro_query', '_reset_query', '_proxy', '_stmt_exclusive_section', '_config', '_params', '_addr', '_log_listeners', '_termination_listeners', '_cancellations', - '_source_traceback', '__weakref__') + '_source_traceback', '_query_loggers', '__weakref__') def __init__(self, protocol, transport, loop, addr, @@ -87,6 +88,7 @@ def __init__(self, protocol, transport, loop, self._log_listeners = set() self._cancellations = set() self._termination_listeners = set() + self._query_loggers = set() settings = self._protocol.get_settings() ver_string = settings.server_version @@ -224,6 +226,30 @@ def remove_termination_listener(self, callback): """ self._termination_listeners.discard(_Callback.from_callable(callback)) + def add_query_logger(self, callback): + """Add a logger that will be called when queries are executed. + + :param callable callback: + A callable or a coroutine function receiving one argument: + **record**: a LoggedQuery containing `query`, `args`, `timeout`, + `elapsed`, `exception`, `conn_addr`, and + `conn_params`. + + .. versionadded:: 0.29.0 + """ + self._query_loggers.add(_Callback.from_callable(callback)) + + def remove_query_logger(self, callback): + """Remove a query logger callback. 
+ + :param callable callback: + The callable or coroutine function that was passed to + :meth:`Connection.add_query_logger`. + + .. versionadded:: 0.29.0 + """ + self._query_loggers.discard(_Callback.from_callable(callback)) + def get_server_pid(self): """Return the PID of the Postgres server the connection is bound to.""" return self._protocol.get_server_pid() @@ -317,7 +343,12 @@ async def execute(self, query: str, *args, timeout: float=None) -> str: self._check_open() if not args: - return await self._protocol.query(query, timeout) + if self._query_loggers: + with self._time_and_log(query, args, timeout): + result = await self._protocol.query(query, timeout) + else: + result = await self._protocol.query(query, timeout) + return result _, status, _ = await self._execute( query, @@ -1487,6 +1518,7 @@ def _cleanup(self): self._mark_stmts_as_closed() self._listeners.clear() self._log_listeners.clear() + self._query_loggers.clear() self._clean_tasks() def _clean_tasks(self): @@ -1770,6 +1802,63 @@ async def _execute( ) return result + @contextlib.contextmanager + def query_logger(self, callback): + """Context manager that adds `callback` to the list of query loggers, + and removes it upon exit. + + :param callable callback: + A callable or a coroutine function receiving one argument: + **record**: a LoggedQuery containing `query`, `args`, `timeout`, + `elapsed`, `exception`, `conn_addr`, and + `conn_params`. + + Example: + + .. code-block:: pycon + + >>> class QuerySaver: + def __init__(self): + self.queries = [] + def __call__(self, record): + self.queries.append(record.query) + >>> with con.query_logger(QuerySaver()): + >>> await con.execute("SELECT 1") + >>> print(log.queries) + ['SELECT 1'] + + .. 
versionadded:: 0.29.0 + """ + self.add_query_logger(callback) + yield + self.remove_query_logger(callback) + + @contextlib.contextmanager + def _time_and_log(self, query, args, timeout): + start = time.monotonic() + exception = None + try: + yield + except BaseException as ex: + exception = ex + raise + finally: + elapsed = time.monotonic() - start + record = LoggedQuery( + query=query, + args=args, + timeout=timeout, + elapsed=elapsed, + exception=exception, + conn_addr=self._addr, + conn_params=self._params, + ) + for cb in self._query_loggers: + if cb.is_async: + self._loop.create_task(cb.cb(record)) + else: + self._loop.call_soon(cb.cb, record) + async def __execute( self, query, @@ -1790,13 +1879,24 @@ async def __execute( timeout=timeout, ) timeout = self._protocol._get_timeout(timeout) - return await self._do_execute( - query, - executor, - timeout, - record_class=record_class, - ignore_custom_codec=ignore_custom_codec, - ) + if self._query_loggers: + with self._time_and_log(query, args, timeout): + result, stmt = await self._do_execute( + query, + executor, + timeout, + record_class=record_class, + ignore_custom_codec=ignore_custom_codec, + ) + else: + result, stmt = await self._do_execute( + query, + executor, + timeout, + record_class=record_class, + ignore_custom_codec=ignore_custom_codec, + ) + return result, stmt async def _executemany(self, query, args, timeout): executor = lambda stmt, timeout: self._protocol.bind_execute_many( @@ -1807,7 +1907,8 @@ async def _executemany(self, query, args, timeout): ) timeout = self._protocol._get_timeout(timeout) with self._stmt_exclusive_section: - result, _ = await self._do_execute(query, executor, timeout) + with self._time_and_log(query, args, timeout): + result, _ = await self._do_execute(query, executor, timeout) return result async def _do_execute( @@ -2440,6 +2541,13 @@ class _ConnectionProxy: __slots__ = () +LoggedQuery = collections.namedtuple( + 'LoggedQuery', + ['query', 'args', 'timeout', 'elapsed', 
'exception', 'conn_addr', + 'conn_params']) +LoggedQuery.__doc__ = 'Log record of an executed query.' + + ServerCapabilities = collections.namedtuple( 'ServerCapabilities', ['advisory_locks', 'notifications', 'plpgsql', 'sql_reset', diff --git a/tests/test_logging.py b/tests/test_logging.py new file mode 100644 index 00000000..a9af94c4 --- /dev/null +++ b/tests/test_logging.py @@ -0,0 +1,51 @@ +import asyncio + +from asyncpg import _testbase as tb +from asyncpg import exceptions + + +class LogCollector: + def __init__(self): + self.records = [] + + def __call__(self, record): + self.records.append(record) + + +class TestQueryLogging(tb.ConnectedTestCase): + + async def test_logging_context(self): + queries = asyncio.Queue() + + def query_saver(record): + queries.put_nowait(record) + + log = LogCollector() + + with self.con.query_logger(query_saver): + self.assertEqual(len(self.con._query_loggers), 1) + await self.con.execute("SELECT 1") + with self.con.query_logger(log): + self.assertEqual(len(self.con._query_loggers), 2) + await self.con.execute("SELECT 2") + + r1 = await queries.get() + r2 = await queries.get() + self.assertEqual(r1.query, "SELECT 1") + self.assertEqual(r2.query, "SELECT 2") + self.assertEqual(len(log.records), 1) + self.assertEqual(log.records[0].query, "SELECT 2") + self.assertEqual(len(self.con._query_loggers), 0) + + async def test_error_logging(self): + log = LogCollector() + with self.con.query_logger(log): + with self.assertRaises(exceptions.UndefinedColumnError): + await self.con.execute("SELECT x") + + await asyncio.sleep(0) # wait for logging + self.assertEqual(len(log.records), 1) + self.assertEqual( + type(log.records[0].exception), + exceptions.UndefinedColumnError + ) From d7faaff57a7a9c0029a31f09564d30ab35007907 Mon Sep 17 00:00:00 2001 From: Marcel <62351477+lezram@users.noreply.github.com> Date: Wed, 11 Oct 2023 00:37:14 +0200 Subject: [PATCH 136/193] fix: allow host tuple (#1021) --- asyncpg/connect_utils.py | 2 +- 
tests/test_connect.py | 13 +++++++++++++ 2 files changed, 14 insertions(+), 1 deletion(-) diff --git a/asyncpg/connect_utils.py b/asyncpg/connect_utils.py index 760e1297..414231fd 100644 --- a/asyncpg/connect_utils.py +++ b/asyncpg/connect_utils.py @@ -403,7 +403,7 @@ def _parse_connect_dsn_and_args(*, dsn, host, port, user, host = ['/run/postgresql', '/var/run/postgresql', '/tmp', '/private/tmp', 'localhost'] - if not isinstance(host, list): + if not isinstance(host, (list, tuple)): host = [host] if auth_hosts is None: diff --git a/tests/test_connect.py b/tests/test_connect.py index 1af074f1..5333e2c5 100644 --- a/tests/test_connect.py +++ b/tests/test_connect.py @@ -671,6 +671,19 @@ class TestConnectParams(tb.TestCase): 'target_session_attrs': 'any', }) }, + { + 'name': 'params_multi_host_dsn_env_mix_tuple', + 'env': { + 'PGUSER': 'foo', + }, + 'dsn': 'postgresql:///db', + 'host': ('host1', 'host2'), + 'result': ([('host1', 5432), ('host2', 5432)], { + 'database': 'db', + 'user': 'foo', + 'target_session_attrs': 'any', + }) + }, { 'name': 'params_combine_dsn_settings_override_and_ssl', From 74f3a0031532b314d9141b6de12aa74db7726b3d Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Sat, 4 Nov 2023 21:37:18 -0700 Subject: [PATCH 137/193] asyncpg v0.29.0 Minor fixes and improvements. 
Improvements ============ * Python 3.12 and PostgreSQL 16 support (#1084) (by @elprans in deea86ce) * Add support for tuple-format custom codecs on composite types (#1061) (by @elprans in 922fcd10) * Support `target_session_attrs` in URL format, add tests (#1073) (by @elprans in 7cb4e70d) * Infinity numeric support (#1067) (by @krokoziabla in 0c3bf600 for #1020) * Add support for the `WHERE` clause in `copy_to` methods (#941) (by @kaylynn234 in b7ffab6c) * Add query logging callbacks and context manager (#1043) (by @dcwatson in b2697ffd) Fixes ===== * When prepared statements are disabled, avoid relying on them harder (#1065) (by @elprans in cbf64e18) * Handle environments with HOME set to a not-a-directory (#1063) (by @elprans in af922bcf) * Fix handling of non-ASCII passwords (#1062) (by @elprans in 89d5bd03) * Disable JIT while doing type introspection (#1082) (by @elprans in f21ebf64) * Remove connection parameter caching in `Pool` (#1053) (by @ermakov-oleg in 4ddb0397) * Switch to Python 3.12-style `wait_for` (#1086) (by @elprans in 4bdd8a7e) * Update automatic PostGIS type conversion for Shapely 2.0 (#1085) (by @ChimneySwift in 8b45beb4) * Use the `timeout` context manager in the connection path (#1087) (by @elprans in 313b2b2b) * Small fix for documentation on using SSL in Connection (#995) (by @ScottFred in ccc7baf9) * Use cleanup_ctx in pool usage doc (#878) (by @ir4y in 70c8bd81) * Close cursor portals once the iterator is exhausted (#1088) (by @elprans in ca9f03be) * Cut BaseProtocol circular reference on close. 
(#1049) (by @pteromys in 93a6f79a) * Allow passing hosts as tuples to `connect()` (in addition to lists) (#1021) (by @lezram in d7faaff5) Other ===== * Drop support for Python 3.7 (#1064) (by @bryanforbes in 87ab1431) --- .github/release_log.py | 5 +---- .github/workflows/release.yml | 1 + asyncpg/_version.py | 2 +- 3 files changed, 3 insertions(+), 5 deletions(-) diff --git a/.github/release_log.py b/.github/release_log.py index 0e3ee7f4..717cd6f6 100755 --- a/.github/release_log.py +++ b/.github/release_log.py @@ -45,10 +45,7 @@ def main(): print(f'* {first_line}') print(f' (by {username} in {sha}', end='') - if issue_num: - print(f' for #{issue_num})') - else: - print(')') + print(')') print() diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 52daf01a..eef0799e 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -98,6 +98,7 @@ jobs: name: Build ${{ matrix.only }} strategy: + fail-fast: false matrix: include: ${{ fromJson(needs.build-wheels-matrix.outputs.include) }} diff --git a/asyncpg/_version.py b/asyncpg/_version.py index ddc3a79a..64da11df 100644 --- a/asyncpg/_version.py +++ b/asyncpg/_version.py @@ -10,4 +10,4 @@ # supported platforms, publish the packages on PyPI, merge the PR # to the target branch, create a Git tag pointing to the commit. -__version__ = '0.29.0.dev0' +__version__ = '0.29.0' From c2c8d20ba8c0057b0e0bc08bd892abc36862e553 Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Sat, 4 Nov 2023 23:00:18 -0700 Subject: [PATCH 138/193] Post-release version bump --- asyncpg/_version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/asyncpg/_version.py b/asyncpg/_version.py index 64da11df..67fd67ab 100644 --- a/asyncpg/_version.py +++ b/asyncpg/_version.py @@ -10,4 +10,4 @@ # supported platforms, publish the packages on PyPI, merge the PR # to the target branch, create a Git tag pointing to the commit. 
-__version__ = '0.29.0' +__version__ = '0.30.0.dev0' From 1d4e56802a14d92b9f90b509ed8d841b851a0286 Mon Sep 17 00:00:00 2001 From: Eugene Toder Date: Mon, 4 Mar 2024 15:07:32 -0500 Subject: [PATCH 139/193] Implement GSSAPI authentication (#1122) Most commonly used with Kerberos. Closes: #769 --- .github/workflows/install-krb5.sh | 10 ++ .github/workflows/tests.yml | 2 + asyncpg/connect_utils.py | 19 +++- asyncpg/connection.py | 13 ++- asyncpg/protocol/coreproto.pxd | 15 +-- asyncpg/protocol/coreproto.pyx | 63 ++++++++++++- asyncpg/protocol/protocol.pxd | 1 - asyncpg/protocol/protocol.pyx | 5 +- pyproject.toml | 5 + tests/test_connect.py | 150 ++++++++++++++++++++++++------ 10 files changed, 230 insertions(+), 53 deletions(-) create mode 100755 .github/workflows/install-krb5.sh diff --git a/.github/workflows/install-krb5.sh b/.github/workflows/install-krb5.sh new file mode 100755 index 00000000..093b8519 --- /dev/null +++ b/.github/workflows/install-krb5.sh @@ -0,0 +1,10 @@ +#!/bin/bash + +set -Eexuo pipefail + +if [ "$RUNNER_OS" == "Linux" ]; then + # Assume Ubuntu since this is the only Linux used in CI. 
+ sudo apt-get update + sudo apt-get install -y --no-install-recommends \ + libkrb5-dev krb5-user krb5-kdc krb5-admin-server +fi diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 7fc77b38..b7229e18 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -62,6 +62,7 @@ jobs: - name: Install Python Deps if: steps.release.outputs.version == 0 run: | + .github/workflows/install-krb5.sh python -m pip install -U pip setuptools wheel python -m pip install -e .[test] @@ -122,6 +123,7 @@ jobs: - name: Install Python Deps if: steps.release.outputs.version == 0 run: | + .github/workflows/install-krb5.sh python -m pip install -U pip setuptools wheel python -m pip install -e .[test] diff --git a/asyncpg/connect_utils.py b/asyncpg/connect_utils.py index 414231fd..8039d1b4 100644 --- a/asyncpg/connect_utils.py +++ b/asyncpg/connect_utils.py @@ -56,6 +56,7 @@ def parse(cls, sslmode): 'direct_tls', 'server_settings', 'target_session_attrs', + 'krbsrvname', ]) @@ -261,7 +262,7 @@ def _dot_postgresql_path(filename) -> typing.Optional[pathlib.Path]: def _parse_connect_dsn_and_args(*, dsn, host, port, user, password, passfile, database, ssl, direct_tls, server_settings, - target_session_attrs): + target_session_attrs, krbsrvname): # `auth_hosts` is the version of host information for the purposes # of reading the pgpass file. 
auth_hosts = None @@ -383,6 +384,11 @@ def _parse_connect_dsn_and_args(*, dsn, host, port, user, if target_session_attrs is None: target_session_attrs = dsn_target_session_attrs + if 'krbsrvname' in query: + val = query.pop('krbsrvname') + if krbsrvname is None: + krbsrvname = val + if query: if server_settings is None: server_settings = query @@ -650,11 +656,15 @@ def _parse_connect_dsn_and_args(*, dsn, host, port, user, ) ) from None + if krbsrvname is None: + krbsrvname = os.getenv('PGKRBSRVNAME') + params = _ConnectionParameters( user=user, password=password, database=database, ssl=ssl, sslmode=sslmode, direct_tls=direct_tls, server_settings=server_settings, - target_session_attrs=target_session_attrs) + target_session_attrs=target_session_attrs, + krbsrvname=krbsrvname) return addrs, params @@ -665,7 +675,7 @@ def _parse_connect_arguments(*, dsn, host, port, user, password, passfile, max_cached_statement_lifetime, max_cacheable_statement_size, ssl, direct_tls, server_settings, - target_session_attrs): + target_session_attrs, krbsrvname): local_vars = locals() for var_name in {'max_cacheable_statement_size', 'max_cached_statement_lifetime', @@ -694,7 +704,8 @@ def _parse_connect_arguments(*, dsn, host, port, user, password, passfile, password=password, passfile=passfile, ssl=ssl, direct_tls=direct_tls, database=database, server_settings=server_settings, - target_session_attrs=target_session_attrs) + target_session_attrs=target_session_attrs, + krbsrvname=krbsrvname) config = _ClientConfiguration( command_timeout=command_timeout, diff --git a/asyncpg/connection.py b/asyncpg/connection.py index 0367e365..bf5f6db6 100644 --- a/asyncpg/connection.py +++ b/asyncpg/connection.py @@ -2007,7 +2007,8 @@ async def connect(dsn=None, *, connection_class=Connection, record_class=protocol.Record, server_settings=None, - target_session_attrs=None): + target_session_attrs=None, + krbsrvname=None): r"""A coroutine to establish a connection to a PostgreSQL server. 
The connection parameters may be specified either as a connection @@ -2235,6 +2236,10 @@ async def connect(dsn=None, *, or the value of the ``PGTARGETSESSIONATTRS`` environment variable, or ``"any"`` if neither is specified. + :param str krbsrvname: + Kerberos service name to use when authenticating with GSSAPI. This + must match the server configuration. Defaults to 'postgres'. + :return: A :class:`~asyncpg.connection.Connection` instance. Example: @@ -2303,6 +2308,9 @@ async def connect(dsn=None, *, .. versionchanged:: 0.28.0 Added the *target_session_attrs* parameter. + .. versionchanged:: 0.30.0 + Added the *krbsrvname* parameter. + .. _SSLContext: https://docs.python.org/3/library/ssl.html#ssl.SSLContext .. _create_default_context: https://docs.python.org/3/library/ssl.html#ssl.create_default_context @@ -2344,7 +2352,8 @@ async def connect(dsn=None, *, statement_cache_size=statement_cache_size, max_cached_statement_lifetime=max_cached_statement_lifetime, max_cacheable_statement_size=max_cacheable_statement_size, - target_session_attrs=target_session_attrs + target_session_attrs=target_session_attrs, + krbsrvname=krbsrvname, ) diff --git a/asyncpg/protocol/coreproto.pxd b/asyncpg/protocol/coreproto.pxd index 7ce4f574..612d8cae 100644 --- a/asyncpg/protocol/coreproto.pxd +++ b/asyncpg/protocol/coreproto.pxd @@ -51,16 +51,6 @@ cdef enum AuthenticationMessage: AUTH_SASL_FINAL = 12 -AUTH_METHOD_NAME = { - AUTH_REQUIRED_KERBEROS: 'kerberosv5', - AUTH_REQUIRED_PASSWORD: 'password', - AUTH_REQUIRED_PASSWORDMD5: 'md5', - AUTH_REQUIRED_GSS: 'gss', - AUTH_REQUIRED_SASL: 'scram-sha-256', - AUTH_REQUIRED_SSPI: 'sspi', -} - - cdef enum ResultType: RESULT_OK = 1 RESULT_FAILED = 2 @@ -96,10 +86,13 @@ cdef class CoreProtocol: object transport + object address # Instance of _ConnectionParameters object con_params # Instance of SCRAMAuthentication SCRAMAuthentication scram + # Instance of gssapi.SecurityContext + object gss_ctx readonly int32_t backend_pid readonly int32_t 
backend_secret @@ -145,6 +138,8 @@ cdef class CoreProtocol: cdef _auth_password_message_md5(self, bytes salt) cdef _auth_password_message_sasl_initial(self, list sasl_auth_methods) cdef _auth_password_message_sasl_continue(self, bytes server_response) + cdef _auth_gss_init(self) + cdef _auth_gss_step(self, bytes server_response) cdef _write(self, buf) cdef _writelines(self, list buffers) diff --git a/asyncpg/protocol/coreproto.pyx b/asyncpg/protocol/coreproto.pyx index 64afe934..7a2b257e 100644 --- a/asyncpg/protocol/coreproto.pyx +++ b/asyncpg/protocol/coreproto.pyx @@ -6,14 +6,26 @@ import hashlib +import socket include "scram.pyx" +cdef dict AUTH_METHOD_NAME = { + AUTH_REQUIRED_KERBEROS: 'kerberosv5', + AUTH_REQUIRED_PASSWORD: 'password', + AUTH_REQUIRED_PASSWORDMD5: 'md5', + AUTH_REQUIRED_GSS: 'gss', + AUTH_REQUIRED_SASL: 'scram-sha-256', + AUTH_REQUIRED_SSPI: 'sspi', +} + + cdef class CoreProtocol: - def __init__(self, con_params): + def __init__(self, addr, con_params): + self.address = addr # type of `con_params` is `_ConnectionParameters` self.buffer = ReadBuffer() self.user = con_params.user @@ -26,6 +38,8 @@ cdef class CoreProtocol: self.encoding = 'utf-8' # type of `scram` is `SCRAMAuthentcation` self.scram = None + # type of `gss_ctx` is `gssapi.SecurityContext` + self.gss_ctx = None self._reset_result() @@ -619,9 +633,17 @@ cdef class CoreProtocol: 'could not verify server signature for ' 'SCRAM authentciation: scram-sha-256', ) + self.scram = None + + elif status == AUTH_REQUIRED_GSS: + self._auth_gss_init() + self.auth_msg = self._auth_gss_step(None) + + elif status == AUTH_REQUIRED_GSS_CONTINUE: + server_response = self.buffer.consume_message() + self.auth_msg = self._auth_gss_step(server_response) elif status in (AUTH_REQUIRED_KERBEROS, AUTH_REQUIRED_SCMCRED, - AUTH_REQUIRED_GSS, AUTH_REQUIRED_GSS_CONTINUE, AUTH_REQUIRED_SSPI): self.result_type = RESULT_FAILED self.result = apg_exc.InterfaceError( @@ -634,7 +656,8 @@ cdef class CoreProtocol: 
'unsupported authentication method requested by the ' 'server: {}'.format(status)) - if status not in [AUTH_SASL_CONTINUE, AUTH_SASL_FINAL]: + if status not in [AUTH_SASL_CONTINUE, AUTH_SASL_FINAL, + AUTH_REQUIRED_GSS_CONTINUE]: self.buffer.discard_message() cdef _auth_password_message_cleartext(self): @@ -691,6 +714,40 @@ cdef class CoreProtocol: return msg + cdef _auth_gss_init(self): + try: + import gssapi + except ModuleNotFoundError: + raise RuntimeError( + 'gssapi module not found; please install asyncpg[gssapi] to ' + 'use asyncpg with Kerberos or GSSAPI authentication' + ) from None + + service_name = self.con_params.krbsrvname or 'postgres' + # find the canonical name of the server host + if isinstance(self.address, str): + raise RuntimeError('GSSAPI authentication is only supported for ' + 'TCP/IP connections') + + host = self.address[0] + host_cname = socket.gethostbyname_ex(host)[0] + gss_name = gssapi.Name(f'{service_name}/{host_cname}') + self.gss_ctx = gssapi.SecurityContext(name=gss_name, usage='initiate') + + cdef _auth_gss_step(self, bytes server_response): + cdef: + WriteBuffer msg + + token = self.gss_ctx.step(server_response) + if not token: + self.gss_ctx = None + return None + msg = WriteBuffer.new_message(b'p') + msg.write_bytes(token) + msg.end_message() + + return msg + cdef _parse_msg_ready_for_query(self): cdef char status = self.buffer.read_byte() diff --git a/asyncpg/protocol/protocol.pxd b/asyncpg/protocol/protocol.pxd index a9ac8d5f..cd221fbb 100644 --- a/asyncpg/protocol/protocol.pxd +++ b/asyncpg/protocol/protocol.pxd @@ -31,7 +31,6 @@ cdef class BaseProtocol(CoreProtocol): cdef: object loop - object address ConnectionSettings settings object cancel_sent_waiter object cancel_waiter diff --git a/asyncpg/protocol/protocol.pyx b/asyncpg/protocol/protocol.pyx index b43b0e9c..1459d908 100644 --- a/asyncpg/protocol/protocol.pyx +++ b/asyncpg/protocol/protocol.pyx @@ -75,7 +75,7 @@ NO_TIMEOUT = object() cdef class 
BaseProtocol(CoreProtocol): def __init__(self, addr, connected_fut, con_params, record_class: type, loop): # type of `con_params` is `_ConnectionParameters` - CoreProtocol.__init__(self, con_params) + CoreProtocol.__init__(self, addr, con_params) self.loop = loop self.transport = None @@ -83,8 +83,7 @@ cdef class BaseProtocol(CoreProtocol): self.cancel_waiter = None self.cancel_sent_waiter = None - self.address = addr - self.settings = ConnectionSettings((self.address, con_params.database)) + self.settings = ConnectionSettings((addr, con_params.database)) self.record_class = record_class self.statement = None diff --git a/pyproject.toml b/pyproject.toml index ed2340a7..8209d838 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -35,9 +35,14 @@ dependencies = [ github = "https://github.com/MagicStack/asyncpg" [project.optional-dependencies] +gssapi = [ + 'gssapi', +] test = [ 'flake8~=6.1', 'uvloop>=0.15.3; platform_system != "Windows" and python_version < "3.12.0"', + 'gssapi; platform_system == "Linux"', + 'k5test; platform_system == "Linux"', ] docs = [ 'Sphinx~=5.3.0', diff --git a/tests/test_connect.py b/tests/test_connect.py index 5333e2c5..ebf0e462 100644 --- a/tests/test_connect.py +++ b/tests/test_connect.py @@ -130,30 +130,22 @@ def test_server_version_02(self): CORRECT_PASSWORD = 'correct\u1680password' -class TestAuthentication(tb.ConnectedTestCase): +class BaseTestAuthentication(tb.ConnectedTestCase): + USERS = [] + def setUp(self): super().setUp() if not self.cluster.is_managed(): self.skipTest('unmanaged cluster') - methods = [ - ('trust', None), - ('reject', None), - ('scram-sha-256', CORRECT_PASSWORD), - ('md5', CORRECT_PASSWORD), - ('password', CORRECT_PASSWORD), - ] - self.cluster.reset_hba() create_script = [] - for method, password in methods: + for username, method, password in self.USERS: if method == 'scram-sha-256' and self.server_version.major < 10: continue - username = method.replace('-', '_') - # if this is a SCRAM password, we need to 
set the encryption method # to "scram-sha-256" in order to properly hash the password if method == 'scram-sha-256': @@ -162,7 +154,7 @@ def setUp(self): ) create_script.append( - 'CREATE ROLE {}_user WITH LOGIN{};'.format( + 'CREATE ROLE "{}" WITH LOGIN{};'.format( username, f' PASSWORD E{(password or "")!r}' ) @@ -175,20 +167,20 @@ def setUp(self): "SET password_encryption = 'md5';" ) - if _system != 'Windows': + if _system != 'Windows' and method != 'gss': self.cluster.add_hba_entry( type='local', - database='postgres', user='{}_user'.format(username), + database='postgres', user=username, auth_method=method) self.cluster.add_hba_entry( type='host', address=ipaddress.ip_network('127.0.0.0/24'), - database='postgres', user='{}_user'.format(username), + database='postgres', user=username, auth_method=method) self.cluster.add_hba_entry( type='host', address=ipaddress.ip_network('::1/128'), - database='postgres', user='{}_user'.format(username), + database='postgres', user=username, auth_method=method) # Put hba changes into effect @@ -201,28 +193,28 @@ def tearDown(self): # Reset cluster's pg_hba.conf since we've meddled with it self.cluster.trust_local_connections() - methods = [ - 'trust', - 'reject', - 'scram-sha-256', - 'md5', - 'password', - ] - drop_script = [] - for method in methods: + for username, method, _ in self.USERS: if method == 'scram-sha-256' and self.server_version.major < 10: continue - username = method.replace('-', '_') - - drop_script.append('DROP ROLE {}_user;'.format(username)) + drop_script.append('DROP ROLE "{}";'.format(username)) drop_script = '\n'.join(drop_script) self.loop.run_until_complete(self.con.execute(drop_script)) super().tearDown() + +class TestAuthentication(BaseTestAuthentication): + USERS = [ + ('trust_user', 'trust', None), + ('reject_user', 'reject', None), + ('scram_sha_256_user', 'scram-sha-256', CORRECT_PASSWORD), + ('md5_user', 'md5', CORRECT_PASSWORD), + ('password_user', 'password', CORRECT_PASSWORD), + ] + async 
def _try_connect(self, **kwargs): # On Windows the server sometimes just closes # the connection sooner than we receive the @@ -388,6 +380,62 @@ async def test_auth_md5_unsupported(self, _): await self.connect(user='md5_user', password=CORRECT_PASSWORD) +class TestGssAuthentication(BaseTestAuthentication): + @classmethod + def setUpClass(cls): + try: + from k5test.realm import K5Realm + except ModuleNotFoundError: + raise unittest.SkipTest('k5test not installed') + + cls.realm = K5Realm() + cls.addClassCleanup(cls.realm.stop) + # Setup environment before starting the cluster. + patch = unittest.mock.patch.dict(os.environ, cls.realm.env) + patch.start() + cls.addClassCleanup(patch.stop) + # Add credentials. + cls.realm.addprinc('postgres/localhost') + cls.realm.extract_keytab('postgres/localhost', cls.realm.keytab) + + cls.USERS = [(cls.realm.user_princ, 'gss', None)] + super().setUpClass() + + cls.cluster.override_connection_spec(host='localhost') + + @classmethod + def get_server_settings(cls): + settings = super().get_server_settings() + settings['krb_server_keyfile'] = f'FILE:{cls.realm.keytab}' + return settings + + @classmethod + def setup_cluster(cls): + cls.cluster = cls.new_cluster(pg_cluster.TempCluster) + cls.start_cluster( + cls.cluster, server_settings=cls.get_server_settings()) + + async def test_auth_gssapi(self): + conn = await self.connect(user=self.realm.user_princ) + await conn.close() + + # Service name mismatch. + with self.assertRaisesRegex( + exceptions.InternalClientError, + 'Server .* not found' + ): + await self.connect(user=self.realm.user_princ, krbsrvname='wrong') + + # Credentials mismatch. 
+ self.realm.addprinc('wrong_user', 'password') + self.realm.kinit('wrong_user', 'password') + with self.assertRaisesRegex( + exceptions.InvalidAuthorizationSpecificationError, + 'GSSAPI authentication failed for user' + ): + await self.connect(user=self.realm.user_princ) + + class TestConnectParams(tb.TestCase): TESTS = [ @@ -600,6 +648,46 @@ class TestConnectParams(tb.TestCase): }) }, + { + 'name': 'krbsrvname', + 'dsn': 'postgresql://user@host/db?krbsrvname=srv_qs', + 'env': { + 'PGKRBSRVNAME': 'srv_env', + }, + 'result': ([('host', 5432)], { + 'database': 'db', + 'user': 'user', + 'target_session_attrs': 'any', + 'krbsrvname': 'srv_qs', + }) + }, + + { + 'name': 'krbsrvname_2', + 'dsn': 'postgresql://user@host/db?krbsrvname=srv_qs', + 'krbsrvname': 'srv_kws', + 'result': ([('host', 5432)], { + 'database': 'db', + 'user': 'user', + 'target_session_attrs': 'any', + 'krbsrvname': 'srv_kws', + }) + }, + + { + 'name': 'krbsrvname_3', + 'dsn': 'postgresql://user@host/db', + 'env': { + 'PGKRBSRVNAME': 'srv_env', + }, + 'result': ([('host', 5432)], { + 'database': 'db', + 'user': 'user', + 'target_session_attrs': 'any', + 'krbsrvname': 'srv_env', + }) + }, + { 'name': 'dsn_ipv6_multi_host', 'dsn': 'postgresql://user@[2001:db8::1234%25eth0],[::1]/db', @@ -883,6 +971,7 @@ def run_testcase(self, testcase): sslmode = testcase.get('ssl') server_settings = testcase.get('server_settings') target_session_attrs = testcase.get('target_session_attrs') + krbsrvname = testcase.get('krbsrvname') expected = testcase.get('result') expected_error = testcase.get('error') @@ -907,7 +996,8 @@ def run_testcase(self, testcase): passfile=passfile, database=database, ssl=sslmode, direct_tls=False, server_settings=server_settings, - target_session_attrs=target_session_attrs) + target_session_attrs=target_session_attrs, + krbsrvname=krbsrvname) params = { k: v for k, v in params._asdict().items() From d42432bff29c5fe7c42926a24bd1272dea39af90 Mon Sep 17 00:00:00 2001 From: Bryan Forbes Date: 
Mon, 4 Mar 2024 18:37:43 -0600 Subject: [PATCH 140/193] Add initial typings (#1127) * Added typings to miscellaneous files * Added unit test to check codebase with mypy * Updated release workflow and build to account for annotations * Updated manifest to include stub files --- .flake8 | 4 +- .github/workflows/release.yml | 2 +- .gitignore | 2 + MANIFEST.in | 2 +- asyncpg/__init__.py | 7 +- asyncpg/_asyncio_compat.py | 13 +- asyncpg/_version.py | 6 +- asyncpg/compat.py | 24 ++- asyncpg/introspection.py | 22 ++- asyncpg/protocol/__init__.py | 2 + asyncpg/protocol/protocol.pyi | 300 ++++++++++++++++++++++++++++++++++ asyncpg/serverversion.py | 24 ++- asyncpg/types.py | 102 ++++++++---- pyproject.toml | 27 ++- setup.py | 2 +- tests/test__sourcecode.py | 33 +++- 16 files changed, 512 insertions(+), 60 deletions(-) create mode 100644 asyncpg/protocol/protocol.pyi diff --git a/.flake8 b/.flake8 index decf40da..d4e76b7a 100644 --- a/.flake8 +++ b/.flake8 @@ -1,3 +1,5 @@ [flake8] +select = C90,E,F,W,Y0 ignore = E402,E731,W503,W504,E252 -exclude = .git,__pycache__,build,dist,.eggs,.github,.local,.venv,.tox +exclude = .git,__pycache__,build,dist,.eggs,.github,.local,.venv*,.tox +per-file-ignores = *.pyi: F401,F403,F405,F811,E127,E128,E203,E266,E301,E302,E305,E501,E701,E704,E741,B303,W503,W504 diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index eef0799e..450f471e 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -22,7 +22,7 @@ jobs: github_token: ${{ secrets.RELEASE_BOT_GITHUB_TOKEN }} version_file: asyncpg/_version.py version_line_pattern: | - __version__\s*=\s*(?:['"])([[:PEP440:]])(?:['"]) + __version__(?:\s*:\s*typing\.Final)?\s*=\s*(?:['"])([[:PEP440:]])(?:['"]) - name: Stop if not approved if: steps.checkver.outputs.approved != 'true' diff --git a/.gitignore b/.gitignore index 21286094..53c0daa1 100644 --- a/.gitignore +++ b/.gitignore @@ -34,3 +34,5 @@ docs/_build /.eggs /.vscode /.mypy_cache +/.venv* +/.tox 
diff --git a/MANIFEST.in b/MANIFEST.in index 2389f6fa..3eac0565 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,5 +1,5 @@ recursive-include docs *.py *.rst Makefile *.css recursive-include examples *.py recursive-include tests *.py *.pem -recursive-include asyncpg *.pyx *.pxd *.pxi *.py *.c *.h +recursive-include asyncpg *.pyx *.pxd *.pxi *.py *.pyi *.c *.h include LICENSE README.rst Makefile performance.png .flake8 diff --git a/asyncpg/__init__.py b/asyncpg/__init__.py index e8cd11eb..e8811a9d 100644 --- a/asyncpg/__init__.py +++ b/asyncpg/__init__.py @@ -4,6 +4,7 @@ # This module is part of asyncpg and is released under # the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 +from __future__ import annotations from .connection import connect, Connection # NOQA from .exceptions import * # NOQA @@ -14,6 +15,10 @@ from ._version import __version__ # NOQA +from . import exceptions -__all__ = ('connect', 'create_pool', 'Pool', 'Record', 'Connection') + +__all__: tuple[str, ...] = ( + 'connect', 'create_pool', 'Pool', 'Record', 'Connection' +) __all__ += exceptions.__all__ # NOQA diff --git a/asyncpg/_asyncio_compat.py b/asyncpg/_asyncio_compat.py index ad7dfd8c..a211d0a9 100644 --- a/asyncpg/_asyncio_compat.py +++ b/asyncpg/_asyncio_compat.py @@ -4,18 +4,25 @@ # # SPDX-License-Identifier: PSF-2.0 +from __future__ import annotations import asyncio import functools import sys +import typing + +if typing.TYPE_CHECKING: + from . import compat if sys.version_info < (3, 11): from async_timeout import timeout as timeout_ctx else: from asyncio import timeout as timeout_ctx +_T = typing.TypeVar('_T') + -async def wait_for(fut, timeout): +async def wait_for(fut: compat.Awaitable[_T], timeout: float | None) -> _T: """Wait for the single Future or coroutine to complete, with timeout. Coroutine will be wrapped in Task. 
@@ -65,7 +72,7 @@ async def wait_for(fut, timeout): return await fut -async def _cancel_and_wait(fut): +async def _cancel_and_wait(fut: asyncio.Future[_T]) -> None: """Cancel the *fut* future or task and wait until it completes.""" loop = asyncio.get_running_loop() @@ -82,6 +89,6 @@ async def _cancel_and_wait(fut): fut.remove_done_callback(cb) -def _release_waiter(waiter, *args): +def _release_waiter(waiter: asyncio.Future[typing.Any], *args: object) -> None: if not waiter.done(): waiter.set_result(None) diff --git a/asyncpg/_version.py b/asyncpg/_version.py index 67fd67ab..383fe4d2 100644 --- a/asyncpg/_version.py +++ b/asyncpg/_version.py @@ -10,4 +10,8 @@ # supported platforms, publish the packages on PyPI, merge the PR # to the target branch, create a Git tag pointing to the commit. -__version__ = '0.30.0.dev0' +from __future__ import annotations + +import typing + +__version__: typing.Final = '0.30.0.dev0' diff --git a/asyncpg/compat.py b/asyncpg/compat.py index 3eec9eb7..435b4c48 100644 --- a/asyncpg/compat.py +++ b/asyncpg/compat.py @@ -4,22 +4,25 @@ # This module is part of asyncpg and is released under # the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 +from __future__ import annotations import pathlib import platform import typing import sys +if typing.TYPE_CHECKING: + import asyncio -SYSTEM = platform.uname().system +SYSTEM: typing.Final = platform.uname().system -if SYSTEM == 'Windows': +if sys.platform == 'win32': import ctypes.wintypes - CSIDL_APPDATA = 0x001a + CSIDL_APPDATA: typing.Final = 0x001a - def get_pg_home_directory() -> typing.Optional[pathlib.Path]: + def get_pg_home_directory() -> pathlib.Path | None: # We cannot simply use expanduser() as that returns the user's # home directory, whereas Postgres stores its config in # %AppData% on Windows. 
@@ -31,14 +34,14 @@ def get_pg_home_directory() -> typing.Optional[pathlib.Path]: return pathlib.Path(buf.value) / 'postgresql' else: - def get_pg_home_directory() -> typing.Optional[pathlib.Path]: + def get_pg_home_directory() -> pathlib.Path | None: try: return pathlib.Path.home() except (RuntimeError, KeyError): return None -async def wait_closed(stream): +async def wait_closed(stream: asyncio.StreamWriter) -> None: # Not all asyncio versions have StreamWriter.wait_closed(). if hasattr(stream, 'wait_closed'): try: @@ -59,3 +62,12 @@ async def wait_closed(stream): from ._asyncio_compat import timeout_ctx as timeout # noqa: F401 else: from asyncio import timeout as timeout # noqa: F401 + +if sys.version_info < (3, 9): + from typing import ( # noqa: F401 + Awaitable as Awaitable, + ) +else: + from collections.abc import ( # noqa: F401 + Awaitable as Awaitable, + ) diff --git a/asyncpg/introspection.py b/asyncpg/introspection.py index 6c2caf03..641cf700 100644 --- a/asyncpg/introspection.py +++ b/asyncpg/introspection.py @@ -4,8 +4,14 @@ # This module is part of asyncpg and is released under # the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 +from __future__ import annotations -_TYPEINFO_13 = '''\ +import typing + +if typing.TYPE_CHECKING: + from . 
import protocol + +_TYPEINFO_13: typing.Final = '''\ ( SELECT t.oid AS oid, @@ -124,7 +130,7 @@ '''.format(typeinfo=_TYPEINFO_13) -_TYPEINFO = '''\ +_TYPEINFO: typing.Final = '''\ ( SELECT t.oid AS oid, @@ -248,7 +254,7 @@ '''.format(typeinfo=_TYPEINFO) -TYPE_BY_NAME = '''\ +TYPE_BY_NAME: typing.Final = '''\ SELECT t.oid, t.typelem AS elemtype, @@ -277,16 +283,16 @@ SCALAR_TYPE_KINDS = (b'b', b'd', b'e') -def is_scalar_type(typeinfo) -> bool: +def is_scalar_type(typeinfo: protocol.Record) -> bool: return ( typeinfo['kind'] in SCALAR_TYPE_KINDS and not typeinfo['elemtype'] ) -def is_domain_type(typeinfo) -> bool: - return typeinfo['kind'] == b'd' +def is_domain_type(typeinfo: protocol.Record) -> bool: + return typeinfo['kind'] == b'd' # type: ignore[no-any-return] -def is_composite_type(typeinfo) -> bool: - return typeinfo['kind'] == b'c' +def is_composite_type(typeinfo: protocol.Record) -> bool: + return typeinfo['kind'] == b'c' # type: ignore[no-any-return] diff --git a/asyncpg/protocol/__init__.py b/asyncpg/protocol/__init__.py index 8b3e06a0..af9287bd 100644 --- a/asyncpg/protocol/__init__.py +++ b/asyncpg/protocol/__init__.py @@ -6,4 +6,6 @@ # flake8: NOQA +from __future__ import annotations + from .protocol import Protocol, Record, NO_TIMEOUT, BUILTIN_TYPE_NAME_MAP diff --git a/asyncpg/protocol/protocol.pyi b/asyncpg/protocol/protocol.pyi new file mode 100644 index 00000000..f85c5b6d --- /dev/null +++ b/asyncpg/protocol/protocol.pyi @@ -0,0 +1,300 @@ +import asyncio +import asyncio.protocols +import hmac +from codecs import CodecInfo +from collections.abc import Callable, Iterable, Iterator, Sequence +from hashlib import md5, sha256 +from typing import ( + Any, + ClassVar, + Final, + Generic, + Literal, + NewType, + TypeVar, + final, + overload, +) +from typing_extensions import TypeAlias + +import asyncpg.pgproto.pgproto + +from ..connect_utils import _ConnectionParameters +from ..pgproto.pgproto import WriteBuffer +from ..types import Attribute, Type + +_T = 
TypeVar('_T') +_Record = TypeVar('_Record', bound=Record) +_OtherRecord = TypeVar('_OtherRecord', bound=Record) +_PreparedStatementState = TypeVar( + '_PreparedStatementState', bound=PreparedStatementState[Any] +) + +_NoTimeoutType = NewType('_NoTimeoutType', object) +_TimeoutType: TypeAlias = float | None | _NoTimeoutType + +BUILTIN_TYPE_NAME_MAP: Final[dict[str, int]] +BUILTIN_TYPE_OID_MAP: Final[dict[int, str]] +NO_TIMEOUT: Final[_NoTimeoutType] + +hashlib_md5 = md5 + +@final +class ConnectionSettings(asyncpg.pgproto.pgproto.CodecContext): + __pyx_vtable__: Any + def __init__(self, conn_key: object) -> None: ... + def add_python_codec( + self, + typeoid: int, + typename: str, + typeschema: str, + typeinfos: Iterable[object], + typekind: str, + encoder: Callable[[Any], Any], + decoder: Callable[[Any], Any], + format: object, + ) -> Any: ... + def clear_type_cache(self) -> None: ... + def get_data_codec( + self, oid: int, format: object = ..., ignore_custom_codec: bool = ... + ) -> Any: ... + def get_text_codec(self) -> CodecInfo: ... + def register_data_types(self, types: Iterable[object]) -> None: ... + def remove_python_codec( + self, typeoid: int, typename: str, typeschema: str + ) -> None: ... + def set_builtin_type_codec( + self, + typeoid: int, + typename: str, + typeschema: str, + typekind: str, + alias_to: str, + format: object = ..., + ) -> Any: ... + def __getattr__(self, name: str) -> Any: ... + def __reduce__(self) -> Any: ... + +@final +class PreparedStatementState(Generic[_Record]): + closed: bool + prepared: bool + name: str + query: str + refs: int + record_class: type[_Record] + ignore_custom_codec: bool + __pyx_vtable__: Any + def __init__( + self, + name: str, + query: str, + protocol: BaseProtocol[Any], + record_class: type[_Record], + ignore_custom_codec: bool, + ) -> None: ... + def _get_parameters(self) -> tuple[Type, ...]: ... + def _get_attributes(self) -> tuple[Attribute, ...]: ... + def _init_types(self) -> set[int]: ... 
+ def _init_codecs(self) -> None: ... + def attach(self) -> None: ... + def detach(self) -> None: ... + def mark_closed(self) -> None: ... + def mark_unprepared(self) -> None: ... + def __reduce__(self) -> Any: ... + +class CoreProtocol: + backend_pid: Any + backend_secret: Any + __pyx_vtable__: Any + def __init__(self, addr: object, con_params: _ConnectionParameters) -> None: ... + def is_in_transaction(self) -> bool: ... + def __reduce__(self) -> Any: ... + +class BaseProtocol(CoreProtocol, Generic[_Record]): + queries_count: Any + is_ssl: bool + __pyx_vtable__: Any + def __init__( + self, + addr: object, + connected_fut: object, + con_params: _ConnectionParameters, + record_class: type[_Record], + loop: object, + ) -> None: ... + def set_connection(self, connection: object) -> None: ... + def get_server_pid(self, *args: object, **kwargs: object) -> int: ... + def get_settings(self, *args: object, **kwargs: object) -> ConnectionSettings: ... + def get_record_class(self) -> type[_Record]: ... + def abort(self) -> None: ... + async def bind( + self, + state: PreparedStatementState[_OtherRecord], + args: Sequence[object], + portal_name: str, + timeout: _TimeoutType, + ) -> Any: ... + @overload + async def bind_execute( + self, + state: PreparedStatementState[_OtherRecord], + args: Sequence[object], + portal_name: str, + limit: int, + return_extra: Literal[False], + timeout: _TimeoutType, + ) -> list[_OtherRecord]: ... + @overload + async def bind_execute( + self, + state: PreparedStatementState[_OtherRecord], + args: Sequence[object], + portal_name: str, + limit: int, + return_extra: Literal[True], + timeout: _TimeoutType, + ) -> tuple[list[_OtherRecord], bytes, bool]: ... + @overload + async def bind_execute( + self, + state: PreparedStatementState[_OtherRecord], + args: Sequence[object], + portal_name: str, + limit: int, + return_extra: bool, + timeout: _TimeoutType, + ) -> list[_OtherRecord] | tuple[list[_OtherRecord], bytes, bool]: ... 
+ async def bind_execute_many( + self, + state: PreparedStatementState[_OtherRecord], + args: Iterable[Sequence[object]], + portal_name: str, + timeout: _TimeoutType, + ) -> None: ... + async def close(self, timeout: _TimeoutType) -> None: ... + def _get_timeout(self, timeout: _TimeoutType) -> float | None: ... + def _is_cancelling(self) -> bool: ... + async def _wait_for_cancellation(self) -> None: ... + async def close_statement( + self, state: PreparedStatementState[_OtherRecord], timeout: _TimeoutType + ) -> Any: ... + async def copy_in(self, *args: object, **kwargs: object) -> str: ... + async def copy_out(self, *args: object, **kwargs: object) -> str: ... + async def execute(self, *args: object, **kwargs: object) -> Any: ... + def is_closed(self, *args: object, **kwargs: object) -> Any: ... + def is_connected(self, *args: object, **kwargs: object) -> Any: ... + def data_received(self, data: object) -> None: ... + def connection_made(self, transport: object) -> None: ... + def connection_lost(self, exc: Exception | None) -> None: ... + def pause_writing(self, *args: object, **kwargs: object) -> Any: ... + @overload + async def prepare( + self, + stmt_name: str, + query: str, + timeout: float | None = ..., + *, + state: _PreparedStatementState, + ignore_custom_codec: bool = ..., + record_class: None, + ) -> _PreparedStatementState: ... + @overload + async def prepare( + self, + stmt_name: str, + query: str, + timeout: float | None = ..., + *, + state: None = ..., + ignore_custom_codec: bool = ..., + record_class: type[_OtherRecord], + ) -> PreparedStatementState[_OtherRecord]: ... + async def close_portal(self, portal_name: str, timeout: _TimeoutType) -> None: ... + async def query(self, *args: object, **kwargs: object) -> str: ... + def resume_writing(self, *args: object, **kwargs: object) -> Any: ... + def __reduce__(self) -> Any: ... + +@final +class Codec: + __pyx_vtable__: Any + def __reduce__(self) -> Any: ... 
+ +class DataCodecConfig: + __pyx_vtable__: Any + def __init__(self, cache_key: object) -> None: ... + def add_python_codec( + self, + typeoid: int, + typename: str, + typeschema: str, + typekind: str, + typeinfos: Iterable[object], + encoder: Callable[[ConnectionSettings, WriteBuffer, object], object], + decoder: Callable[..., object], + format: object, + xformat: object, + ) -> Any: ... + def add_types(self, types: Iterable[object]) -> Any: ... + def clear_type_cache(self) -> None: ... + def declare_fallback_codec(self, oid: int, name: str, schema: str) -> Codec: ... + def remove_python_codec( + self, typeoid: int, typename: str, typeschema: str + ) -> Any: ... + def set_builtin_type_codec( + self, + typeoid: int, + typename: str, + typeschema: str, + typekind: str, + alias_to: str, + format: object = ..., + ) -> Any: ... + def __reduce__(self) -> Any: ... + +class Protocol(BaseProtocol[_Record], asyncio.protocols.Protocol): ... + +class Record: + @overload + def get(self, key: str) -> Any | None: ... + @overload + def get(self, key: str, default: _T) -> Any | _T: ... + def items(self) -> Iterator[tuple[str, Any]]: ... + def keys(self) -> Iterator[str]: ... + def values(self) -> Iterator[Any]: ... + @overload + def __getitem__(self, index: str) -> Any: ... + @overload + def __getitem__(self, index: int) -> Any: ... + @overload + def __getitem__(self, index: slice) -> tuple[Any, ...]: ... + def __iter__(self) -> Iterator[Any]: ... + def __contains__(self, x: object) -> bool: ... + def __len__(self) -> int: ... + +class Timer: + def __init__(self, budget: float | None) -> None: ... + def __enter__(self) -> None: ... + def __exit__(self, et: object, e: object, tb: object) -> None: ... + def get_remaining_budget(self) -> float: ... + def has_budget_greater_than(self, amount: float) -> bool: ... 
+ +@final +class SCRAMAuthentication: + AUTHENTICATION_METHODS: ClassVar[list[str]] + DEFAULT_CLIENT_NONCE_BYTES: ClassVar[int] + DIGEST = sha256 + REQUIREMENTS_CLIENT_FINAL_MESSAGE: ClassVar[list[str]] + REQUIREMENTS_CLIENT_PROOF: ClassVar[list[str]] + SASLPREP_PROHIBITED: ClassVar[tuple[Callable[[str], bool], ...]] + authentication_method: bytes + authorization_message: bytes | None + client_channel_binding: bytes + client_first_message_bare: bytes | None + client_nonce: bytes | None + client_proof: bytes | None + password_salt: bytes | None + password_iterations: int + server_first_message: bytes | None + server_key: hmac.HMAC | None + server_nonce: bytes | None diff --git a/asyncpg/serverversion.py b/asyncpg/serverversion.py index 31568a2e..ee9647b4 100644 --- a/asyncpg/serverversion.py +++ b/asyncpg/serverversion.py @@ -4,12 +4,14 @@ # This module is part of asyncpg and is released under # the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 +from __future__ import annotations import re +import typing from .types import ServerVersion -version_regex = re.compile( +version_regex: typing.Final = re.compile( r"(Postgre[^\s]*)?\s*" r"(?P[0-9]+)\.?" r"((?P[0-9]+)\.?)?" 
@@ -19,7 +21,15 @@ ) -def split_server_version_string(version_string): +class _VersionDict(typing.TypedDict): + major: int + minor: int | None + micro: int | None + releaselevel: str | None + serial: int | None + + +def split_server_version_string(version_string: str) -> ServerVersion: version_match = version_regex.search(version_string) if version_match is None: @@ -28,17 +38,17 @@ def split_server_version_string(version_string): f'version from "{version_string}"' ) - version = version_match.groupdict() + version: _VersionDict = version_match.groupdict() # type: ignore[assignment] # noqa: E501 for ver_key, ver_value in version.items(): # Cast all possible versions parts to int try: - version[ver_key] = int(ver_value) + version[ver_key] = int(ver_value) # type: ignore[literal-required, call-overload] # noqa: E501 except (TypeError, ValueError): pass - if version.get("major") < 10: + if version["major"] < 10: return ServerVersion( - version.get("major"), + version["major"], version.get("minor") or 0, version.get("micro") or 0, version.get("releaselevel") or "final", @@ -52,7 +62,7 @@ def split_server_version_string(version_string): # want to keep that behaviour consistent, i.e not fail # a major version check due to a bugfix release. 
return ServerVersion( - version.get("major"), + version["major"], 0, version.get("minor") or 0, version.get("releaselevel") or "final", diff --git a/asyncpg/types.py b/asyncpg/types.py index bd5813fc..7a24e24c 100644 --- a/asyncpg/types.py +++ b/asyncpg/types.py @@ -4,14 +4,18 @@ # This module is part of asyncpg and is released under # the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 +from __future__ import annotations -import collections +import typing from asyncpg.pgproto.types import ( BitString, Point, Path, Polygon, Box, Line, LineSegment, Circle, ) +if typing.TYPE_CHECKING: + from typing_extensions import Self + __all__ = ( 'Type', 'Attribute', 'Range', 'BitString', 'Point', 'Path', 'Polygon', @@ -19,7 +23,13 @@ ) -Type = collections.namedtuple('Type', ['oid', 'name', 'kind', 'schema']) +class Type(typing.NamedTuple): + oid: int + name: str + kind: str + schema: str + + Type.__doc__ = 'Database data type.' Type.oid.__doc__ = 'OID of the type.' Type.name.__doc__ = 'Type name. For example "int2".' @@ -28,25 +38,61 @@ Type.schema.__doc__ = 'Name of the database schema that defines the type.' -Attribute = collections.namedtuple('Attribute', ['name', 'type']) +class Attribute(typing.NamedTuple): + name: str + type: Type + + Attribute.__doc__ = 'Database relation attribute.' Attribute.name.__doc__ = 'Attribute name.' Attribute.type.__doc__ = 'Attribute data type :class:`asyncpg.types.Type`.' -ServerVersion = collections.namedtuple( - 'ServerVersion', ['major', 'minor', 'micro', 'releaselevel', 'serial']) +class ServerVersion(typing.NamedTuple): + major: int + minor: int + micro: int + releaselevel: str + serial: int + + ServerVersion.__doc__ = 'PostgreSQL server version tuple.' -class Range: - """Immutable representation of PostgreSQL `range` type.""" +class _RangeValue(typing.Protocol): + def __eq__(self, __value: object) -> bool: + ... + + def __lt__(self, __other: _RangeValue) -> bool: + ... 
+ + def __gt__(self, __other: _RangeValue) -> bool: + ... + - __slots__ = '_lower', '_upper', '_lower_inc', '_upper_inc', '_empty' +_RV = typing.TypeVar('_RV', bound=_RangeValue) + + +class Range(typing.Generic[_RV]): + """Immutable representation of PostgreSQL `range` type.""" - def __init__(self, lower=None, upper=None, *, - lower_inc=True, upper_inc=False, - empty=False): + __slots__ = ('_lower', '_upper', '_lower_inc', '_upper_inc', '_empty') + + _lower: _RV | None + _upper: _RV | None + _lower_inc: bool + _upper_inc: bool + _empty: bool + + def __init__( + self, + lower: _RV | None = None, + upper: _RV | None = None, + *, + lower_inc: bool = True, + upper_inc: bool = False, + empty: bool = False + ) -> None: self._empty = empty if empty: self._lower = self._upper = None @@ -58,34 +104,34 @@ def __init__(self, lower=None, upper=None, *, self._upper_inc = upper is not None and upper_inc @property - def lower(self): + def lower(self) -> _RV | None: return self._lower @property - def lower_inc(self): + def lower_inc(self) -> bool: return self._lower_inc @property - def lower_inf(self): + def lower_inf(self) -> bool: return self._lower is None and not self._empty @property - def upper(self): + def upper(self) -> _RV | None: return self._upper @property - def upper_inc(self): + def upper_inc(self) -> bool: return self._upper_inc @property - def upper_inf(self): + def upper_inf(self) -> bool: return self._upper is None and not self._empty @property - def isempty(self): + def isempty(self) -> bool: return self._empty - def _issubset_lower(self, other): + def _issubset_lower(self, other: Self) -> bool: if other._lower is None: return True if self._lower is None: @@ -96,7 +142,7 @@ def _issubset_lower(self, other): and (other._lower_inc or not self._lower_inc) ) - def _issubset_upper(self, other): + def _issubset_upper(self, other: Self) -> bool: if other._upper is None: return True if self._upper is None: @@ -107,7 +153,7 @@ def _issubset_upper(self, other): and 
(other._upper_inc or not self._upper_inc) ) - def issubset(self, other): + def issubset(self, other: Self) -> bool: if self._empty: return True if other._empty: @@ -115,13 +161,13 @@ def issubset(self, other): return self._issubset_lower(other) and self._issubset_upper(other) - def issuperset(self, other): + def issuperset(self, other: Self) -> bool: return other.issubset(self) - def __bool__(self): + def __bool__(self) -> bool: return not self._empty - def __eq__(self, other): + def __eq__(self, other: object) -> bool: if not isinstance(other, Range): return NotImplemented @@ -132,14 +178,14 @@ def __eq__(self, other): self._upper_inc, self._empty ) == ( - other._lower, - other._upper, + other._lower, # pyright: ignore [reportUnknownMemberType] + other._upper, # pyright: ignore [reportUnknownMemberType] other._lower_inc, other._upper_inc, other._empty ) - def __hash__(self): + def __hash__(self) -> int: return hash(( self._lower, self._upper, @@ -148,7 +194,7 @@ def __hash__(self): self._empty )) - def __repr__(self): + def __repr__(self) -> str: if self._empty: desc = 'empty' else: diff --git a/pyproject.toml b/pyproject.toml index 8209d838..0019dadc 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -28,7 +28,7 @@ classifiers = [ "Topic :: Database :: Front-Ends", ] dependencies = [ - 'async_timeout>=4.0.3; python_version < "3.12.0"' + 'async_timeout>=4.0.3; python_version < "3.12.0"', ] [project.urls] @@ -40,9 +40,11 @@ gssapi = [ ] test = [ 'flake8~=6.1', + 'flake8-pyi~=24.1.0', 'uvloop>=0.15.3; platform_system != "Windows" and python_version < "3.12.0"', 'gssapi; platform_system == "Linux"', 'k5test; platform_system == "Linux"', + 'mypy~=1.8.0', ] docs = [ 'Sphinx~=5.3.0', @@ -107,3 +109,26 @@ exclude_lines = [ "if __name__ == .__main__.", ] show_missing = true + +[tool.mypy] +incremental = true +strict = true +implicit_reexport = true + +[[tool.mypy.overrides]] +module = [ + "asyncpg._testbase", + "asyncpg._testbase.*", + "asyncpg.cluster", + 
"asyncpg.connect_utils", + "asyncpg.connection", + "asyncpg.connresource", + "asyncpg.cursor", + "asyncpg.exceptions", + "asyncpg.exceptions.*", + "asyncpg.pool", + "asyncpg.prepared_stmt", + "asyncpg.transaction", + "asyncpg.utils", +] +ignore_errors = true diff --git a/setup.py b/setup.py index c4d42d82..f7c3c471 100644 --- a/setup.py +++ b/setup.py @@ -43,7 +43,7 @@ with open(str(_ROOT / 'asyncpg' / '_version.py')) as f: for line in f: - if line.startswith('__version__ ='): + if line.startswith('__version__: typing.Final ='): _, _, version = line.partition('=') VERSION = version.strip(" \n'\"") break diff --git a/tests/test__sourcecode.py b/tests/test__sourcecode.py index 28ffdea7..b19044d4 100644 --- a/tests/test__sourcecode.py +++ b/tests/test__sourcecode.py @@ -14,7 +14,7 @@ def find_root(): return os.path.dirname(os.path.dirname(os.path.abspath(__file__))) -class TestFlake8(unittest.TestCase): +class TestCodeQuality(unittest.TestCase): def test_flake8(self): try: @@ -38,3 +38,34 @@ def test_flake8(self): output = ex.output.decode() raise AssertionError( 'flake8 validation failed:\n{}'.format(output)) from None + + def test_mypy(self): + try: + import mypy # NoQA + except ImportError: + raise unittest.SkipTest('mypy module is missing') + + root_path = find_root() + config_path = os.path.join(root_path, 'pyproject.toml') + if not os.path.exists(config_path): + raise RuntimeError('could not locate mypy.ini file') + + try: + subprocess.run( + [ + sys.executable, + '-m', + 'mypy', + '--config-file', + config_path, + 'asyncpg' + ], + check=True, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + cwd=root_path + ) + except subprocess.CalledProcessError as ex: + output = ex.output.decode() + raise AssertionError( + 'mypy validation failed:\n{}'.format(output)) from None From 1aab2094d82104d5eee2cffcfd0c7e7347d4c5b8 Mon Sep 17 00:00:00 2001 From: Eugene Toder Date: Mon, 11 Mar 2024 13:34:24 -0400 Subject: [PATCH 141/193] Implement SSPI authentication (#1128) 
SSPI is a Windows technology for secure authentication. SSPI and GSSAPI interoperate as clients and servers. Postgres documentation recommends using SSPI on Windows clients and servers and GSSAPI on non-Windows platforms[1]. Changes in this PR: * Support AUTH_REQUIRED_SSPI server request. This is the same as AUTH_REQUIRED_GSS, except it allows negotiation with SSPI clients. * Allow using SSPI on the client. Which library to use can be specified using the `gsslib` connection parameter. * Use SSPI instead of GSSAPI on Windows by default. The latter requires installing Kerberos for Windows and is unlikely to work out of the box. Closes #142 [1] https://www.postgresql.org/docs/current/sspi-auth.html --- README.rst | 11 +++- asyncpg/connect_utils.py | 23 +++++-- asyncpg/connection.py | 10 ++- asyncpg/protocol/coreproto.pxd | 6 +- asyncpg/protocol/coreproto.pyx | 67 ++++++++++++------- docs/installation.rst | 29 +++++++-- pyproject.toml | 6 +- tests/test_connect.py | 113 +++++++++++++++++++++++++++++++-- 8 files changed, 219 insertions(+), 46 deletions(-) diff --git a/README.rst b/README.rst index 438b4c44..0d078d82 100644 --- a/README.rst +++ b/README.rst @@ -58,11 +58,18 @@ This enables asyncpg to have easy-to-use support for: Installation ------------ -asyncpg is available on PyPI and has no dependencies. -Use pip to install:: +asyncpg is available on PyPI. When not using GSSAPI/SSPI authentication it +has no dependencies. Use pip to install:: $ pip install asyncpg +If you need GSSAPI/SSPI authentication, use:: + + $ pip install 'asyncpg[gssauth]' + +For more details, please `see the documentation +`_. 
+ Basic Usage ----------- diff --git a/asyncpg/connect_utils.py b/asyncpg/connect_utils.py index 8039d1b4..0631f976 100644 --- a/asyncpg/connect_utils.py +++ b/asyncpg/connect_utils.py @@ -57,6 +57,7 @@ def parse(cls, sslmode): 'server_settings', 'target_session_attrs', 'krbsrvname', + 'gsslib', ]) @@ -262,7 +263,7 @@ def _dot_postgresql_path(filename) -> typing.Optional[pathlib.Path]: def _parse_connect_dsn_and_args(*, dsn, host, port, user, password, passfile, database, ssl, direct_tls, server_settings, - target_session_attrs, krbsrvname): + target_session_attrs, krbsrvname, gsslib): # `auth_hosts` is the version of host information for the purposes # of reading the pgpass file. auth_hosts = None @@ -389,6 +390,11 @@ def _parse_connect_dsn_and_args(*, dsn, host, port, user, if krbsrvname is None: krbsrvname = val + if 'gsslib' in query: + val = query.pop('gsslib') + if gsslib is None: + gsslib = val + if query: if server_settings is None: server_settings = query @@ -659,12 +665,21 @@ def _parse_connect_dsn_and_args(*, dsn, host, port, user, if krbsrvname is None: krbsrvname = os.getenv('PGKRBSRVNAME') + if gsslib is None: + gsslib = os.getenv('PGGSSLIB') + if gsslib is None: + gsslib = 'sspi' if _system == 'Windows' else 'gssapi' + if gsslib not in {'gssapi', 'sspi'}: + raise exceptions.ClientConfigurationError( + "gsslib parameter must be either 'gssapi' or 'sspi'" + ", got {!r}".format(gsslib)) + params = _ConnectionParameters( user=user, password=password, database=database, ssl=ssl, sslmode=sslmode, direct_tls=direct_tls, server_settings=server_settings, target_session_attrs=target_session_attrs, - krbsrvname=krbsrvname) + krbsrvname=krbsrvname, gsslib=gsslib) return addrs, params @@ -675,7 +690,7 @@ def _parse_connect_arguments(*, dsn, host, port, user, password, passfile, max_cached_statement_lifetime, max_cacheable_statement_size, ssl, direct_tls, server_settings, - target_session_attrs, krbsrvname): + target_session_attrs, krbsrvname, gsslib): local_vars 
= locals() for var_name in {'max_cacheable_statement_size', 'max_cached_statement_lifetime', @@ -705,7 +720,7 @@ def _parse_connect_arguments(*, dsn, host, port, user, password, passfile, direct_tls=direct_tls, database=database, server_settings=server_settings, target_session_attrs=target_session_attrs, - krbsrvname=krbsrvname) + krbsrvname=krbsrvname, gsslib=gsslib) config = _ClientConfiguration( command_timeout=command_timeout, diff --git a/asyncpg/connection.py b/asyncpg/connection.py index bf5f6db6..e54d6df8 100644 --- a/asyncpg/connection.py +++ b/asyncpg/connection.py @@ -2008,7 +2008,8 @@ async def connect(dsn=None, *, record_class=protocol.Record, server_settings=None, target_session_attrs=None, - krbsrvname=None): + krbsrvname=None, + gsslib=None): r"""A coroutine to establish a connection to a PostgreSQL server. The connection parameters may be specified either as a connection @@ -2240,6 +2241,10 @@ async def connect(dsn=None, *, Kerberos service name to use when authenticating with GSSAPI. This must match the server configuration. Defaults to 'postgres'. + :param str gsslib: + GSS library to use for GSSAPI/SSPI authentication. Can be 'gssapi' + or 'sspi'. Defaults to 'sspi' on Windows and 'gssapi' otherwise. + :return: A :class:`~asyncpg.connection.Connection` instance. Example: @@ -2309,7 +2314,7 @@ async def connect(dsn=None, *, Added the *target_session_attrs* parameter. .. versionchanged:: 0.30.0 - Added the *krbsrvname* parameter. + Added the *krbsrvname* and *gsslib* parameters. .. _SSLContext: https://docs.python.org/3/library/ssl.html#ssl.SSLContext .. 
_create_default_context: @@ -2354,6 +2359,7 @@ async def connect(dsn=None, *, max_cacheable_statement_size=max_cacheable_statement_size, target_session_attrs=target_session_attrs, krbsrvname=krbsrvname, + gsslib=gsslib, ) diff --git a/asyncpg/protocol/coreproto.pxd b/asyncpg/protocol/coreproto.pxd index 612d8cae..8a398de9 100644 --- a/asyncpg/protocol/coreproto.pxd +++ b/asyncpg/protocol/coreproto.pxd @@ -91,7 +91,7 @@ cdef class CoreProtocol: object con_params # Instance of SCRAMAuthentication SCRAMAuthentication scram - # Instance of gssapi.SecurityContext + # Instance of gssapi.SecurityContext or sspilib.SecurityContext object gss_ctx readonly int32_t backend_pid @@ -138,7 +138,9 @@ cdef class CoreProtocol: cdef _auth_password_message_md5(self, bytes salt) cdef _auth_password_message_sasl_initial(self, list sasl_auth_methods) cdef _auth_password_message_sasl_continue(self, bytes server_response) - cdef _auth_gss_init(self) + cdef _auth_gss_init_gssapi(self) + cdef _auth_gss_init_sspi(self, bint negotiate) + cdef _auth_gss_get_spn(self) cdef _auth_gss_step(self, bytes server_response) cdef _write(self, buf) diff --git a/asyncpg/protocol/coreproto.pyx b/asyncpg/protocol/coreproto.pyx index 7a2b257e..fd65327b 100644 --- a/asyncpg/protocol/coreproto.pyx +++ b/asyncpg/protocol/coreproto.pyx @@ -38,7 +38,8 @@ cdef class CoreProtocol: self.encoding = 'utf-8' # type of `scram` is `SCRAMAuthentcation` self.scram = None - # type of `gss_ctx` is `gssapi.SecurityContext` + # type of `gss_ctx` is `gssapi.SecurityContext` or + # `sspilib.SecurityContext` self.gss_ctx = None self._reset_result() @@ -635,29 +636,33 @@ cdef class CoreProtocol: ) self.scram = None - elif status == AUTH_REQUIRED_GSS: - self._auth_gss_init() - self.auth_msg = self._auth_gss_step(None) + elif status in (AUTH_REQUIRED_GSS, AUTH_REQUIRED_SSPI): + # AUTH_REQUIRED_SSPI is the same as AUTH_REQUIRED_GSS, except that + # it uses protocol negotiation with SSPI clients. 
Both methods use + # AUTH_REQUIRED_GSS_CONTINUE for subsequent authentication steps. + if self.gss_ctx is not None: + self.result_type = RESULT_FAILED + self.result = apg_exc.InterfaceError( + 'duplicate GSSAPI/SSPI authentication request') + else: + if self.con_params.gsslib == 'gssapi': + self._auth_gss_init_gssapi() + else: + self._auth_gss_init_sspi(status == AUTH_REQUIRED_SSPI) + self.auth_msg = self._auth_gss_step(None) elif status == AUTH_REQUIRED_GSS_CONTINUE: server_response = self.buffer.consume_message() self.auth_msg = self._auth_gss_step(server_response) - elif status in (AUTH_REQUIRED_KERBEROS, AUTH_REQUIRED_SCMCRED, - AUTH_REQUIRED_SSPI): - self.result_type = RESULT_FAILED - self.result = apg_exc.InterfaceError( - 'unsupported authentication method requested by the ' - 'server: {!r}'.format(AUTH_METHOD_NAME[status])) - else: self.result_type = RESULT_FAILED self.result = apg_exc.InterfaceError( 'unsupported authentication method requested by the ' - 'server: {}'.format(status)) + 'server: {!r}'.format(AUTH_METHOD_NAME.get(status, status))) - if status not in [AUTH_SASL_CONTINUE, AUTH_SASL_FINAL, - AUTH_REQUIRED_GSS_CONTINUE]: + if status not in (AUTH_SASL_CONTINUE, AUTH_SASL_FINAL, + AUTH_REQUIRED_GSS_CONTINUE): self.buffer.discard_message() cdef _auth_password_message_cleartext(self): @@ -714,25 +719,43 @@ cdef class CoreProtocol: return msg - cdef _auth_gss_init(self): + cdef _auth_gss_init_gssapi(self): try: import gssapi except ModuleNotFoundError: - raise RuntimeError( - 'gssapi module not found; please install asyncpg[gssapi] to ' - 'use asyncpg with Kerberos or GSSAPI authentication' + raise apg_exc.InterfaceError( + 'gssapi module not found; please install asyncpg[gssauth] to ' + 'use asyncpg with Kerberos/GSSAPI/SSPI authentication' ) from None + self.gss_ctx = gssapi.SecurityContext( + name=gssapi.Name(self._auth_gss_get_spn()), usage='initiate') + + cdef _auth_gss_init_sspi(self, bint negotiate): + try: + import sspilib + except 
ModuleNotFoundError: + raise apg_exc.InterfaceError( + 'sspilib module not found; please install asyncpg[gssauth] to ' + 'use asyncpg with Kerberos/GSSAPI/SSPI authentication' + ) from None + + self.gss_ctx = sspilib.ClientSecurityContext( + target_name=self._auth_gss_get_spn(), + credential=sspilib.UserCredential( + protocol='Negotiate' if negotiate else 'Kerberos')) + + cdef _auth_gss_get_spn(self): service_name = self.con_params.krbsrvname or 'postgres' # find the canonical name of the server host if isinstance(self.address, str): - raise RuntimeError('GSSAPI authentication is only supported for ' - 'TCP/IP connections') + raise apg_exc.InternalClientError( + 'GSSAPI/SSPI authentication is only supported for TCP/IP ' + 'connections') host = self.address[0] host_cname = socket.gethostbyname_ex(host)[0] - gss_name = gssapi.Name(f'{service_name}/{host_cname}') - self.gss_ctx = gssapi.SecurityContext(name=gss_name, usage='initiate') + return f'{service_name}/{host_cname}' cdef _auth_gss_step(self, bytes server_response): cdef: diff --git a/docs/installation.rst b/docs/installation.rst index 6d9ec2ef..bada7998 100644 --- a/docs/installation.rst +++ b/docs/installation.rst @@ -4,20 +4,35 @@ Installation ============ -**asyncpg** has no external dependencies and the recommended way to -install it is to use **pip**: +**asyncpg** has no external dependencies when not using GSSAPI/SSPI +authentication. The recommended way to install it is to use **pip**: .. code-block:: bash $ pip install asyncpg +If you need GSSAPI/SSPI authentication, the recommended way is to use -.. note:: +.. code-block:: bash + + $ pip install 'asyncpg[gssauth]' + +This installs SSPI support on Windows and GSSAPI support on non-Windows +platforms. SSPI and GSSAPI interoperate as clients and servers: an SSPI +client can authenticate to a GSSAPI server and vice versa. + +On Linux installing GSSAPI requires a working C compiler and Kerberos 5 +development files. 
The latter can be obtained by installing **libkrb5-dev** +package on Debian/Ubuntu or **krb5-devel** on RHEL/Fedora. (This is needed +because PyPI does not have Linux wheels for **gssapi**. See `here for the +details `_.) + +It is also possible to use GSSAPI on Windows: - It is recommended to use **pip** version **8.1** or later to take - advantage of the precompiled wheel packages. Older versions of pip - will ignore the wheel packages and install asyncpg from the source - package. In that case a working C compiler is required. + * `pip install gssapi` + * Install `Kerberos for Windows `_. + * Set the ``gsslib`` parameter or the ``PGGSSLIB`` environment variable to + `gssapi` when connecting. Building from source diff --git a/pyproject.toml b/pyproject.toml index 0019dadc..12f6ae68 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -35,8 +35,9 @@ dependencies = [ github = "https://github.com/MagicStack/asyncpg" [project.optional-dependencies] -gssapi = [ - 'gssapi', +gssauth = [ + 'gssapi; platform_system != "Windows"', + 'sspilib; platform_system == "Windows"', ] test = [ 'flake8~=6.1', @@ -44,6 +45,7 @@ test = [ 'uvloop>=0.15.3; platform_system != "Windows" and python_version < "3.12.0"', 'gssapi; platform_system == "Linux"', 'k5test; platform_system == "Linux"', + 'sspilib; platform_system == "Windows"', 'mypy~=1.8.0', ] docs = [ diff --git a/tests/test_connect.py b/tests/test_connect.py index ebf0e462..049aea26 100644 --- a/tests/test_connect.py +++ b/tests/test_connect.py @@ -13,6 +13,7 @@ import pathlib import platform import shutil +import socket import ssl import stat import tempfile @@ -45,6 +46,13 @@ CLIENT_SSL_KEY_FILE = os.path.join(CERTS, 'client.key.pem') CLIENT_SSL_PROTECTED_KEY_FILE = os.path.join(CERTS, 'client.key.protected.pem') +if _system == 'Windows': + DEFAULT_GSSLIB = 'sspi' + OTHER_GSSLIB = 'gssapi' +else: + DEFAULT_GSSLIB = 'gssapi' + OTHER_GSSLIB = 'sspi' + @contextlib.contextmanager def mock_dot_postgresql(*, ca=True, crl=False, 
client=False, protected=False): @@ -398,7 +406,10 @@ def setUpClass(cls): cls.realm.addprinc('postgres/localhost') cls.realm.extract_keytab('postgres/localhost', cls.realm.keytab) - cls.USERS = [(cls.realm.user_princ, 'gss', None)] + cls.USERS = [ + (cls.realm.user_princ, 'gss', None), + (f'wrong-{cls.realm.user_princ}', 'gss', None), + ] super().setUpClass() cls.cluster.override_connection_spec(host='localhost') @@ -427,13 +438,34 @@ async def test_auth_gssapi(self): await self.connect(user=self.realm.user_princ, krbsrvname='wrong') # Credentials mismatch. - self.realm.addprinc('wrong_user', 'password') - self.realm.kinit('wrong_user', 'password') with self.assertRaisesRegex( exceptions.InvalidAuthorizationSpecificationError, 'GSSAPI authentication failed for user' ): - await self.connect(user=self.realm.user_princ) + await self.connect(user=f'wrong-{self.realm.user_princ}') + + +@unittest.skipIf(_system != 'Windows', 'SSPI is only available on Windows') +class TestSspiAuthentication(BaseTestAuthentication): + @classmethod + def setUpClass(cls): + cls.username = f'{os.getlogin()}@{socket.gethostname()}' + cls.USERS = [ + (cls.username, 'sspi', None), + (f'wrong-{cls.username}', 'sspi', None), + ] + super().setUpClass() + + async def test_auth_sspi(self): + conn = await self.connect(user=self.username) + await conn.close() + + # Credentials mismatch. 
+ with self.assertRaisesRegex( + exceptions.InvalidAuthorizationSpecificationError, + 'SSPI authentication failed for user' + ): + await self.connect(user=f'wrong-{self.username}') class TestConnectParams(tb.TestCase): @@ -666,6 +698,9 @@ class TestConnectParams(tb.TestCase): 'name': 'krbsrvname_2', 'dsn': 'postgresql://user@host/db?krbsrvname=srv_qs', 'krbsrvname': 'srv_kws', + 'env': { + 'PGKRBSRVNAME': 'srv_env', + }, 'result': ([('host', 5432)], { 'database': 'db', 'user': 'user', @@ -688,6 +723,69 @@ class TestConnectParams(tb.TestCase): }) }, + { + 'name': 'gsslib', + 'dsn': f'postgresql://user@host/db?gsslib={OTHER_GSSLIB}', + 'env': { + 'PGGSSLIB': 'ignored', + }, + 'result': ([('host', 5432)], { + 'database': 'db', + 'user': 'user', + 'target_session_attrs': 'any', + 'gsslib': OTHER_GSSLIB, + }) + }, + + { + 'name': 'gsslib_2', + 'dsn': 'postgresql://user@host/db?gsslib=ignored', + 'gsslib': OTHER_GSSLIB, + 'env': { + 'PGGSSLIB': 'ignored', + }, + 'result': ([('host', 5432)], { + 'database': 'db', + 'user': 'user', + 'target_session_attrs': 'any', + 'gsslib': OTHER_GSSLIB, + }) + }, + + { + 'name': 'gsslib_3', + 'dsn': 'postgresql://user@host/db', + 'env': { + 'PGGSSLIB': OTHER_GSSLIB, + }, + 'result': ([('host', 5432)], { + 'database': 'db', + 'user': 'user', + 'target_session_attrs': 'any', + 'gsslib': OTHER_GSSLIB, + }) + }, + + { + 'name': 'gsslib_4', + 'dsn': 'postgresql://user@host/db', + 'result': ([('host', 5432)], { + 'database': 'db', + 'user': 'user', + 'target_session_attrs': 'any', + 'gsslib': DEFAULT_GSSLIB, + }) + }, + + { + 'name': 'gsslib_5', + 'dsn': 'postgresql://user@host/db?gsslib=invalid', + 'error': ( + exceptions.ClientConfigurationError, + "gsslib parameter must be either 'gssapi' or 'sspi'" + ), + }, + { 'name': 'dsn_ipv6_multi_host', 'dsn': 'postgresql://user@[2001:db8::1234%25eth0],[::1]/db', @@ -972,6 +1070,7 @@ def run_testcase(self, testcase): server_settings = testcase.get('server_settings') target_session_attrs = 
testcase.get('target_session_attrs') krbsrvname = testcase.get('krbsrvname') + gsslib = testcase.get('gsslib') expected = testcase.get('result') expected_error = testcase.get('error') @@ -997,7 +1096,7 @@ def run_testcase(self, testcase): direct_tls=False, server_settings=server_settings, target_session_attrs=target_session_attrs, - krbsrvname=krbsrvname) + krbsrvname=krbsrvname, gsslib=gsslib) params = { k: v for k, v in params._asdict().items() @@ -1019,6 +1118,10 @@ def run_testcase(self, testcase): # Avoid the hassle of specifying direct_tls # unless explicitly tested for params.pop('direct_tls', False) + if 'gsslib' not in expected[1]: + # Avoid the hassle of specifying gsslib + # unless explicitly tested for + params.pop('gsslib', None) self.assertEqual(expected, result, 'Testcase: {}'.format(testcase)) From 1194a8a64484da63ecffc9b2f4ccedf5b60ee23c Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Fri, 7 Jun 2024 14:35:22 -0700 Subject: [PATCH 142/193] s/quote/quote_plus/ in the note about DSN part quoting Noticed in #1151 --- asyncpg/connection.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/asyncpg/connection.py b/asyncpg/connection.py index e54d6df8..10502c8d 100644 --- a/asyncpg/connection.py +++ b/asyncpg/connection.py @@ -2034,7 +2034,7 @@ async def connect(dsn=None, *, .. note:: The URI must be *valid*, which means that all components must - be properly quoted with :py:func:`urllib.parse.quote`, and + be properly quoted with :py:func:`urllib.parse.quote_plus`, and any literal IPv6 addresses must be enclosed in square brackets. For example: From 9fcddfc1c0b6e949a2c16e193d5472699b78458d Mon Sep 17 00:00:00 2001 From: Eugene Toder Date: Sat, 13 Jul 2024 06:33:14 -0400 Subject: [PATCH 143/193] Use asyncio.run() instead of run_until_complete() (#1140) Mostly in the documentation. 
--- README.rst | 3 +-- asyncpg/connection.py | 16 ++++++++-------- docs/api/index.rst | 3 +-- docs/usage.rst | 7 +++---- tools/generate_type_map.py | 5 +---- 5 files changed, 14 insertions(+), 20 deletions(-) diff --git a/README.rst b/README.rst index 0d078d82..f7583a89 100644 --- a/README.rst +++ b/README.rst @@ -88,8 +88,7 @@ Basic Usage ) await conn.close() - loop = asyncio.get_event_loop() - loop.run_until_complete(run()) + asyncio.run(run()) License diff --git a/asyncpg/connection.py b/asyncpg/connection.py index 10502c8d..a7b17b5b 100644 --- a/asyncpg/connection.py +++ b/asyncpg/connection.py @@ -800,7 +800,7 @@ async def copy_from_table(self, table_name, *, output, ... output='file.csv', format='csv') ... print(result) ... - >>> asyncio.get_event_loop().run_until_complete(run()) + >>> asyncio.run(run()) 'COPY 100' .. _`COPY statement documentation`: @@ -869,7 +869,7 @@ async def copy_from_query(self, query, *args, output, ... output='file.csv', format='csv') ... print(result) ... - >>> asyncio.get_event_loop().run_until_complete(run()) + >>> asyncio.run(run()) 'COPY 10' .. _`COPY statement documentation`: @@ -945,7 +945,7 @@ async def copy_to_table(self, table_name, *, source, ... 'mytable', source='datafile.tbl') ... print(result) ... - >>> asyncio.get_event_loop().run_until_complete(run()) + >>> asyncio.run(run()) 'COPY 140000' .. _`COPY statement documentation`: @@ -1027,7 +1027,7 @@ async def copy_records_to_table(self, table_name, *, records, ... (2, 'ham', 'spam')]) ... print(result) ... - >>> asyncio.get_event_loop().run_until_complete(run()) + >>> asyncio.run(run()) 'COPY 2' Asynchronous record iterables are also supported: @@ -1045,7 +1045,7 @@ async def copy_records_to_table(self, table_name, *, records, ... 'mytable', records=record_gen(100)) ... print(result) ... - >>> asyncio.get_event_loop().run_until_complete(run()) + >>> asyncio.run(run()) 'COPY 100' .. 
versionadded:: 0.11.0 @@ -1305,7 +1305,7 @@ async def set_type_codec(self, typename, *, ... print(result) ... print(datetime.datetime(2002, 1, 1) + result) ... - >>> asyncio.get_event_loop().run_until_complete(run()) + >>> asyncio.run(run()) relativedelta(years=+2, months=+3, days=+1) 2004-04-02 00:00:00 @@ -1772,7 +1772,7 @@ async def reload_schema_state(self): ... await con.execute('LOCK TABLE tbl') ... await change_type(con) ... - >>> asyncio.get_event_loop().run_until_complete(run()) + >>> asyncio.run(run()) .. versionadded:: 0.14.0 """ @@ -2258,7 +2258,7 @@ async def connect(dsn=None, *, ... types = await con.fetch('SELECT * FROM pg_type') ... print(types) ... - >>> asyncio.get_event_loop().run_until_complete(run()) + >>> asyncio.run(run()) [>> import asyncpg, asyncio - >>> loop = asyncio.get_event_loop() >>> async def run(): ... conn = await asyncpg.connect() ... stmt = await conn.prepare('''SELECT 2 ^ $1''') ... print(await stmt.fetchval(10)) ... print(await stmt.fetchval(20)) ... - >>> loop.run_until_complete(run()) + >>> asyncio.run(run()) 1024.0 1048576.0 diff --git a/docs/usage.rst b/docs/usage.rst index 21281b6b..3d36b199 100644 --- a/docs/usage.rst +++ b/docs/usage.rst @@ -44,8 +44,7 @@ which provides methods to run queries and manage transactions. # Close the connection. await conn.close() - asyncio.get_event_loop().run_until_complete(main()) - + asyncio.run(main()) .. note:: @@ -344,7 +343,7 @@ shows how to instruct asyncpg to use floats instead. finally: await conn.close() - asyncio.get_event_loop().run_until_complete(main()) + asyncio.run(main()) Example: decoding hstore values @@ -369,7 +368,7 @@ be registered on a connection using :meth:`Connection.set_builtin_type_codec() result = await conn.fetchval("SELECT 'a=>1,b=>2,c=>NULL'::hstore") assert result == {'a': '1', 'b': '2', 'c': None} - asyncio.get_event_loop().run_until_complete(run()) + asyncio.run(run()) .. 
_hstore: https://www.postgresql.org/docs/current/static/hstore.html diff --git a/tools/generate_type_map.py b/tools/generate_type_map.py index 8fa816a4..b4e90664 100755 --- a/tools/generate_type_map.py +++ b/tools/generate_type_map.py @@ -124,10 +124,7 @@ def main(): help='PostgreSQL server user') args = parser.parse_args() - - loop = asyncio.get_event_loop() - - loop.run_until_complete(runner(args)) + asyncio.run(runner(args)) if __name__ == '__main__': From 85fca3f3534c740e82f267fdad3549c6a7ffd0ee Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Sat, 13 Jul 2024 12:38:07 +0200 Subject: [PATCH 144/193] Unbreak documentation build. --- asyncpg/connection.py | 10 ++++------ docs/conf.py | 2 +- 2 files changed, 5 insertions(+), 7 deletions(-) diff --git a/asyncpg/connection.py b/asyncpg/connection.py index a7b17b5b..18892cfd 100644 --- a/asyncpg/connection.py +++ b/asyncpg/connection.py @@ -231,9 +231,8 @@ def add_query_logger(self, callback): :param callable callback: A callable or a coroutine function receiving one argument: - **record**: a LoggedQuery containing `query`, `args`, `timeout`, - `elapsed`, `exception`, `conn_addr`, and - `conn_params`. + **record**, a LoggedQuery containing `query`, `args`, `timeout`, + `elapsed`, `exception`, `conn_addr`, and `conn_params`. .. versionadded:: 0.29.0 """ @@ -1809,9 +1808,8 @@ def query_logger(self, callback): :param callable callback: A callable or a coroutine function receiving one argument: - **record**: a LoggedQuery containing `query`, `args`, `timeout`, - `elapsed`, `exception`, `conn_addr`, and - `conn_params`. + **record**, a LoggedQuery containing `query`, `args`, `timeout`, + `elapsed`, `exception`, `conn_addr`, and `conn_params`. 
Example: diff --git a/docs/conf.py b/docs/conf.py index cb371299..27924236 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -10,7 +10,7 @@ with open(version_file, 'r') as f: for line in f: - if line.startswith('__version__ ='): + if line.startswith('__version__: typing.Final ='): _, _, version = line.partition('=') version = version.strip(" \n'\"") break From 7f00484987b174a900f40886ca03d535ec3f42af Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Sun, 14 Jul 2024 12:36:54 -0700 Subject: [PATCH 145/193] .github: Explicitly install Postgres in platform tests (#1164) It appears that Github no longer preinstalls Postgres in their macOS images, so do this explicitly. --- .github/workflows/tests.yml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index b7229e18..ab64ed8b 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -53,6 +53,12 @@ jobs: version_line_pattern: | __version__\s*=\s*(?:['"])([[:PEP440:]])(?:['"]) + - name: Setup PostgreSQL + uses: tj-actions/install-postgresql@2a80e9368dff47cd05fee5bf3cf7d88f68c2f8e9 # v3.1.1 + if: steps.release.outputs.version == 0 && matrix.os == 'macos-latest' + with: + postgresql-version: 16 + - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v4 if: steps.release.outputs.version == 0 From 636420b115fa58d462a13b567a5ca48340ab6ed6 Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Wed, 17 Jul 2024 14:43:48 -0700 Subject: [PATCH 146/193] Mark pool-wrapped connection coroutine methods as coroutines (#1134) Use `markcoroutinefunction` (available in Python 3.12+) to make `inspect.iscoroutinefunction()` return the correct answer for wrapped connection methods. 
Fixes: #1133 --- asyncpg/compat.py | 7 +++++++ asyncpg/pool.py | 8 ++++++-- 2 files changed, 13 insertions(+), 2 deletions(-) diff --git a/asyncpg/compat.py b/asyncpg/compat.py index 435b4c48..881873a2 100644 --- a/asyncpg/compat.py +++ b/asyncpg/compat.py @@ -52,6 +52,13 @@ async def wait_closed(stream: asyncio.StreamWriter) -> None: pass +if sys.version_info < (3, 12): + def markcoroutinefunction(c): # type: ignore + pass +else: + from inspect import markcoroutinefunction # noqa: F401 + + if sys.version_info < (3, 12): from ._asyncio_compat import wait_for as wait_for # noqa: F401 else: diff --git a/asyncpg/pool.py b/asyncpg/pool.py index 06e698df..7b588c27 100644 --- a/asyncpg/pool.py +++ b/asyncpg/pool.py @@ -33,7 +33,8 @@ def __new__(mcls, name, bases, dct, *, wrap=False): if not inspect.isfunction(meth): continue - wrapper = mcls._wrap_connection_method(attrname) + iscoroutine = inspect.iscoroutinefunction(meth) + wrapper = mcls._wrap_connection_method(attrname, iscoroutine) wrapper = functools.update_wrapper(wrapper, meth) dct[attrname] = wrapper @@ -43,7 +44,7 @@ def __new__(mcls, name, bases, dct, *, wrap=False): return super().__new__(mcls, name, bases, dct) @staticmethod - def _wrap_connection_method(meth_name): + def _wrap_connection_method(meth_name, iscoroutine): def call_con_method(self, *args, **kwargs): # This method will be owned by PoolConnectionProxy class. if self._con is None: @@ -55,6 +56,9 @@ def call_con_method(self, *args, **kwargs): meth = getattr(self._con.__class__, meth_name) return meth(self._con, *args, **kwargs) + if iscoroutine: + compat.markcoroutinefunction(call_con_method) + return call_con_method From 258d8a95e97be26e57118c43f5efa912260dd1e8 Mon Sep 17 00:00:00 2001 From: Ben Beasley Date: Wed, 17 Jul 2024 17:44:46 -0400 Subject: [PATCH 147/193] Allow Cython 3 (#1101) Fixes #1083. 
--- pyproject.toml | 2 +- setup.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 12f6ae68..2da1b880 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -59,7 +59,7 @@ requires = [ "setuptools>=60", "wheel", - "Cython(>=0.29.24,<3.0.0)" + "Cython(>=0.29.24,<4.0.0)" ] build-backend = "setuptools.build_meta" diff --git a/setup.py b/setup.py index f7c3c471..5f2709d7 100644 --- a/setup.py +++ b/setup.py @@ -25,7 +25,7 @@ from setuptools.command import build_ext as setuptools_build_ext -CYTHON_DEPENDENCY = 'Cython(>=0.29.24,<0.30.0)' +CYTHON_DEPENDENCY = 'Cython(>=0.29.24,<4.0.0)' CFLAGS = ['-O2'] LDFLAGS = [] From e8488149e4f9c1e1f276ee85bc5f959512af12f9 Mon Sep 17 00:00:00 2001 From: Paulo Freitas <34652880+paulovitorweb@users.noreply.github.com> Date: Wed, 17 Jul 2024 18:45:29 -0300 Subject: [PATCH 148/193] docs: fix connection pool close call (#1125) --- docs/usage.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/usage.rst b/docs/usage.rst index 3d36b199..e490a2ef 100644 --- a/docs/usage.rst +++ b/docs/usage.rst @@ -442,7 +442,7 @@ Web service that computes the requested power of two. 
app['pool'] = await asyncpg.create_pool(database='postgres', user='postgres') yield - app['pool'].close() + await app['pool'].close() def init_app(): From 7dc58728f7d28608a50d3e0287ac5d8cfcfe6305 Mon Sep 17 00:00:00 2001 From: guacs <126393040+guacs@users.noreply.github.com> Date: Thu, 18 Jul 2024 03:22:31 +0530 Subject: [PATCH 149/193] fix: return the pool from _async_init__ if it's already initialized (#1104) --- asyncpg/pool.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/asyncpg/pool.py b/asyncpg/pool.py index 7b588c27..8a00d64b 100644 --- a/asyncpg/pool.py +++ b/asyncpg/pool.py @@ -396,7 +396,7 @@ def __init__(self, *connect_args, async def _async__init__(self): if self._initialized: - return + return self if self._initializing: raise exceptions.InterfaceError( 'pool is being initialized in another task') From 11101c6eff3abb8c734b8880f2a6bd5bc3160255 Mon Sep 17 00:00:00 2001 From: Ben Beasley Date: Wed, 17 Jul 2024 17:54:06 -0400 Subject: [PATCH 150/193] Replace obsolete, unsafe Py_TRASHCAN_SAFE_BEGIN/END (#1150) Use Py_TRASHCAN_BEGIN/END instead. 
https://bugs.python.org/issue44874 These are removed from the limited C API in Python 3.9, deprecated in 3.11, and removed in Python 3.13: https://docs.python.org/3.13/whatsnew/3.13.html#id8 --- asyncpg/protocol/record/recordobj.c | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/asyncpg/protocol/record/recordobj.c b/asyncpg/protocol/record/recordobj.c index c0049217..9767f43b 100644 --- a/asyncpg/protocol/record/recordobj.c +++ b/asyncpg/protocol/record/recordobj.c @@ -98,7 +98,7 @@ record_dealloc(ApgRecordObject *o) Py_CLEAR(o->desc); - Py_TRASHCAN_SAFE_BEGIN(o) + Py_TRASHCAN_BEGIN(o, record_dealloc) if (len > 0) { i = len; while (--i >= 0) { @@ -117,7 +117,7 @@ record_dealloc(ApgRecordObject *o) } Py_TYPE(o)->tp_free((PyObject *)o); done: - Py_TRASHCAN_SAFE_END(o) + Py_TRASHCAN_END } From 98aebf1fab88fd8bed1f5aa890736d9cfb2abb59 Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Wed, 17 Jul 2024 16:06:43 -0700 Subject: [PATCH 151/193] Drop a bit of dead code The `cache_key` argument to `ConnectionSettings` is unused. --- asyncpg/protocol/codecs/base.pyx | 2 +- asyncpg/protocol/protocol.pyi | 2 +- asyncpg/protocol/settings.pyx | 4 ++-- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/asyncpg/protocol/codecs/base.pyx b/asyncpg/protocol/codecs/base.pyx index c269e374..e8b44c74 100644 --- a/asyncpg/protocol/codecs/base.pyx +++ b/asyncpg/protocol/codecs/base.pyx @@ -483,7 +483,7 @@ cdef uint32_t pylong_as_oid(val) except? 0xFFFFFFFFl: cdef class DataCodecConfig: - def __init__(self, cache_key): + def __init__(self): # Codec instance cache for derived types: # composites, arrays, ranges, domains and their combinations. 
self._derived_type_codecs = {} diff --git a/asyncpg/protocol/protocol.pyi b/asyncpg/protocol/protocol.pyi index f85c5b6d..b81d13cd 100644 --- a/asyncpg/protocol/protocol.pyi +++ b/asyncpg/protocol/protocol.pyi @@ -222,7 +222,7 @@ class Codec: class DataCodecConfig: __pyx_vtable__: Any - def __init__(self, cache_key: object) -> None: ... + def __init__(self) -> None: ... def add_python_codec( self, typeoid: int, diff --git a/asyncpg/protocol/settings.pyx b/asyncpg/protocol/settings.pyx index 8e6591b9..2b535666 100644 --- a/asyncpg/protocol/settings.pyx +++ b/asyncpg/protocol/settings.pyx @@ -11,12 +11,12 @@ from asyncpg import exceptions @cython.final cdef class ConnectionSettings(pgproto.CodecContext): - def __cinit__(self, conn_key): + def __cinit__(self): self._encoding = 'utf-8' self._is_utf8 = True self._settings = {} self._codec = codecs.lookup('utf-8') - self._data_codecs = DataCodecConfig(conn_key) + self._data_codecs = DataCodecConfig() cdef add_setting(self, str name, str val): self._settings[name] = val From 85d7eed40637e7cad73a44ed2439ffeb2a8dc1c2 Mon Sep 17 00:00:00 2001 From: Eugene Toder Date: Thu, 18 Jul 2024 10:06:28 -0400 Subject: [PATCH 152/193] gssapi: use hostbased_service name type (#1167) This is used by libpq and allows us to skip canonicalization of host name, which was making a blocking DNS lookup. Similarly, don't canonicalize host name for SSPI, since this is not done by libpq. 
--- asyncpg/protocol/coreproto.pxd | 2 +- asyncpg/protocol/coreproto.pyx | 16 ++++++++-------- 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/asyncpg/protocol/coreproto.pxd b/asyncpg/protocol/coreproto.pxd index 8a398de9..f6a0b08f 100644 --- a/asyncpg/protocol/coreproto.pxd +++ b/asyncpg/protocol/coreproto.pxd @@ -140,7 +140,7 @@ cdef class CoreProtocol: cdef _auth_password_message_sasl_continue(self, bytes server_response) cdef _auth_gss_init_gssapi(self) cdef _auth_gss_init_sspi(self, bint negotiate) - cdef _auth_gss_get_spn(self) + cdef _auth_gss_get_service(self) cdef _auth_gss_step(self, bytes server_response) cdef _write(self, buf) diff --git a/asyncpg/protocol/coreproto.pyx b/asyncpg/protocol/coreproto.pyx index fd65327b..4ef438cd 100644 --- a/asyncpg/protocol/coreproto.pyx +++ b/asyncpg/protocol/coreproto.pyx @@ -6,7 +6,6 @@ import hashlib -import socket include "scram.pyx" @@ -728,8 +727,11 @@ cdef class CoreProtocol: 'use asyncpg with Kerberos/GSSAPI/SSPI authentication' ) from None + service_name, host = self._auth_gss_get_service() self.gss_ctx = gssapi.SecurityContext( - name=gssapi.Name(self._auth_gss_get_spn()), usage='initiate') + name=gssapi.Name( + f'{service_name}@{host}', gssapi.NameType.hostbased_service), + usage='initiate') cdef _auth_gss_init_sspi(self, bint negotiate): try: @@ -740,22 +742,20 @@ cdef class CoreProtocol: 'use asyncpg with Kerberos/GSSAPI/SSPI authentication' ) from None + service_name, host = self._auth_gss_get_service() self.gss_ctx = sspilib.ClientSecurityContext( - target_name=self._auth_gss_get_spn(), + target_name=f'{service_name}/{host}', credential=sspilib.UserCredential( protocol='Negotiate' if negotiate else 'Kerberos')) - cdef _auth_gss_get_spn(self): + cdef _auth_gss_get_service(self): service_name = self.con_params.krbsrvname or 'postgres' - # find the canonical name of the server host if isinstance(self.address, str): raise apg_exc.InternalClientError( 'GSSAPI/SSPI authentication is only supported 
for TCP/IP ' 'connections') - host = self.address[0] - host_cname = socket.gethostbyname_ex(host)[0] - return f'{service_name}/{host_cname}' + return service_name, self.address[0] cdef _auth_gss_step(self, bytes server_response): cdef: From 0a322a2e4ca1c3c3cf6c2cf22b236a6da6c61680 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 3 Sep 2024 15:56:46 -0700 Subject: [PATCH 153/193] Bump actions/download-artifact from 3 to 4.1.7 in /.github/workflows (#1178) Bumps [actions/download-artifact](https://github.com/actions/download-artifact) from 3 to 4.1.7. - [Release notes](https://github.com/actions/download-artifact/releases) - [Commits](https://github.com/actions/download-artifact/compare/v3...v4.1.7) --- updated-dependencies: - dependency-name: actions/download-artifact dependency-type: direct:production ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/release.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 450f471e..2e843f85 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -186,7 +186,7 @@ jobs: fetch-depth: 5 submodules: false - - uses: actions/download-artifact@v3 + - uses: actions/download-artifact@v4.1.7 with: name: dist path: dist/ From 597fe541d1eb251d63ee55a62e58a00db6181cf0 Mon Sep 17 00:00:00 2001 From: Ben Beasley Date: Thu, 17 Oct 2024 15:22:13 -0400 Subject: [PATCH 154/193] Allow testing with uvloop on Python 3.12 (#1182) There have been binary wheels for uvloop for Python 3.12 for some time. 
--------- Co-authored-by: Elvis Pranskevichus --- .github/workflows/tests.yml | 3 --- pyproject.toml | 2 +- 2 files changed, 1 insertion(+), 4 deletions(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index ab64ed8b..0e63b989 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -24,9 +24,6 @@ jobs: # uvloop does not support windows - loop: uvloop os: windows-latest - # No 3.12 release yet - - loop: uvloop - python-version: "3.12" runs-on: ${{ matrix.os }} diff --git a/pyproject.toml b/pyproject.toml index 2da1b880..ebf30858 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -42,7 +42,7 @@ gssauth = [ test = [ 'flake8~=6.1', 'flake8-pyi~=24.1.0', - 'uvloop>=0.15.3; platform_system != "Windows" and python_version < "3.12.0"', + 'uvloop>=0.15.3; platform_system != "Windows" and python_version < "3.14.0"', 'gssapi; platform_system == "Linux"', 'k5test; platform_system == "Linux"', 'sspilib; platform_system == "Windows"', From 73a7af5afe34e64aae734d597843a06aa7e564e7 Mon Sep 17 00:00:00 2001 From: ArtemIsmagilov <118372045+ArtemIsmagilov@users.noreply.github.com> Date: Thu, 17 Oct 2024 23:23:10 +0400 Subject: [PATCH 155/193] Remove empty line (#1168) --- pyproject.toml | 1 - 1 file changed, 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index ebf30858..e1319d54 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -58,7 +58,6 @@ docs = [ requires = [ "setuptools>=60", "wheel", - "Cython(>=0.29.24,<4.0.0)" ] build-backend = "setuptools.build_meta" From 327f2a7a306a23768a43be43b61c7a059afd155b Mon Sep 17 00:00:00 2001 From: Iurii Pliner Date: Thu, 17 Oct 2024 20:25:13 +0100 Subject: [PATCH 156/193] Require async_timeout for python < 3.11 (#1177) --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index e1319d54..d7a6ebcb 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -28,7 +28,7 @@ classifiers = [ "Topic :: Database :: Front-Ends", ] 
dependencies = [ - 'async_timeout>=4.0.3; python_version < "3.12.0"', + 'async_timeout>=4.0.3; python_version < "3.11.0"', ] [project.urls] From aa2d0e69cc3c338d799a951cd984ede3c997af9f Mon Sep 17 00:00:00 2001 From: MeggyCal Date: Thu, 17 Oct 2024 21:25:40 +0200 Subject: [PATCH 157/193] notice also the MERGE command (#1185) --- asyncpg/prepared_stmt.py | 4 ++-- asyncpg/protocol/prepared_stmt.pyx | 2 +- tests/test_exceptions.py | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/asyncpg/prepared_stmt.py b/asyncpg/prepared_stmt.py index 8e241d67..195d0056 100644 --- a/asyncpg/prepared_stmt.py +++ b/asyncpg/prepared_stmt.py @@ -147,8 +147,8 @@ async def explain(self, *args, analyze=False): # will discard any output that a SELECT would return, other # side effects of the statement will happen as usual. If you # wish to use EXPLAIN ANALYZE on an INSERT, UPDATE, DELETE, - # CREATE TABLE AS, or EXECUTE statement without letting the - # command affect your data, use this approach: + # MERGE, CREATE TABLE AS, or EXECUTE statement without letting + # the command affect your data, use this approach: # BEGIN; # EXPLAIN ANALYZE ...; # ROLLBACK; diff --git a/asyncpg/protocol/prepared_stmt.pyx b/asyncpg/protocol/prepared_stmt.pyx index 7335825c..cb0afa24 100644 --- a/asyncpg/protocol/prepared_stmt.pyx +++ b/asyncpg/protocol/prepared_stmt.pyx @@ -142,7 +142,7 @@ cdef class PreparedStatementState: # that the user tried to parametrize a statement that does # not support parameters. 
hint += (r' Note that parameters are supported only in' - r' SELECT, INSERT, UPDATE, DELETE, and VALUES' + r' SELECT, INSERT, UPDATE, DELETE, MERGE and VALUES' r' statements, and will *not* work in statements ' r' like CREATE VIEW or DECLARE CURSOR.') diff --git a/tests/test_exceptions.py b/tests/test_exceptions.py index dbffcc2d..e8bb7a1d 100644 --- a/tests/test_exceptions.py +++ b/tests/test_exceptions.py @@ -42,7 +42,7 @@ async def test_exceptions_str(self): self.assertEqual( e.detail, "Function's final statement must be SELECT or " - "INSERT/UPDATE/DELETE RETURNING.") + "INSERT/UPDATE/DELETE/MERGE RETURNING.") self.assertIn( 'DETAIL: Function', str(e) ) From 8f2be4cae9407b2bebf9cd25d26a85710aa83365 Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Thu, 17 Oct 2024 12:57:36 -0700 Subject: [PATCH 158/193] Unbreak test broken in #1185 --- tests/test_exceptions.py | 20 +++++++++++++------- 1 file changed, 13 insertions(+), 7 deletions(-) diff --git a/tests/test_exceptions.py b/tests/test_exceptions.py index e8bb7a1d..64ca921d 100644 --- a/tests/test_exceptions.py +++ b/tests/test_exceptions.py @@ -39,12 +39,18 @@ async def test_exceptions_str(self): CREATE FUNCTION foo() RETURNS bool AS $$ $$ LANGUAGE SQL; ''') except asyncpg.InvalidFunctionDefinitionError as e: - self.assertEqual( - e.detail, - "Function's final statement must be SELECT or " - "INSERT/UPDATE/DELETE/MERGE RETURNING.") - self.assertIn( - 'DETAIL: Function', str(e) - ) + if self.server_version < (17, 0): + detail = ( + "Function's final statement must be SELECT or " + "INSERT/UPDATE/DELETE RETURNING." + ) + else: + detail = ( + "Function's final statement must be SELECT or " + "INSERT/UPDATE/DELETE/MERGE RETURNING." 
+ ) + + self.assertEqual(e.detail, detail) + self.assertIn('DETAIL: Function', str(e)) else: self.fail('InvalidFunctionDefinitionError not raised') From 259d16e53d4850c085d08716c5f442c83c04cd08 Mon Sep 17 00:00:00 2001 From: Francisco Obispo <92332689+fobispotc@users.noreply.github.com> Date: Thu, 17 Oct 2024 15:55:22 -0700 Subject: [PATCH 159/193] handle `None` parameters in query, returning NULL (#1180) --------- Co-authored-by: Elvis Pranskevichus --- asyncpg/utils.py | 9 ++++++++- tests/test_copy.py | 8 +++++--- 2 files changed, 13 insertions(+), 4 deletions(-) diff --git a/asyncpg/utils.py b/asyncpg/utils.py index 3940e04d..5c1ca699 100644 --- a/asyncpg/utils.py +++ b/asyncpg/utils.py @@ -42,4 +42,11 @@ async def _mogrify(conn, query, args): # Finally, replace $n references with text values. return re.sub( - r'\$(\d+)\b', lambda m: textified[int(m.group(1)) - 1], query) + r"\$(\d+)\b", + lambda m: ( + textified[int(m.group(1)) - 1] + if textified[int(m.group(1)) - 1] is not None + else "NULL" + ), + query, + ) diff --git a/tests/test_copy.py b/tests/test_copy.py index be2aabaf..e119e6d8 100644 --- a/tests/test_copy.py +++ b/tests/test_copy.py @@ -148,12 +148,14 @@ async def test_copy_from_query_with_args(self): res = await self.con.copy_from_query(''' SELECT - i, i * 10 + i, + i * 10, + $2::text FROM generate_series(1, 5) AS i WHERE i = $1 - ''', 3, output=f) + ''', 3, None, output=f) self.assertEqual(res, 'COPY 1') @@ -161,7 +163,7 @@ async def test_copy_from_query_with_args(self): self.assertEqual( output, [ - '3\t30', + '3\t30\t\\N', '' ] ) From afdb05c7bffc6b199388148f0497ce8a5bc77e25 Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Thu, 17 Oct 2024 17:43:32 -0700 Subject: [PATCH 160/193] Add support for the `sslnegotiation` parameter (#1187) Direct TLS connections are already supported via the `direct_tls` argument, however PostgreSQL 17 added native support for this via `sslnegotiation`, so recognize it in DSNs and the environment. 
I decided not to introduce the `sslnegotiation` connection constructor argument for now, `direct_tls` should continue to be used instead. --- asyncpg/compat.py | 8 ++++++ asyncpg/connect_utils.py | 44 ++++++++++++++++++++++++++---- asyncpg/connection.py | 2 +- pyproject.toml | 9 ++++++ tests/test_connect.py | 59 +++++++++++++++++++++++++++++++++++++++- 5 files changed, 114 insertions(+), 8 deletions(-) diff --git a/asyncpg/compat.py b/asyncpg/compat.py index 881873a2..57eec650 100644 --- a/asyncpg/compat.py +++ b/asyncpg/compat.py @@ -6,6 +6,7 @@ from __future__ import annotations +import enum import pathlib import platform import typing @@ -78,3 +79,10 @@ def markcoroutinefunction(c): # type: ignore from collections.abc import ( # noqa: F401 Awaitable as Awaitable, ) + +if sys.version_info < (3, 11): + class StrEnum(str, enum.Enum): + __str__ = str.__str__ + __repr__ = enum.Enum.__repr__ +else: + from enum import StrEnum as StrEnum # noqa: F401 diff --git a/asyncpg/connect_utils.py b/asyncpg/connect_utils.py index 0631f976..4890d007 100644 --- a/asyncpg/connect_utils.py +++ b/asyncpg/connect_utils.py @@ -45,6 +45,11 @@ def parse(cls, sslmode): return getattr(cls, sslmode.replace('-', '_')) +class SSLNegotiation(compat.StrEnum): + postgres = "postgres" + direct = "direct" + + _ConnectionParameters = collections.namedtuple( 'ConnectionParameters', [ @@ -53,7 +58,7 @@ def parse(cls, sslmode): 'database', 'ssl', 'sslmode', - 'direct_tls', + 'ssl_negotiation', 'server_settings', 'target_session_attrs', 'krbsrvname', @@ -269,6 +274,7 @@ def _parse_connect_dsn_and_args(*, dsn, host, port, user, auth_hosts = None sslcert = sslkey = sslrootcert = sslcrl = sslpassword = None ssl_min_protocol_version = ssl_max_protocol_version = None + sslnegotiation = None if dsn: parsed = urllib.parse.urlparse(dsn) @@ -362,6 +368,9 @@ def _parse_connect_dsn_and_args(*, dsn, host, port, user, if 'sslrootcert' in query: sslrootcert = query.pop('sslrootcert') + if 'sslnegotiation' in query: + 
sslnegotiation = query.pop('sslnegotiation') + if 'sslcrl' in query: sslcrl = query.pop('sslcrl') @@ -503,13 +512,36 @@ def _parse_connect_dsn_and_args(*, dsn, host, port, user, if ssl is None and have_tcp_addrs: ssl = 'prefer' + if direct_tls is not None: + sslneg = ( + SSLNegotiation.direct if direct_tls else SSLNegotiation.postgres + ) + else: + if sslnegotiation is None: + sslnegotiation = os.environ.get("PGSSLNEGOTIATION") + + if sslnegotiation is not None: + try: + sslneg = SSLNegotiation(sslnegotiation) + except ValueError: + modes = ', '.join( + m.name.replace('_', '-') + for m in SSLNegotiation + ) + raise exceptions.ClientConfigurationError( + f'`sslnegotiation` parameter must be one of: {modes}' + ) from None + else: + sslneg = SSLNegotiation.postgres + if isinstance(ssl, (str, SSLMode)): try: sslmode = SSLMode.parse(ssl) except AttributeError: modes = ', '.join(m.name.replace('_', '-') for m in SSLMode) raise exceptions.ClientConfigurationError( - '`sslmode` parameter must be one of: {}'.format(modes)) + '`sslmode` parameter must be one of: {}'.format(modes) + ) from None # docs at https://www.postgresql.org/docs/10/static/libpq-connect.html if sslmode < SSLMode.allow: @@ -676,7 +708,7 @@ def _parse_connect_dsn_and_args(*, dsn, host, port, user, params = _ConnectionParameters( user=user, password=password, database=database, ssl=ssl, - sslmode=sslmode, direct_tls=direct_tls, + sslmode=sslmode, ssl_negotiation=sslneg, server_settings=server_settings, target_session_attrs=target_session_attrs, krbsrvname=krbsrvname, gsslib=gsslib) @@ -882,9 +914,9 @@ async def __connect_addr( # UNIX socket connector = loop.create_unix_connection(proto_factory, addr) - elif params.ssl and params.direct_tls: - # if ssl and direct_tls are given, skip STARTTLS and perform direct - # SSL connection + elif params.ssl and params.ssl_negotiation is SSLNegotiation.direct: + # if ssl and ssl_negotiation is `direct`, skip STARTTLS and perform + # direct SSL connection connector = 
loop.create_connection( proto_factory, *addr, ssl=params.ssl ) diff --git a/asyncpg/connection.py b/asyncpg/connection.py index 18892cfd..6ac2a09d 100644 --- a/asyncpg/connection.py +++ b/asyncpg/connection.py @@ -2001,7 +2001,7 @@ async def connect(dsn=None, *, max_cacheable_statement_size=1024 * 15, command_timeout=None, ssl=None, - direct_tls=False, + direct_tls=None, connection_class=Connection, record_class=protocol.Record, server_settings=None, diff --git a/pyproject.toml b/pyproject.toml index d7a6ebcb..15c034f4 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -112,6 +112,15 @@ exclude_lines = [ show_missing = true [tool.mypy] +exclude = [ + "^.eggs", + "^.github", + "^.vscode", + "^build", + "^dist", + "^docs", + "^tests", +] incremental = true strict = true implicit_reexport = true diff --git a/tests/test_connect.py b/tests/test_connect.py index 049aea26..517f05f9 100644 --- a/tests/test_connect.py +++ b/tests/test_connect.py @@ -592,6 +592,58 @@ class TestConnectParams(tb.TestCase): 'target_session_attrs': 'any'}) }, + { + 'name': 'params_ssl_negotiation_dsn', + 'env': { + 'PGSSLNEGOTIATION': 'postgres' + }, + + 'dsn': 'postgres://u:p@localhost/d?sslnegotiation=direct', + + 'result': ([('localhost', 5432)], { + 'user': 'u', + 'password': 'p', + 'database': 'd', + 'ssl_negotiation': 'direct', + 'target_session_attrs': 'any', + }) + }, + + { + 'name': 'params_ssl_negotiation_env', + 'env': { + 'PGSSLNEGOTIATION': 'direct' + }, + + 'dsn': 'postgres://u:p@localhost/d', + + 'result': ([('localhost', 5432)], { + 'user': 'u', + 'password': 'p', + 'database': 'd', + 'ssl_negotiation': 'direct', + 'target_session_attrs': 'any', + }) + }, + + { + 'name': 'params_ssl_negotiation_params', + 'env': { + 'PGSSLNEGOTIATION': 'direct' + }, + + 'dsn': 'postgres://u:p@localhost/d', + 'direct_tls': False, + + 'result': ([('localhost', 5432)], { + 'user': 'u', + 'password': 'p', + 'database': 'd', + 'ssl_negotiation': 'postgres', + 'target_session_attrs': 'any', + }) + }, 
+ { 'name': 'dsn_overrides_env_partially_ssl_prefer', 'env': { @@ -1067,6 +1119,7 @@ def run_testcase(self, testcase): passfile = testcase.get('passfile') database = testcase.get('database') sslmode = testcase.get('ssl') + direct_tls = testcase.get('direct_tls') server_settings = testcase.get('server_settings') target_session_attrs = testcase.get('target_session_attrs') krbsrvname = testcase.get('krbsrvname') @@ -1093,7 +1146,7 @@ def run_testcase(self, testcase): addrs, params = connect_utils._parse_connect_dsn_and_args( dsn=dsn, host=host, port=port, user=user, password=password, passfile=passfile, database=database, ssl=sslmode, - direct_tls=False, + direct_tls=direct_tls, server_settings=server_settings, target_session_attrs=target_session_attrs, krbsrvname=krbsrvname, gsslib=gsslib) @@ -1118,6 +1171,10 @@ def run_testcase(self, testcase): # Avoid the hassle of specifying direct_tls # unless explicitly tested for params.pop('direct_tls', False) + if 'ssl_negotiation' not in expected[1]: + # Avoid the hassle of specifying sslnegotiation + # unless explicitly tested for + params.pop('ssl_negotiation', False) if 'gsslib' not in expected[1]: # Avoid the hassle of specifying gsslib # unless explicitly tested for From 3aa98944a89cb6a706495581bba3d6bcc680ba54 Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Thu, 17 Oct 2024 18:01:36 -0700 Subject: [PATCH 161/193] Test and build on Python 3.13 (#1188) Regenerate test certs, fix errors in X.509 extension confiuration --- .github/workflows/release.yml | 18 +++---- .github/workflows/tests.yml | 6 +-- tests/certs/ca.cert.pem | 54 +++++++++---------- tests/certs/ca.key.pem | 98 +++++++++++++++++------------------ tests/certs/gen.py | 16 +++++- tests/certs/server.cert.pem | 62 +++++++++++----------- tests/certs/server.crl.pem | 19 +++++++ tests/certs/server.key.pem | 98 +++++++++++++++++------------------ 8 files changed, 202 insertions(+), 169 deletions(-) create mode 100644 tests/certs/server.crl.pem diff --git 
a/.github/workflows/release.yml b/.github/workflows/release.yml index 2e843f85..a32a3aeb 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -37,7 +37,7 @@ jobs: mkdir -p dist/ echo "${VERSION}" > dist/VERSION - - uses: actions/upload-artifact@v3 + - uses: actions/upload-artifact@v4 with: name: dist path: dist/ @@ -56,7 +56,7 @@ jobs: submodules: true - name: Set up Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: "3.x" @@ -65,7 +65,7 @@ jobs: pip install -U setuptools wheel pip python setup.py sdist - - uses: actions/upload-artifact@v3 + - uses: actions/upload-artifact@v4 with: name: dist path: dist/*.tar.* @@ -77,10 +77,10 @@ jobs: include: ${{ steps.set-matrix.outputs.include }} steps: - uses: actions/checkout@v4 - - uses: actions/setup-python@v4 + - uses: actions/setup-python@v5 with: python-version: "3.x" - - run: pip install cibuildwheel==2.16.2 + - run: pip install cibuildwheel==2.21.3 - id: set-matrix run: | MATRIX_INCLUDE=$( @@ -119,13 +119,13 @@ jobs: if: runner.os == 'Linux' uses: docker/setup-qemu-action@v2 - - uses: pypa/cibuildwheel@fff9ec32ed25a9c576750c91e06b410ed0c15db7 # v2.16.2 + - uses: pypa/cibuildwheel@7940a4c0e76eb2030e473a5f864f291f63ee879b # v2.21.3 with: only: ${{ matrix.only }} env: CIBW_BUILD_VERBOSITY: 1 - - uses: actions/upload-artifact@v3 + - uses: actions/upload-artifact@v4 with: name: dist path: wheelhouse/*.whl @@ -145,7 +145,7 @@ jobs: submodules: true - name: Set up Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: "3.x" @@ -186,7 +186,7 @@ jobs: fetch-depth: 5 submodules: false - - uses: actions/download-artifact@v4.1.7 + - uses: actions/download-artifact@v4 with: name: dist path: dist/ diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 0e63b989..8b5bc7f5 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -17,7 +17,7 @@ jobs: # job. 
strategy: matrix: - python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"] + python-version: ["3.8", "3.9", "3.10", "3.11", "3.12", "3.13"] os: [ubuntu-latest, macos-latest, windows-latest] loop: [asyncio, uvloop] exclude: @@ -57,7 +57,7 @@ jobs: postgresql-version: 16 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 if: steps.release.outputs.version == 0 with: python-version: ${{ matrix.python-version }} @@ -118,7 +118,7 @@ jobs: >> "${GITHUB_ENV}" - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 if: steps.release.outputs.version == 0 with: python-version: "3.x" diff --git a/tests/certs/ca.cert.pem b/tests/certs/ca.cert.pem index 4a8a7016..de5f9db5 100644 --- a/tests/certs/ca.cert.pem +++ b/tests/certs/ca.cert.pem @@ -1,35 +1,35 @@ -----BEGIN CERTIFICATE----- -MIIGFjCCA/6gAwIBAgIIDAM+rFY5KqgwDQYJKoZIhvcNAQELBQAwgaExCzAJBgNV +MIIGJjCCBA6gAwIBAgIICJCUmtkcj2MwDQYJKoZIhvcNAQELBQAwgaExCzAJBgNV BAYTAkNBMRAwDgYDVQQIDAdPbnRhcmlvMRAwDgYDVQQHDAdUb3JvbnRvMRgwFgYD VQQKDA9NYWdpY1N0YWNrIEluYy4xFjAUBgNVBAsMDWFzeW5jcGcgdGVzdHMxHTAb BgNVBAMMFGFzeW5jcGcgdGVzdCByb290IGNhMR0wGwYJKoZIhvcNAQkBFg5oZWxs -b0BtYWdpYy5pbzAeFw0yMTA5MTMxNjA2MDFaFw00MDExMTMxNjA2MDFaMIGhMQsw +b0BtYWdpYy5pbzAeFw0yNDEwMTYxNzIzNTZaFw00MzEyMTcxNzIzNTZaMIGhMQsw CQYDVQQGEwJDQTEQMA4GA1UECAwHT250YXJpbzEQMA4GA1UEBwwHVG9yb250bzEY MBYGA1UECgwPTWFnaWNTdGFjayBJbmMuMRYwFAYDVQQLDA1hc3luY3BnIHRlc3Rz MR0wGwYDVQQDDBRhc3luY3BnIHRlc3Qgcm9vdCBjYTEdMBsGCSqGSIb3DQEJARYO -aGVsbG9AbWFnaWMuaW8wggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDK -mu24288Os23VtRf8kp57sj7+s+PSD/8+KiZiJ4sy5KrUUVijVQgfCpxPzpWWtQ/7 -JbjQMt+kZqJwKqdzXAY8osnljpYYvbNWnc0GZY09F6z95GqVgX/81Fe8W3Jz6I9w -S2CXVneKGtux+6fztKbrA2b1kn69b3xClEHRLFZl9hKG8ck2H+gI5AEDgQmhTIXa -pl85bPuh54uKiUGnedPk07biCw3ZE5GTGWzEq5qMqFEfb19/L1vOvgx/Q4aqmjJw -lONB9DzMftetdKaR5SS+vH0QUhiWXwy7j1TjYtJP4M6fLinwguMYG8Qbg7NkL4QC 
-9T7zR5CZPJ0Q/Npiwv7qdMzyL7QklZ9y3YeA5wceyc2/zh0INN5bf4J1mDZjhYH9 -CIgVHSj6z44rWq9L+OzYT0EMDhZO0OeakTWgqXNICfeEXZ5hy3QVCUvKrgmnqs0f -imdH6dZQIGQIQ8Vcg/psk2hEP1hRWROn/cgCdadcEqbMdbtOUuMcnr0K6B/bVbXx -jAV4eVcCcS3w3wIG4Ki2aIXnXrHyEJmZJb03Ko7VXP0NTGuGfPYQj2ox4a4wViOG -pxxbnGGAFqV+BIVlhUMfL9PlatqsI6kUzJIsJUiyk6oPb3KeNQ5+MtS0S1DV0jA5 -wxDQZyEFiUsl6GLYSm4RajxoHdLR7Xqj3D7EWKGt/wIDAQABo1AwTjAMBgNVHRME -BTADAQH/MB0GA1UdDgQWBBRvLFXv6sI+ePP5aegYUWoVHAfRzTAfBgNVHSMEGDAW -gBRvLFXv6sI+ePP5aegYUWoVHAfRzTANBgkqhkiG9w0BAQsFAAOCAgEAK+QAtzhk -ih8Tng9cOheswrbWf9pclMyfl38+NsJxsZnpa2SlBp3qJl0fymyNLLBfyeRUFr++ -x1cRAEwVv6R6Iepj252+U+Cmz48xIthF29JxoC+x2P2YDGyqVBm4uuw54EIF0r0H -AvjTPSNa54gA3+KiK64ypFdlHZrwx3W9b5tUsfycpj2Jrn2HgTbWQD2gaYeIIdq6 -DNmPCJg6NQE9jlvNmVqlBavjc7MJqqd+0+XtCIWhaoqeu/T6g2Epth25cuqPKc0E -rltKiXNiZHcDfFnu7B6kw2LVA6EQdf5GO9JtAaiwhRugp1dJ5rdQqdaYpJngZtvd -8+PSdDZrXow0a1jW2w+3lM5XW3qtzIKJz4Q8CXL540s+SeRjLRwY02OZCvG4fC8c -D57MIFKoReYy5LgBHdPGmx8Kexo7vk2ib9taQCSd6fh0Ol070pNiOnLP9lE9iEqq -EvU1A+0dtPHbfyXqw9tdY18nxXbooypQZSqfxPSq3Bpv8KTsr9SSG+DV2LcJRfvi -OfVTPeIWW8C8SkbEXaTCUVgaNeYqvFsfsvkTmfhO8GHglDgnsveXHfnAwlC2Uxdq -T64oKToV7N1L2RA0JR9gJ4RQwPfyaFOHOPjd+3t4DFVl54GNbNfvELHRReoyJPse -SZeL4h6T3L17FWzugHMjxFi4f1/nPNk7d5Y= +aGVsbG9AbWFnaWMuaW8wggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCP ++oCl0qrReSlWj+yvfGz68UQqm6joL9VgeA0Tvc8S23Ia3S73wcTTdGhIQwMOaIuW +y+m3J3js2wtpF0fmULYHr1ED7vQ+QOWarTyv/cGxSCyOYo4KVPHBfT6lYQTJk5NW +Oc2wr5ff/9nhdO61sGxZa2GVBjmbLOJ9IBKTvRcmNgLmPo60wMHtF4L5/PuwVPuu ++zRoETfEh12avtY7Y2G+0i4ZRm4uBmw7hmByWzWCwqrV619BaFHaJUf2bEh5eCbz +1nhF7WHVjBfnSJOgDxmZbKZZPmNzTVm8UxN22g9Ao6cZSxjbFAdpIhlQhAT6sjlW +hvI6b58A3AJKi7zo+a7lnbPIeckduSkgbil3LZ4KxWgx6fPCBLqGH1XN6I8MQnX/ +e1ewiFXwuZMb+FgoKxaQBseuPVaA3ViYefysjvLjP7U9eRzv6qRimOmH5efaplbD +zGhRUKA8GgmN/B+S3ofqDhpp3zz7gFxjkE1f4/XNACqXt79iGaH+EscV4znxlsZj +gUQYAcExpAmKrJg5kmxagHcgu0pVKlyUvSba/kKQ/aYDgdddgPutH+UHs5pssc69 +YBpEXQTG9CMeRh6ZUgcrR0foJLM5g2k53xpG1oTHiJcCKARFZPRpDoZ6NjCIuFKY 
+6+HMcpFRVDsDnUXmFah9bUhsSQbc6MHHX/iTbpMGNwIDAQABo2AwXjAPBgNVHRMB +Af8EBTADAQH/MAsGA1UdDwQEAwIBBjAdBgNVHQ4EFgQUhGQbAW97KXQs68Z3efEj +55zsc4UwHwYDVR0jBBgwFoAUhGQbAW97KXQs68Z3efEj55zsc4UwDQYJKoZIhvcN +AQELBQADggIBADsy7jhBmwGbOZPox0XvB2XzWjOPl3uI3Ys3uGaAXVbGVnP3nDtU +waGg7Fhf/ibQVAOkWLfm9FCJEO6bEojF4CjCa//iMqXgnPJaWeYceb8+CzuF5Ukg +n/kfbj04dVvOnPa8KYkMOWQ6zsBgKuNaA5jOKWYwoHFgQNjKRiVikyOp6zF3aPu0 +wW7M7FOVHn0ZhMRBcJG8dGbQ8vaeu8z4i04tlvpQaFgtY66ECeUwhTIrvVuqtQOl +jR//w70TUTIH3JzzYmyCubOCjdqcNRYPRRiA/L+mdzrE7honSTQfo0iupT/5bJcu +GRjLHL/aRvYrq8ogqQKIYW0EbVuFzHfb+kPV61Bf5APbA26GU/14XkA4KwzJnDMR +d2wr0RivSceXtY2ZakYP6+2cqjuhk6Y0tl0FBuyQXqAbe1L7X2VctLJMi5UgksVB +q5rdHSJ3fbHRoCUpj4/rSafqJNHlAf2MEE/q8l0D8JhYoN69RhvyFQJLFEU4c74b +XHdFt6bfyxm4+ZzUdj/TXadPAUO1YfQCn9Tf7QOoR68acSvQxEDbChZlJYkdAE+C +zxNcoHVc6XIpk7NIr09qTQ5viz736fV6EI6OIoUaqrz9u+NZ3sPPD2Gf+rOinVFQ +R2Q5kxHYo8Kt1DK0fFcUe1cOZk3df7seQWw1OdJngp5S7gEWBiWg8zr7 -----END CERTIFICATE----- diff --git a/tests/certs/ca.key.pem b/tests/certs/ca.key.pem index 2d73448f..de814c10 100644 --- a/tests/certs/ca.key.pem +++ b/tests/certs/ca.key.pem @@ -1,51 +1,51 @@ -----BEGIN RSA PRIVATE KEY----- -MIIJKQIBAAKCAgEAyprtuNvPDrNt1bUX/JKee7I+/rPj0g//PiomYieLMuSq1FFY -o1UIHwqcT86VlrUP+yW40DLfpGaicCqnc1wGPKLJ5Y6WGL2zVp3NBmWNPRes/eRq -lYF//NRXvFtyc+iPcEtgl1Z3ihrbsfun87Sm6wNm9ZJ+vW98QpRB0SxWZfYShvHJ -Nh/oCOQBA4EJoUyF2qZfOWz7oeeLiolBp3nT5NO24gsN2RORkxlsxKuajKhRH29f -fy9bzr4Mf0OGqpoycJTjQfQ8zH7XrXSmkeUkvrx9EFIYll8Mu49U42LST+DOny4p -8ILjGBvEG4OzZC+EAvU+80eQmTydEPzaYsL+6nTM8i+0JJWfct2HgOcHHsnNv84d -CDTeW3+CdZg2Y4WB/QiIFR0o+s+OK1qvS/js2E9BDA4WTtDnmpE1oKlzSAn3hF2e -Yct0FQlLyq4Jp6rNH4pnR+nWUCBkCEPFXIP6bJNoRD9YUVkTp/3IAnWnXBKmzHW7 -TlLjHJ69Cugf21W18YwFeHlXAnEt8N8CBuCotmiF516x8hCZmSW9NyqO1Vz9DUxr -hnz2EI9qMeGuMFYjhqccW5xhgBalfgSFZYVDHy/T5WrarCOpFMySLCVIspOqD29y -njUOfjLUtEtQ1dIwOcMQ0GchBYlLJehi2EpuEWo8aB3S0e16o9w+xFihrf8CAwEA -AQKCAgEApJFdgOdCc415LLpxJl4tzwnEs3yJE8qcp/Dyxo2aOpeUzurYVasu8o/a 
-0dRam1StC3HjgXGhSNd5ICT1aPWZt0z/M7Ay6RvFfRimPYjlRXdis8QCczgCLuqH -7V5WRCHlyO/hIGxCovIX+6UPEhxt7L0Rt2zr95GD3EyyfWZHM4DCIcxphMY74mTZ -EfCRUuxmWWkENg/5ANSj+r5sjs2dOORjS45xDB8iAtsHB2TgH1pksmTzq8pbBz5F -xmWiEBc520qEocDyVaS+KY1z81OuGiPebhBRGmtQW1UcPaq6a9mN26xSsqKONbnv -++1pHHqf/wsXu+IoaN/cML1B4jDDf1milC7mmgPdETQjbco7PvSsxzG3pZktijoT -8WfCMda4SFgkLMDEKyD5tyUGQFsvijXFf9y+/V0ux3u1Hm6NApDXTf7gX5W0b9tD -uiupzcwCtA5s9AO6G0bQnddwzFGh91/ydyc5DfaRjfrG95zYouwqmMQXTqYG1USX -mLrDgHw3ierlwVWKUR0OnysMeNYtu5782RO3LSdL126PKLd/pLvG7FrETLFECP3B -QgM/vKlNY26mcX4DuALRRLWu+ORrGMclEp7Bw/JPTkFxj2gLrmL6JM1h+CFXDBmk -pE0Cl2PDCVq4aFWZDn4F8ioT4XW/2REtxp7E2wazNnCX+IUap1ECggEBAOeXY9Ib -m0GayJVm7kvvL6pY2e/lHlvi44xcTG3GrkOn/qMLIDkXvUyfjcqHZQhMoYhnYx4K -iyK4D/Mej4Jbj5dyRKHEn8tKGuDrlzFp0CLRQvg1s/LcktX8hdef9IPXHA3y6ML5 -X60KNN1PI/7aINEENn1qOqDvU6X9ST3VGAWbfyM5jOZDHIBkjJuJTUwndaDbIA09 -AqxqQjq6UntCG+seXBmE1OHht++pWgN5rlq1wJ2KJlGR2HdhtIl1JyfU/hisnfFD -ahQMUFoFYS3ecNUNumbQEBaZ66/mHP0p2YhaLK3j3shC8vsN15LOW6Ulzlmw7I3s -tGqcShUaldjQYvkCggEBAN/1dQst70hWLtjRnP/0FidKtq3l2u0Lg6+K7CUsIOEa -QH1s0CobT5j7eWtodPkZkYCzulhiPXk32mW0uKiAglJ+LPaU7HgNrFlJKefCrStP -o8LcdeZujRhBkBvU+xytoxpKIhdie4td106sRCb63F66MtU+dSJqEl6/5Piz0zLT -YgrFitRaRA5/jW47BUV4ZBRnHqrBN4PhoaYPp7oYIue6E1G+REdsL9+I1B1PhUV2 -vmVHvoQkwqa1Ne9AZg1ZmTbnSojKV1c1T/uwwW/UEDo6v3+qMH/wTpXMk7DIE7ih -NW/FADYRHEd1M11zxLOMmq43C9/KD261N97H17NP3rcCggEBAJKdgzJ3C7li1m3P -NjmYeWKs0XxQXwHpCAnKPRCaYaSvbEOoPYQnhU5HDKsVQF8atID4gwV3w1H9mQtf -Y5cxhBxq2QxYwJkglxehzpwX0w7X0D/3L68m+UbDkbBKsa/ttPMXv0gAPBP+jC03 -dyBW08O/mQeZAvjzys8hJQciKw0RvlF8k7kK77ZQ8bteFzOJH6zwTMBUyaaBtuAb -KTCjT61wEPqO338JOTteyX+9vyXqPsD9vviRDqu1jWggZOOQsjTIw00EUtnSWeRD -15wEYQZgpIuGWUkVtOItGlkj73WlMPf9dQLvb4iE4N8uCVLqNlMN8RSAsE92Fmh5 -5jfW5XECggEAQEd5En5aoU5rH7v57dSmzxw4lmzUixi08RtUb87cmP8p51Xl4U/5 -ZpU24kcW27Ak/OWY5Gk9757CRlK6dVJ9FSQ1z4gq3sI951qCdox/m2C+Rd100XCF -eqLGs9ZLRI3ptE/2vPN9NiD2/ROgc/eobF/Q2zeT8w6yuxMkquUiBwJ4r1LHZ++I -fQjLFQpHlwrY3qpCOQw/3NBTzw/LOjRXQF890EZl3oIEs4nYJ5l9TNSqDPOskMzk 
-OWjlVAgNwmMnAIUd9Wjt7I/WpwyyWGBrT+swr3mvdekJBSG0ehbS4jkS10OZrer3 -TOMsnPPvTwFaHAqck9yw1TuaD40YMdUIvQKCAQAHpX7JP3Qbt7Q+hzq66BVWwlp6 -qdKKjlGGB7ciiFwuZWRI019ilbmmOjCfvFuVh4pyZgQH/TG/9HnZPBmuXd0Jy6VJ -SIQWZQ58G3SmIFqXZYA5Gxk2u4B/bPmptfPX/zxkaSV83dQu3L0PdPVnCTzv1qDn -MdCMbq7K53zF/j05tWRdF4iey64pmoBZx7G3Ky9cwdMsKTm/7AHi0UBTHwGCrDFL -BDS6XW1ylSa0QJrd2+yryae+N0iYXA+5WmY6yuLkUrGXcf96e3ufrs73di5R10IV -D38YeZHQEIK5gmfWC9Ma5HZb6TB/CtweirY4IddUiPEpHJFmOV+TkGBmntF6 +MIIJKAIBAAKCAgEAj/qApdKq0XkpVo/sr3xs+vFEKpuo6C/VYHgNE73PEttyGt0u +98HE03RoSEMDDmiLlsvptyd47NsLaRdH5lC2B69RA+70PkDlmq08r/3BsUgsjmKO +ClTxwX0+pWEEyZOTVjnNsK+X3//Z4XTutbBsWWthlQY5myzifSASk70XJjYC5j6O +tMDB7ReC+fz7sFT7rvs0aBE3xIddmr7WO2NhvtIuGUZuLgZsO4Zgcls1gsKq1etf +QWhR2iVH9mxIeXgm89Z4Re1h1YwX50iToA8ZmWymWT5jc01ZvFMTdtoPQKOnGUsY +2xQHaSIZUIQE+rI5VobyOm+fANwCSou86Pmu5Z2zyHnJHbkpIG4pdy2eCsVoMenz +wgS6hh9VzeiPDEJ1/3tXsIhV8LmTG/hYKCsWkAbHrj1WgN1YmHn8rI7y4z+1PXkc +7+qkYpjph+Xn2qZWw8xoUVCgPBoJjfwfkt6H6g4aad88+4BcY5BNX+P1zQAql7e/ +Yhmh/hLHFeM58ZbGY4FEGAHBMaQJiqyYOZJsWoB3ILtKVSpclL0m2v5CkP2mA4HX +XYD7rR/lB7OabLHOvWAaRF0ExvQjHkYemVIHK0dH6CSzOYNpOd8aRtaEx4iXAigE +RWT0aQ6GejYwiLhSmOvhzHKRUVQ7A51F5hWofW1IbEkG3OjBx1/4k26TBjcCAwEA +AQKCAgABseW8zf+TyrTZX4VeRX008Q0n4UA6R4HgClnBDz12T94Gge8RHJdYE+k8 +XImXLFTkWA8uyEispSF7wbnndLDH42D1RmVarEHnsb1ipv6WOy7HGFLqvThBWluX +783yH4oe/Dw3JcIIcYcbl9hNjD+iR9jUu8eG057w8SU21wWEPiOHmVntt80woNO6 +ZKeD2mRCGZPy260H474O2ctE1LUsXWYMhx857HpusvTEs90r5mXDcetjpjo8cq7n +sDukLm1q9m3hCNvbezQ21UxjmHnpK/XDXDAohdMWG/ZBMmz2ilanvhITVieGLdAV +ehBi8SEqqxkD5hd9l5lxTjbRmUrdRZilnUKqup9WcOTQYeAZ2WAazyYuFqWAwSf+ +dU+SzMTG+7ts9y4RbnWL9H6hN2GWMeNdLRVqE4aECMv7kAIJZ2u6VyNXSEoVueBM +CJ7CU075QgxNL1REDWRBaUaflBhdwQFnMXBULw2E01KZFmQvZLe06SI/xjkB7oGU +HdqWRDx0YP8lrFG35ukA2t+EswJxcbZHsagEdrz0jjz0a87vjgHnff1XpowhZU6M +4OgtQpoM4t4O7xg/sl80c0WwVvsOHVkGwUARCfZ4F2fXnocpYOCWQQbsA/SH/qJ8 +l+ChM4XkBNzKAUtpwkozqisKURJKTAJyeuAKD4fXRX/IwcPUYQKCAQEAyp1iiuTX +pXzDso+3WPxLr3kwYJSUxpxSP4EjZZvzJoVflFBttUOoLURPEMrK5tEqWHqRrJto 
+73s3yQt4xWUtUql5eCB69nIVjseRhsbXjNzMIC41u65aflfIqQztHzF2gdFMZh3I +gBp87CzKHSf83ToN3QZtQxIvuPdYdxDIjCMHc5hgRSLNKGhKXs1qWA76ASGNwQKW +7nUflWfDG3yZ7sWtmz7T2djz2zsmmzppCRRVjHAxQWZ+TxW+KsBOpGzgNvteUese +ZK2ARc6lLSdgS74J5U6j07dOzQZ4eVC/OPHAIbPZxJAZ7/waP7YM+h+ohU+G8kXL +KevnXjsC2oa/FwKCAQEAteoHugnwXvl9VyPceGQeffmQIq095CoD35UVlq60yR/9 +zgGN8mrXuEgGyydCYrK0/pUYb1pQhk5Xy1D6t5ou44uYlGuksWDqquRwgl7qMMVE +0GAwm+3wUmz7u5XD3uEJaGWV+gbvg8Hbvl3V/MzjlI4caAZ3lcNaX/Jf3xG6Gyfi +So0iQzVMN6NR7m+I32YFB3jxu9PlzUTEj+9SCHuERFAozuzwjdLwiYjNMzv0zPWj +v3ERO2mX6PE6yN1XkBsCGGG9qVz/ZzvKOz8Dl4TryY0a5eg4QUEZ3nUlnpq9/8M3 +xcN6M2yK8XLbTmVhSHX2J5nVI3s+BTbVHBoO0edl4QKCAQBcmMbTUThYkgdh0Jpr +WYpBXHJGgUDo78IK8bq6kiXygdunjYZF4/C1F1XHB9bo28itfP6cUr4HTFm3UL3W +AKJQ99DinH11qbe+c+hHHxKddr73Kgc2ib0jpny2/YhUzCcrtvpiZNQf73sN+H46 +Cu9eL0zsqSZAE8ypjKjqaUot+UhLhOTiU8BM6jSq1Nf3/Ig3Ah2lishtnCtd/XjG +VBCJdeAcZf8tvR/dHlBLestL8fYS46cvC2dIP1iUcyS9smBZ4FE/wOM4Aa7wuDr2 +wtsYYnZlTKZEeK7TtlRSpRtvK9Sx0l8AnRatfZqFaW7O1K8QlcLHcCwkMYKgpvlr +407rAoIBAQCi5nqa1xGgCux53wwr5wQDLTssQlS8//7N9ZQKhlIwFOzT0EKLha+9 +PwqOW46wEXXQ0DS8anTXgEpQMCkDxxcb/sLYjfhCOxaJh91Ucahnmg+ARdLhn1Xo +id124qsu5/fju6xs5E8RfsTHmQHpypQ1UHkRklD+FJzWdJXzjM1KShHzTqUS6CRj +YmYZDVnVK2dvhJd76knL4jve5KFiJTGRdvLEMhtL9Uwe7RlMOvGBpKpI4fhbarh1 +CafpfYRO8FCVAtmzUysHB9yV51zRD1+R8kDXBndxv9lpgx/4AnwID4nfF6hTamyV +wJOwhUpzd+bBGZlql483Xh3Cd3cz8nIhAoIBACs/XIDpXojtWopHXZReNwhqPC1D +q3rjpPrZ8uqDu0Z/iTTO9OSvYaMBTVjXQ7w8T3X3ilMr45kpsHx0TQeh3Jbjy459 +S9z+6MtSIM0fbpYBEfa7sirDQM/ZlgZjm7vq/4lBVFGFIw7vxu4m/G0oHtihWRKh +ClGG1Ypm00srgWihhjtRn8hfnLqCi4t9xxW1q8Te01Gem8H0nfNKfs5V8O4cKIZa +izrfne/1Fto1khYFTlP6XdVHPjvl2/qX2WUz4G+2eNWGQVghC70cuV8kiFYlEXVp +a6w2oSx8jo+5qRZrMlUQP5bE7dOBvZuoBmEi/FVfRYuFdxSZ3H2VAZKRgC4= -----END RSA PRIVATE KEY----- diff --git a/tests/certs/gen.py b/tests/certs/gen.py index c08f3061..19203840 100644 --- a/tests/certs/gen.py +++ b/tests/certs/gen.py @@ -59,6 +59,20 @@ def _new_cert(issuer=None, is_issuer=False, serial_number=None, **subject): builder = ( 
builder.add_extension( x509.BasicConstraints(ca=True, path_length=None), + critical=True, + ) + .add_extension( + x509.KeyUsage( + digital_signature=False, + content_commitment=False, + key_encipherment=False, + data_encipherment=False, + key_agreement=False, + key_cert_sign=True, + crl_sign=True, + encipher_only=False, + decipher_only=False, + ), critical=False, ) .add_extension( @@ -88,7 +102,7 @@ def _new_cert(issuer=None, is_issuer=False, serial_number=None, **subject): ) .add_extension( x509.BasicConstraints(ca=False, path_length=None), - critical=False, + critical=True, ) .add_extension( x509.ExtendedKeyUsage([oid.ExtendedKeyUsageOID.SERVER_AUTH]), diff --git a/tests/certs/server.cert.pem b/tests/certs/server.cert.pem index a4678151..9d2af274 100644 --- a/tests/certs/server.cert.pem +++ b/tests/certs/server.cert.pem @@ -1,39 +1,39 @@ -----BEGIN CERTIFICATE----- -MIIG4zCCBMugAwIBAgICEAAwDQYJKoZIhvcNAQELBQAwgaExCzAJBgNVBAYTAkNB +MIIG5jCCBM6gAwIBAgICEAAwDQYJKoZIhvcNAQELBQAwgaExCzAJBgNVBAYTAkNB MRAwDgYDVQQIDAdPbnRhcmlvMRAwDgYDVQQHDAdUb3JvbnRvMRgwFgYDVQQKDA9N YWdpY1N0YWNrIEluYy4xFjAUBgNVBAsMDWFzeW5jcGcgdGVzdHMxHTAbBgNVBAMM FGFzeW5jcGcgdGVzdCByb290IGNhMR0wGwYJKoZIhvcNAQkBFg5oZWxsb0BtYWdp -Yy5pbzAeFw0yMTA5MTMxNjA2MDFaFw00MDExMTMxNjA2MDFaMIGEMQswCQYDVQQG +Yy5pbzAeFw0yNDEwMTYxNzIzNTZaFw00MzEyMTcxNzIzNTZaMIGEMQswCQYDVQQG EwJDQTEQMA4GA1UECAwHT250YXJpbzEYMBYGA1UECgwPTWFnaWNTdGFjayBJbmMu MRYwFAYDVQQLDA1hc3luY3BnIHRlc3RzMRIwEAYDVQQDDAlsb2NhbGhvc3QxHTAb BgkqhkiG9w0BCQEWDmhlbGxvQG1hZ2ljLmlvMIICIjANBgkqhkiG9w0BAQEFAAOC -Ag8AMIICCgKCAgEAwvenCzhPXe+m+QEOdqK1YRnhKKGAeRo0oV7BfDAwhrgrnc2R -kGg+T5liQYh3ddj13LHPdLehhVz4B1tNkfZPLSeMDwjU8sNRWkdiAI3ZHRmVIVOh -Ru4BRzI4WqdZpa5cImlFaUjtHa/+w7ekHnllwodpbjH4Vgs9LWQiH8CdTVpj2clq -H78ZShlRvLyjo6OMQ6fbxAFtcYDGHwhR7JZ4VeCBm40O0Fl/c0ckmOtoYd1BTYX9 -RgIzTt0oV6ZiUH/SKRdYyb9GPUlfm0URK5j5MZPn10riACnaNEHytEREQEkpHWiD -RPcmlRCJarg4zhObuI5f6kUX9R1XrIKY4SAyDKzoSdxRFgYEWN6HyfylakU5LFnE -4ZAgihbzuFG4fGOf88F+KqaC6yvz/mvgxB8IPSDaILE37gGuJUTGhDGkKAVIB5Xb 
-WWR6e4VJcnmveu1z5+M6jwTR2+61y14h3WfACZLbAdPW1ivr6kjbaXlN658NEA1G -I/5eY7kVFAapoGdLOWlI7iXLGHrORLL7l2nh7+cYnHGPT3e5WHJZ67a0Jvtv0K/5 -dBgs2gwB+6FcXe2foKAmQ3/B5rAmshtb/0Ya4wRCglGxXgQQFCZseT5TAJhhHwbB -yqVFOgzvYSFw7gXQcfxfxf0LoUYK2O7WwqDJyargkIMDTZfaL+7ht6pfSmkCAwEA -AaOCAT4wggE6MAsGA1UdDwQEAwIFoDAJBgNVHRMEAjAAMBMGA1UdJQQMMAoGCCsG -AQUFBwMBMBQGA1UdEQQNMAuCCWxvY2FsaG9zdDAdBgNVHQ4EFgQUE7Na2Y9wLTBC -vxuoQh8lHF/wSR0wgdUGA1UdIwSBzTCByoAUbyxV7+rCPnjz+WnoGFFqFRwH0c2h -gaekgaQwgaExCzAJBgNVBAYTAkNBMRAwDgYDVQQIDAdPbnRhcmlvMRAwDgYDVQQH -DAdUb3JvbnRvMRgwFgYDVQQKDA9NYWdpY1N0YWNrIEluYy4xFjAUBgNVBAsMDWFz -eW5jcGcgdGVzdHMxHTAbBgNVBAMMFGFzeW5jcGcgdGVzdCByb290IGNhMR0wGwYJ -KoZIhvcNAQkBFg5oZWxsb0BtYWdpYy5pb4IIDAM+rFY5KqgwDQYJKoZIhvcNAQEL -BQADggIBAC66T8P6uZEa/0Gu7N58hHM5wZmWAgY9CWyexqLwhot43oQw53TyLYvP -EhbE41yB7kuqvwFvZmUP5/f2LiqrXbl37/vJyITr7oPmZ4cYRHLdsVOix8W8GD5e -lwrcqbudkulIcINBewfOtEuE2IkVtRSK8GzUvKcDpTOhmC1mHK/DtmgMvmGwazHy -fIHZjcUKFdOr1WZ7X8wnnEfP/OcYsizNXjGctfun/r984PhxwojoP/X+r2ycXhrr -X31m+qbj5QyarNxaje3LDA1IBCKSVYhwEHhZgXV2NBuUJYr58u053u2CcxNvHlMS -rNflhiB0MWpbTZBUBR/bnHBi5plt6eyABV4xZfslQCGisc4zWYSZqXa+HYgpn9Ja -NNbZL6Pj/hFlZg2ARlDio4KAQWjnQlS4e7U2vJXPbI/tfCMpNk+PQ7fRZFCRjWDh -OtcejGna2rBtXIHf6yuV8ultyLdIm5FqPhBE0eRisfWjhEGa2UG7IeyXs0+muLsi -n4NrZgYogo8ADOCiQtH0Z1/ropqoXlptNr8XJYYhz8rvIRXfwLqmqebp3gSD92Hd -jt4dCDmHT8ai9Inn8MqGqTtU2TlV4rba6WxNoiX2z1xbXw2kGtrdlxaYekBK+DGl -8ky4IUinTi0fUrBxLtxpPtztXPArvXSRiRTf0hRtS7v0QI9VuwyV +Ag8AMIICCgKCAgEA3F017q/obCM1SsHY5dFz72pFgVMhBIZ6kdIInbFv7RmEykZz +ubbJnrgwgYDO5FKGUNO+a80AbjIvBrtPtXs9Ip/QDg0jqgw/MOADCxCzYnAQ2Ew2 +y1PfspGtdPhLNTmrO8+AxU2XmjsYY0+ysgUQQttOs9hJ79pIsKGBEES8g9oJTiIf +tKgCxCIuhiZC+AgjeIQZUB9ccifmOGrCJYrD6LBuNGoQNW2/ykqjuHE8219dv1hV +do8azcp/WmejjQguZyU3S/AofnyyNE24rWpXbbFs+9FFaUXd8g/fWCwrRmcXpOaE +lvkmMZyuT9kuglHsvpzzGGNSUpvVoPfldk/4JY/kJrA2G5pgTX6mGRYGEN0jmlCa +yg/ZFn36G0mA5ZBH4Qln+lKUSjJH8bhlFXvXlE3Mc34OCdOAp1TRfOT/qCRKo9A5 +KCjVOvG5MAKE8TZnTFLCSx5gK/EdQ2iV7Sm3aVc2P4eEJh+nvv1LDVLQEAak6U+u 
+sZN5+Wnu7wDKSlh80vTTtoqls5Uo3gIxHYnqX5Fj6nwCzGjjXISNE4OKZLuk3can +mciEES3plUrut+O6a2JWiDoCrwX4blYXhtL92Xaer/Mk1TSf2JsmL6pODoapsA0S +CHtpcgoodxdKriy1qUGsiNlPNVWjASGyKXoEZdv49wyoZuysudl1aS1w42UCAwEA +AaOCAUEwggE9MAsGA1UdDwQEAwIFoDAMBgNVHRMBAf8EAjAAMBMGA1UdJQQMMAoG +CCsGAQUFBwMBMBQGA1UdEQQNMAuCCWxvY2FsaG9zdDAdBgNVHQ4EFgQUO/cXg1uX +2oHZodbw6F3/HakLdaQwgdUGA1UdIwSBzTCByoAUhGQbAW97KXQs68Z3efEj55zs +c4WhgaekgaQwgaExCzAJBgNVBAYTAkNBMRAwDgYDVQQIDAdPbnRhcmlvMRAwDgYD +VQQHDAdUb3JvbnRvMRgwFgYDVQQKDA9NYWdpY1N0YWNrIEluYy4xFjAUBgNVBAsM +DWFzeW5jcGcgdGVzdHMxHTAbBgNVBAMMFGFzeW5jcGcgdGVzdCByb290IGNhMR0w +GwYJKoZIhvcNAQkBFg5oZWxsb0BtYWdpYy5pb4IICJCUmtkcj2MwDQYJKoZIhvcN +AQELBQADggIBAD4Ti52nEttUNay+sqqbDLtnSyMRsJI8agPqiHz6bYifSf530rlh +qlHYUY5tgfrd8yDZNIe9Ib7Q1WQjgR8c/T9SoFnLl/tff1CVOAYQ/ffCZGTdBOSc +KfdKEEvObWxWsqv31ZAMWVzfPsF7rwbTbZ8YdH2CNjxbZxrSEn2IrjplsoP5WMsE +6t7Q+J5wpi2yiEI9PoY2wH5WBB8ONWvZfj9r6OrczlTEZ+L6eiip5kMiw5R9EVt6 +ju2aMWqbZTI49Mu/qvXRAkwYvX7mrhuW/4mPHOW/zSnN7hOyjntx1fdnpPD5BTT6 +CoJ7nhWgnntw2kk2V9UBCYpVeqidDRrs+nr1xSpduuM1ve3SDkIpd6EGEUqZJ12s +5xpCUFK67atCZOXbJXqanm+3N9kbqYuwkWoqnPjOfMYW7oABmUy8elVGGwTuiTI0 +sXS3aQJ+Bm7oqSXrIxUTjOUUaYNhhaqZdXaO/29vI2+i975Pt1ZLLPUkp0hsUgTT +kryN02TlNTxxQafTWad6YdzyrwvMpV7vxf7JQkOKRwLinqLCDVxjBt66O9mLIpQF +WIfWQG+X4sgobB0NTtBWeGkrIgnhUtsT0ibVm4JAC1cbxdLOq2dfcURC8UFWJXok +yFr/uaDZiKKbUFXbalZwnx6H6ucfl5No3hheexadyIbPNcHhFJ9zGXot -----END CERTIFICATE----- diff --git a/tests/certs/server.crl.pem b/tests/certs/server.crl.pem new file mode 100644 index 00000000..4abe1644 --- /dev/null +++ b/tests/certs/server.crl.pem @@ -0,0 +1,19 @@ +-----BEGIN X509 CRL----- +MIIDAjCB6wIBATANBgkqhkiG9w0BAQsFADCBoTELMAkGA1UEBhMCQ0ExEDAOBgNV +BAgMB09udGFyaW8xEDAOBgNVBAcMB1Rvcm9udG8xGDAWBgNVBAoMD01hZ2ljU3Rh +Y2sgSW5jLjEWMBQGA1UECwwNYXN5bmNwZyB0ZXN0czEdMBsGA1UEAwwUYXN5bmNw +ZyB0ZXN0IHJvb3QgY2ExHTAbBgkqhkiG9w0BCQEWDmhlbGxvQG1hZ2ljLmlvFw0y +NDEwMTcxNzIzNTZaFw0yNDEwMTgxNzIzNTZaMBUwEwICEAAXDTI0MTAxNzE3MjM1 
+NlowDQYJKoZIhvcNAQELBQADggIBAEVNX72KK6etoZQOXzPgd8ZJNrYcsOwjNZFL +ZxC47uX+yrxjv7Wrrk4feyakFi5bL9n8/JMggcpxC6yxMQH/sdOZJ0BzKw3GUAxj +m53i1GGO1lGdKH5a7uDPZVW362JwCVE81ROCdb1SL/yYmIwhD4w2bqjOQuI63Xe1 +MDfVZBqcIwzzkA5PEjTSFQIsBcHU+rDrWggkz/XJh5alRav8Gnj7KTE8U1z5UeKV +LUk8L8+ZLW6XlrTnyjOn3qT7sZw2C/R46GCyHWwT5tbLhJhm2u1EuX3Iids02vIP +w9bYf7+Uu2lsse9TuFNXtW0UFLdvVezomHjNBCaMI/MIvG4wSWnAo5bTtlowzxSy +7rpQQYBebcl5somUAhHqs4dsxbEwCXMPDdapiXkhxR9R4nDvkfsgwyqIRsWsIEq6 +PFjjRySNFUg5/vqhVQrg0hV7ygzXfd/kIlud3ZkKnli51TuFMWKD5sMN0r8ITLdG +usoJQiF6G3ByLQBnsiQoHbipWkWTOKmfB/cfaPXdagPZH6rQmJeeNq0vBy6VqbFi +7D+BqABs+yIT6uJEEqyPGJttkUZP+0ziaK+DZF4MgJtiERtz2GjKMeh3h/YSqA27 +8El6na7hPA3k1pANkaOaKuxZYzrPsl3P91ISGL6E0dgd6f9NZMOxbhfNKoDsBJnd +Hjb3RTY4 +-----END X509 CRL----- diff --git a/tests/certs/server.key.pem b/tests/certs/server.key.pem index 9c69c46c..ea9bb94e 100644 --- a/tests/certs/server.key.pem +++ b/tests/certs/server.key.pem @@ -1,51 +1,51 @@ -----BEGIN RSA PRIVATE KEY----- -MIIJKQIBAAKCAgEAwvenCzhPXe+m+QEOdqK1YRnhKKGAeRo0oV7BfDAwhrgrnc2R -kGg+T5liQYh3ddj13LHPdLehhVz4B1tNkfZPLSeMDwjU8sNRWkdiAI3ZHRmVIVOh -Ru4BRzI4WqdZpa5cImlFaUjtHa/+w7ekHnllwodpbjH4Vgs9LWQiH8CdTVpj2clq -H78ZShlRvLyjo6OMQ6fbxAFtcYDGHwhR7JZ4VeCBm40O0Fl/c0ckmOtoYd1BTYX9 -RgIzTt0oV6ZiUH/SKRdYyb9GPUlfm0URK5j5MZPn10riACnaNEHytEREQEkpHWiD -RPcmlRCJarg4zhObuI5f6kUX9R1XrIKY4SAyDKzoSdxRFgYEWN6HyfylakU5LFnE -4ZAgihbzuFG4fGOf88F+KqaC6yvz/mvgxB8IPSDaILE37gGuJUTGhDGkKAVIB5Xb -WWR6e4VJcnmveu1z5+M6jwTR2+61y14h3WfACZLbAdPW1ivr6kjbaXlN658NEA1G -I/5eY7kVFAapoGdLOWlI7iXLGHrORLL7l2nh7+cYnHGPT3e5WHJZ67a0Jvtv0K/5 -dBgs2gwB+6FcXe2foKAmQ3/B5rAmshtb/0Ya4wRCglGxXgQQFCZseT5TAJhhHwbB -yqVFOgzvYSFw7gXQcfxfxf0LoUYK2O7WwqDJyargkIMDTZfaL+7ht6pfSmkCAwEA -AQKCAgAujTM1WpyYsUAM9FOfv/nO1X8NVIJ4Z+lpHlbUcC0l/ZNsekjnUfyOxPDQ -9OSRHtyVdV8zXyUR0sDmAMbkswr0nRyz+kfeLwSdqa2ctEHC0PjqnC1F4k4r0bHi -81JUXO1iyf/ow6DaFcuer5pgLFw/tlVWGlhRMx3IWMBNFJB6h7qPpafRLK+9IY6C -ogfwanxzKwEuK6kWEMk9X58v/j19Q72uhl+jH7tuqu3yFUM3Gr0c5YEz1hKqIeQg 
-CXov/lUPuqNYiHMc7wgE6tjOsBfP3qDcpuSPZW7US2rH4ATr1IwcmXe+X8S2ktw8 -vv/RNJ1Z06TTKuwtenQUnJokJqvMMESqEHdld5wwDo3MxCqvkcSUeS22cKlBZjeF -8/5wqpTMVpWxE7kfZFsMinBIV3gRPh8v87aDjrULJYltLQ6e8Pd0sAO0x0jAby8H -o5mjPSjHsK0m4vJyNB0paiWJcbRMQXpKX7U3smXxxAqWaqRgkkXk6wGICxX2oV34 -T6tvQ7GPCqNR8wnnXDx07imcHGAMeT62Zo15DrupP7eRxtIaO+f94HQiM6aIIcDv -kXyNZP0B1THj1C9eFy2hy6yvVOv1ZTtaXSXCOcY5dstDDKKZiAs2JTgcMtT5AZ7H -Q0JZAulk2AIeLHlNktUOZeYAA7nrJVS+PhsPcOep5N9CeM2EgQKCAQEA5P/I9jv2 -ZLfzTLJq3h9t7HMPGC/u+a7xD6ycTA4h5LAaUoMibkMobG7Sm/idNwvjUK3eobpz -KV01L5B69Om0GlaCn1zyDvPiZ+Z6COqwlHMIQcNR8ohNyGMIzkngJPh8RJN5a1a+ -NkT+lAsxAZx4RUWOs+PboTrqy3YUWQZLbTK5k0nBoAwW6V7PmdrjDAz4AU3nabQ4 -9JXacMd1gzB7/VWFt3rprR39yfmTrT8vR1w/DRnWmYpIx/DZ1MDvkIeWdrzFakyu -ah8HkW+tFB2BajnXfD+GD/L2sdEhez9YVjv/enJrNrsPRRk6yJoUTydkqPejBOOz -DJTfdQknWBnFKwKCAQEA2fSjGu8SS/Ah3DVAdhczX8YDHfJm1ctY5/uGG5TQI8Fn -pN6jBt8e7a5Aqb32naO73xYKQipiJ30Diqp5bPXv/4SJB17qCJvvoIlWXmy6ROnC -a7ndAR7N6VgJHf7uvuPa9CCZQkpP2fG3MPJXAfynVPY+D/xonZBV/o9lioBGEin+ -ENqVYjb7tX7h0Of/IbCzbTMnmEiCaz3Mm/8RME9Mh8BZfbJTUk9Sb/Q6oTMwMd9H -GcsZj4XYbxYGdHA28mFlZoIUdDesd8ZUWka21U6GVdz4OJtfoI7MJdqRzt7uEwJC -UixWWQn+LFpNFjKjKnhFFc4re52MvKB90R+kWErMuwKCAQAp2ZkXbwPrijabmKux -JltHcAudJv1sgg0qCSiTOa32Bjz5QV/keL+FeYdh28CXk8OaNfxO4C01rQQQSj4+ -TguNGKxMhYbzNpz00gkRYhqdNpyWsCRkx4Y3jenJEXU2LHdBbRYuiK7AakGAOr9d -BQRx3HFk7Mpxn7vTLSQw1NaqATAq+7q4Dh2Nzrbv7jG6PRCB5IPbLIWQJWbDX6BZ -Nl4igSOr0XmtGqML62GSss5oIzKeqU8vxjbg22Jj4FKnvi/ASWVmtNbXLA6NBLTD -zVSeXi3EVjOg7I0rGAYfaQcy00owTYLMgMkcnqzAhnAZuyBJROB0/0v0i6x+zgpz -rln7AoIBAQCHK1TMK2ApgC8/pjboBdNynhbox6BHDumAVVkCWfQLeLKSaRCp/k3s -EZlAq/L6KMUmwUBzcF2XJ8y+fqL3lD7XNJbW32I9HJgr84CA5uVOP7q3nHkXbMc+ -474jwCrIb/8mT+E8X2HORD3cOS8EqHAOHPi4aU1oCk+Ko9vRXWQXd7t9MFJcqsTH -9nyNVpO/jRp5qrPvmWhoodb3F+TNFSDdP8lATwuljFQP4mNJ/bjx9QrfUDn17Igh -vIMcS0uIXibIv/t3Z9+qGHHP2vMgrqZZMcUvNgzEQksRXs/2gAMd/tSqqZyTc8MS -Np6AGb9fY19U+pu0+iyB/vaIbxs5NoppAoIBAQCdpwKUiaGERg7f6I8f3fy+vvYQ -RyeNbizFKSEBwyE0b9izImUuKtlOqpK2XbuFZkEXVJ8juWU/7YurMIsBdosFegPu 
-qxtLEq2AOBtxxRWsLWZAaaesLh6MS0YJ6YjibuK1ITfiKInIkXdc65TQ6BXXsZme -4tQmnCY+C70iG5Xnt6ImH0/FEgnyBbbTHYvFqPTxDFy5Xu0cbtRgEu6rFK5GoYur -35BGoV1tYa50y3dHR79cDYp5sPM/qZ9teEnV++dQKCRJ4oOcGsYBHqc6tEjCLWpv -ji6ZAgx0TbI3oQtECNdpT2cSvYRdSrKQth7fPVo/FhLMrmc6d18cnZswXNYQ +MIIJKQIBAAKCAgEA3F017q/obCM1SsHY5dFz72pFgVMhBIZ6kdIInbFv7RmEykZz +ubbJnrgwgYDO5FKGUNO+a80AbjIvBrtPtXs9Ip/QDg0jqgw/MOADCxCzYnAQ2Ew2 +y1PfspGtdPhLNTmrO8+AxU2XmjsYY0+ysgUQQttOs9hJ79pIsKGBEES8g9oJTiIf +tKgCxCIuhiZC+AgjeIQZUB9ccifmOGrCJYrD6LBuNGoQNW2/ykqjuHE8219dv1hV +do8azcp/WmejjQguZyU3S/AofnyyNE24rWpXbbFs+9FFaUXd8g/fWCwrRmcXpOaE +lvkmMZyuT9kuglHsvpzzGGNSUpvVoPfldk/4JY/kJrA2G5pgTX6mGRYGEN0jmlCa +yg/ZFn36G0mA5ZBH4Qln+lKUSjJH8bhlFXvXlE3Mc34OCdOAp1TRfOT/qCRKo9A5 +KCjVOvG5MAKE8TZnTFLCSx5gK/EdQ2iV7Sm3aVc2P4eEJh+nvv1LDVLQEAak6U+u +sZN5+Wnu7wDKSlh80vTTtoqls5Uo3gIxHYnqX5Fj6nwCzGjjXISNE4OKZLuk3can +mciEES3plUrut+O6a2JWiDoCrwX4blYXhtL92Xaer/Mk1TSf2JsmL6pODoapsA0S +CHtpcgoodxdKriy1qUGsiNlPNVWjASGyKXoEZdv49wyoZuysudl1aS1w42UCAwEA +AQKCAgAXD9TfxfPCXWzrsJ3NGhPSr9crpvzYRw/3cs5esn3O3Sd92SGuAz3WfoWV +CAX0SdlaBs7xjo1yUDjbsNQGtNRmaz3lj+Ug8WcrlkYQl7mDnnbPgX+6h8HsI5LO +SwM+mWpyN/p3Vkd8vJ0wx4Z2sFD4rjruV2m60FK11DEi+A6X6JmmCQGIcTeDjzrk +jzHdrfxdqyAlt80qT+1Sui7XVE5sa7Uc3HzAcAaXr81dNXyeThIMPxJdS1y4F258 +kkbA27pU0Rrtt5SFUvIoxyQsrJRkcSJsDYVWHxm7MNi5luXF2G7WXcmX2JCcCz8I +MZJ3JlvAbGyEgOB8r2e2u5AoHEu7xjpjJ0/6smmig7LDe96uNpg6zDwS3xl6rAup +qgwJ5TTwY8BydVOtDqe5Na8yqLtwMr0yA+k2Hz856mzCTJEOI9TaOq/jtq+n4AXW +lkBai762oVKSKYCVJSK6eslTf2bAqjT3jakbgqJLKmMo5XvCnYUWWIve0RhQMNT4 +0tiLCxKurYa7xPqgW26c/fEHvdBDrU1JAablcAjsW9sJ+KIlilK02M9DqF0RnBBI +wK7Ql76ugsYbp8WBXkpFjMMyciMhqH8xJiyi7MuiCwpBGQwxBHHaX7f9OqDWOClR +mVGjrZuk9oiI3waUjGG50SzLBlMbeIzMdXgRuM7fByq6DG0VgQKCAQEA8d2YCODh +ApCM7GB/tmANfVQ0tnfxUT3ceEAOH7XkI+nz87Zv/1k6NOklCMi+nUwoGQfM5CxU +NdWC0I7wI1ATdllPStUAJ4c8xtdEdlrLHBcGNvhYbbqMWRsNGITstnAx3tZ4X32H +duhS5wfPE/X25YMN+8Dtm7jifEMqoCUV55iZxfYs+LXxQF03KVAJ5Ie5a1ac5UCz +zzu9fbYSs70ByJsHWt4ZOsPkJVmkmuXzUPvr72otUYYSdju0PgbJqRoEyTbCh3HT 
+zo0emKl8jj7oTSzVNjb6AaB6nsKco6wQLQSlaxBzo0j7TBRylVtG81CYjr5LFpp0 +UQrHjLZnSTvC5wKCAQEA6T3yH6bFc9FcJGOW1jYozQ5y+NWkXv3MVFIf3IqPT76p +rMEI6krmGUKi+otOaV2Axy36kOcbntzENMg++LPCe0SczK14+pwUrI91cp/Ega6K ++/4sKvh8WDZhzVYkWs76UiRj7Ef4MvtsaPAcFN/Ek+fItDHFRoSGdm+vx+j3ZDxx +tdRudTs0kYyhmdlM0kZTbXsmz37x6+45uO16s+D2lvX2PXM9Lve9z/Ti6nn9QvIF +kM9ZmAU6epmMPsGKM9WOK/sTcPUnd3Ife9tmi3BRAAygDk6hFx67kAsc124oLeZ3 +0CJGshA+50hBAL7wiybLrBMRzHrElzsicppVbn3p0wKCAQAldmRBI8vWYNtjFYNS +lUghnHRZuvRG2CUY/xrw8HR415jwq9ZnH8PzRBV3adiUdqJTVjD3OqKEgCC1+x3Y +6mNJVoYAmkNe3ASe6+LvzhpdrHdK9maEAHwSpSz/Gj+r9m7TDDcy2zerRErq+/uo +JNXsMMNutjBXiWiTRLgKfBQLfkh7MClBELVgec+8d2hA3IDszkqY+8+eDqvIF/aH +noPzNYgLHBGeV48z9dGYKHvqlEq0F6cTVIfxhkfhv51msuAA5pl07z2WZadSkBX5 +1maW5ZXUwukwbVHw20X12AXdYzXYAoFWzkwWOaiR18SClX47xd/NjXjswJWuBuay +oi4LAoIBAQDirP0+nYmQAYwXIWJaVNBaWQyLoLXaS7XkzNuCLncQ/S9RYVkUui3d +ptFVxUUzSVf6O0kkwjYpskxNL79jXPBJdGke0gidJktBWTq/Z15G2ibguCicqlnO +MSvjrzAtwLGuWwdxfpBMm+TEJ3ZjIwWc6Mo5tZUP74PuXqTrGBI2LDgmiom/DQcN +3SrAplrukMJLyD/zsF/U9vTKMKHrZ1q/Y9Mn7XMszkB+dnSBhIUKJsQZ9CoSgCJR +PCD8bIOv1IATZjOCt/7fKt5GNPf30/QkpCB5RxlvqsKGPwaMp9YMpcsTT/x82SUJ +CUODQg3sbovKc838d+PPRf04e51DgMNZAoIBAQC2uiJjluIKRabFSeSfu4+I6cEY +kXI0F65UAudFmyXVfaQbO9DR0Y4bWPDfXAUimRvxixEhSrSIBZ/itVxzhOvqZrl1 +XRCZsTOVoz7Z8lcd8opxPBnWDk1m2nyajwPXp8ZLo67FG0bWbayVBBRxyvirrZjG +PatRKMTyVLTCD+WlQiP4b4kShKdWA4ZH6pHUIviAotWqXMTsEKfupg9avxEk8GtH +GZnXAmpnBqmbU4+3rNOaCZLdekVCoEtW0NGZEYEV5UQnZoWY6AiUUxGGE/qionKH +sdKN+8CowudMH02bo1a0akS+eh+D/SGc/MLofH7uPWtX7l8sTvQivzDIkZeu -----END RSA PRIVATE KEY----- From cee97e1a9d369b28aac75a8550effbab9d570506 Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Fri, 18 Oct 2024 08:15:30 -0700 Subject: [PATCH 162/193] Test on PostgreSQL 17 (#1189) --- .github/workflows/tests.yml | 7 +++---- README.rst | 5 +++-- asyncpg/exceptions/__init__.py | 25 +++++++++++++++++++------ docs/index.rst | 2 +- 4 files changed, 26 insertions(+), 13 deletions(-) diff --git a/.github/workflows/tests.yml 
b/.github/workflows/tests.yml index 8b5bc7f5..ce06e7f5 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -51,10 +51,9 @@ jobs: __version__\s*=\s*(?:['"])([[:PEP440:]])(?:['"]) - name: Setup PostgreSQL - uses: tj-actions/install-postgresql@2a80e9368dff47cd05fee5bf3cf7d88f68c2f8e9 # v3.1.1 if: steps.release.outputs.version == 0 && matrix.os == 'macos-latest' - with: - postgresql-version: 16 + run: | + brew install postgresql - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v5 @@ -83,7 +82,7 @@ jobs: test-postgres: strategy: matrix: - postgres-version: ["9.5", "9.6", "10", "11", "12", "13", "14", "15", "16"] + postgres-version: ["9.5", "9.6", "10", "11", "12", "13", "14", "15", "16", "17"] runs-on: ubuntu-latest diff --git a/README.rst b/README.rst index f7583a89..32fd1693 100644 --- a/README.rst +++ b/README.rst @@ -14,8 +14,9 @@ framework. You can read more about asyncpg in an introductory `blog post `_. asyncpg requires Python 3.8 or later and is supported for PostgreSQL -versions 9.5 to 16. Older PostgreSQL versions or other databases implementing -the PostgreSQL protocol *may* work, but are not being actively tested. +versions 9.5 to 17. Other PostgreSQL versions or other databases +implementing the PostgreSQL protocol *may* work, but are not being +actively tested. 
Documentation diff --git a/asyncpg/exceptions/__init__.py b/asyncpg/exceptions/__init__.py index 8c97d5a0..752fd007 100644 --- a/asyncpg/exceptions/__init__.py +++ b/asyncpg/exceptions/__init__.py @@ -121,6 +121,10 @@ class StackedDiagnosticsAccessedWithoutActiveHandlerError(DiagnosticsError): sqlstate = '0Z002' +class InvalidArgumentForXqueryError(_base.PostgresError): + sqlstate = '10608' + + class CaseNotFoundError(_base.PostgresError): sqlstate = '20000' @@ -485,6 +489,10 @@ class IdleInTransactionSessionTimeoutError(InvalidTransactionStateError): sqlstate = '25P03' +class TransactionTimeoutError(InvalidTransactionStateError): + sqlstate = '25P04' + + class InvalidSQLStatementNameError(_base.PostgresError): sqlstate = '26000' @@ -900,6 +908,10 @@ class DuplicateFileError(PostgresSystemError): sqlstate = '58P02' +class FileNameTooLongError(PostgresSystemError): + sqlstate = '58P03' + + class SnapshotTooOldError(_base.PostgresError): sqlstate = '72000' @@ -1095,9 +1107,9 @@ class IndexCorruptedError(InternalServerError): 'FDWTableNotFoundError', 'FDWTooManyHandlesError', 'FDWUnableToCreateExecutionError', 'FDWUnableToCreateReplyError', 'FDWUnableToEstablishConnectionError', 'FeatureNotSupportedError', - 'ForeignKeyViolationError', 'FunctionExecutedNoReturnStatementError', - 'GeneratedAlwaysError', 'GroupingError', - 'HeldCursorRequiresSameIsolationLevelError', + 'FileNameTooLongError', 'ForeignKeyViolationError', + 'FunctionExecutedNoReturnStatementError', 'GeneratedAlwaysError', + 'GroupingError', 'HeldCursorRequiresSameIsolationLevelError', 'IdleInTransactionSessionTimeoutError', 'IdleSessionTimeoutError', 'ImplicitZeroBitPadding', 'InFailedSQLTransactionError', 'InappropriateAccessModeForBranchTransactionError', @@ -1112,6 +1124,7 @@ class IndexCorruptedError(InternalServerError): 'InvalidArgumentForPowerFunctionError', 'InvalidArgumentForSQLJsonDatetimeFunctionError', 'InvalidArgumentForWidthBucketFunctionError', + 'InvalidArgumentForXqueryError', 
'InvalidAuthorizationSpecificationError', 'InvalidBinaryRepresentationError', 'InvalidCachedStatementError', 'InvalidCatalogNameError', 'InvalidCharacterValueForCastError', @@ -1184,9 +1197,9 @@ class IndexCorruptedError(InternalServerError): 'TooManyJsonObjectMembersError', 'TooManyRowsError', 'TransactionIntegrityConstraintViolationError', 'TransactionResolutionUnknownError', 'TransactionRollbackError', - 'TriggerProtocolViolatedError', 'TriggeredActionError', - 'TriggeredDataChangeViolationError', 'TrimError', - 'UndefinedColumnError', 'UndefinedFileError', + 'TransactionTimeoutError', 'TriggerProtocolViolatedError', + 'TriggeredActionError', 'TriggeredDataChangeViolationError', + 'TrimError', 'UndefinedColumnError', 'UndefinedFileError', 'UndefinedFunctionError', 'UndefinedObjectError', 'UndefinedParameterError', 'UndefinedTableError', 'UniqueViolationError', 'UnsafeNewEnumValueUsageError', diff --git a/docs/index.rst b/docs/index.rst index 02a8457d..e0f91813 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -16,7 +16,7 @@ of PostgreSQL server binary protocol for use with Python's ``asyncio`` framework. **asyncpg** requires Python 3.8 or later and is supported for PostgreSQL -versions 9.5 to 16. Older PostgreSQL versions or other databases implementing +versions 9.5 to 17. Other PostgreSQL versions or other databases implementing the PostgreSQL protocol *may* work, but are not being actively tested. Contents From b732b4fc72254149661231274d9ee199c0305dbf Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Fri, 18 Oct 2024 08:38:30 -0700 Subject: [PATCH 163/193] tests: Attempt to inhibit spurious ConnectionResetError on Windows (#1190) The proactor stuff on Windows raises a spurious `ConnectionResetError` while trying to shut down a socket in the `connection_lost` path. This is likely a Python bug (and nobody noticed because this is a background exception). Try to filter it out before complaining. 
--- asyncpg/_testbase/__init__.py | 16 ++++++++++++++-- 1 file changed, 14 insertions(+), 2 deletions(-) diff --git a/asyncpg/_testbase/__init__.py b/asyncpg/_testbase/__init__.py index 7aca834f..d4e0d43d 100644 --- a/asyncpg/_testbase/__init__.py +++ b/asyncpg/_testbase/__init__.py @@ -117,10 +117,22 @@ def setUp(self): self.__unhandled_exceptions = [] def tearDown(self): - if self.__unhandled_exceptions: + excs = [] + for exc in self.__unhandled_exceptions: + if isinstance(exc, ConnectionResetError): + texc = traceback.TracebackException.from_exception( + exc, lookup_lines=False) + if texc.stack[-1].name == "_call_connection_lost": + # On Windows calling socket.shutdown may raise + # ConnectionResetError, which happens in the + # finally block of _call_connection_lost. + continue + excs.append(exc) + + if excs: formatted = [] - for i, context in enumerate(self.__unhandled_exceptions): + for i, context in enumerate(excs): formatted.append(self._format_loop_exception(context, i + 1)) self.fail( From 73f2209d72b9d92cc48b673d5ffe15f20165f33b Mon Sep 17 00:00:00 2001 From: Ross MacArthur Date: Fri, 18 Oct 2024 20:27:33 +0200 Subject: [PATCH 164/193] Add `fetchmany` to execute many *and* return rows (#1175) Co-authored-by: Elvis Pranskevichus --- asyncpg/connection.py | 52 ++++++++++++++++++++++++++++++++-- asyncpg/pool.py | 16 +++++++++++ asyncpg/prepared_stmt.py | 28 +++++++++++++++++- asyncpg/protocol/coreproto.pxd | 2 +- asyncpg/protocol/coreproto.pyx | 6 ++-- asyncpg/protocol/protocol.pyx | 4 ++- tests/test_execute.py | 39 +++++++++++++++++++++++++ tests/test_prepare.py | 14 +++++++++ 8 files changed, 153 insertions(+), 8 deletions(-) diff --git a/asyncpg/connection.py b/asyncpg/connection.py index 6ac2a09d..79711c0c 100644 --- a/asyncpg/connection.py +++ b/asyncpg/connection.py @@ -756,6 +756,44 @@ async def fetchrow( return None return data[0] + async def fetchmany( + self, query, args, *, timeout: float=None, record_class=None + ): + """Run a query for each 
sequence of arguments in *args* + and return the results as a list of :class:`Record`. + + :param query: + Query to execute. + :param args: + An iterable containing sequences of arguments for the query. + :param float timeout: + Optional timeout value in seconds. + :param type record_class: + If specified, the class to use for records returned by this method. + Must be a subclass of :class:`~asyncpg.Record`. If not specified, + a per-connection *record_class* is used. + + :return list: + A list of :class:`~asyncpg.Record` instances. If specified, the + actual type of list elements would be *record_class*. + + Example: + + .. code-block:: pycon + + >>> rows = await con.fetchmany(''' + ... INSERT INTO mytab (a, b) VALUES ($1, $2) RETURNING a; + ... ''', [('x', 1), ('y', 2), ('z', 3)]) + >>> rows + [, , ] + + .. versionadded:: 0.30.0 + """ + self._check_open() + return await self._executemany( + query, args, timeout, return_rows=True, record_class=record_class + ) + async def copy_from_table(self, table_name, *, output, columns=None, schema_name=None, timeout=None, format=None, oids=None, delimiter=None, @@ -1896,17 +1934,27 @@ async def __execute( ) return result, stmt - async def _executemany(self, query, args, timeout): + async def _executemany( + self, + query, + args, + timeout, + return_rows=False, + record_class=None, + ): executor = lambda stmt, timeout: self._protocol.bind_execute_many( state=stmt, args=args, portal_name='', timeout=timeout, + return_rows=return_rows, ) timeout = self._protocol._get_timeout(timeout) with self._stmt_exclusive_section: with self._time_and_log(query, args, timeout): - result, _ = await self._do_execute(query, executor, timeout) + result, _ = await self._do_execute( + query, executor, timeout, record_class=record_class + ) return result async def _do_execute( diff --git a/asyncpg/pool.py b/asyncpg/pool.py index 8a00d64b..19ced84b 100644 --- a/asyncpg/pool.py +++ b/asyncpg/pool.py @@ -609,6 +609,22 @@ async def fetchrow(self, 
query, *args, timeout=None, record_class=None): record_class=record_class ) + async def fetchmany(self, query, args, *, timeout=None, record_class=None): + """Run a query for each sequence of arguments in *args* + and return the results as a list of :class:`Record`. + + Pool performs this operation using one of its connections. Other than + that, it behaves identically to + :meth:`Connection.fetchmany() + `. + + .. versionadded:: 0.30.0 + """ + async with self.acquire() as con: + return await con.fetchmany( + query, args, timeout=timeout, record_class=record_class + ) + async def copy_from_table( self, table_name, diff --git a/asyncpg/prepared_stmt.py b/asyncpg/prepared_stmt.py index 195d0056..d66a5ad3 100644 --- a/asyncpg/prepared_stmt.py +++ b/asyncpg/prepared_stmt.py @@ -210,6 +210,27 @@ async def fetchrow(self, *args, timeout=None): return None return data[0] + @connresource.guarded + async def fetchmany(self, args, *, timeout=None): + """Execute the statement and return a list of :class:`Record` objects. + + :param args: Query arguments. + :param float timeout: Optional timeout value in seconds. + + :return: A list of :class:`Record` instances. + + .. versionadded:: 0.30.0 + """ + return await self.__do_execute( + lambda protocol: protocol.bind_execute_many( + self._state, + args, + portal_name='', + timeout=timeout, + return_rows=True, + ) + ) + @connresource.guarded async def executemany(self, args, *, timeout: float=None): """Execute the statement for each sequence of arguments in *args*. 
@@ -222,7 +243,12 @@ async def executemany(self, args, *, timeout: float=None): """ return await self.__do_execute( lambda protocol: protocol.bind_execute_many( - self._state, args, '', timeout)) + self._state, + args, + portal_name='', + timeout=timeout, + return_rows=False, + )) async def __do_execute(self, executor): protocol = self._connection._protocol diff --git a/asyncpg/protocol/coreproto.pxd b/asyncpg/protocol/coreproto.pxd index f6a0b08f..34c7c712 100644 --- a/asyncpg/protocol/coreproto.pxd +++ b/asyncpg/protocol/coreproto.pxd @@ -171,7 +171,7 @@ cdef class CoreProtocol: cdef _bind_execute(self, str portal_name, str stmt_name, WriteBuffer bind_data, int32_t limit) cdef bint _bind_execute_many(self, str portal_name, str stmt_name, - object bind_data) + object bind_data, bint return_rows) cdef bint _bind_execute_many_more(self, bint first=*) cdef _bind_execute_many_fail(self, object error, bint first=*) cdef _bind(self, str portal_name, str stmt_name, diff --git a/asyncpg/protocol/coreproto.pyx b/asyncpg/protocol/coreproto.pyx index 4ef438cd..19857878 100644 --- a/asyncpg/protocol/coreproto.pyx +++ b/asyncpg/protocol/coreproto.pyx @@ -1020,12 +1020,12 @@ cdef class CoreProtocol: self._send_bind_message(portal_name, stmt_name, bind_data, limit) cdef bint _bind_execute_many(self, str portal_name, str stmt_name, - object bind_data): + object bind_data, bint return_rows): self._ensure_connected() self._set_state(PROTOCOL_BIND_EXECUTE_MANY) - self.result = None - self._discard_data = True + self.result = [] if return_rows else None + self._discard_data = not return_rows self._execute_iter = bind_data self._execute_portal_name = portal_name self._execute_stmt_name = stmt_name diff --git a/asyncpg/protocol/protocol.pyx b/asyncpg/protocol/protocol.pyx index 1459d908..bd2ad05c 100644 --- a/asyncpg/protocol/protocol.pyx +++ b/asyncpg/protocol/protocol.pyx @@ -212,6 +212,7 @@ cdef class BaseProtocol(CoreProtocol): args, portal_name: str, timeout, + return_rows: bool, 
): if self.cancel_waiter is not None: await self.cancel_waiter @@ -237,7 +238,8 @@ cdef class BaseProtocol(CoreProtocol): more = self._bind_execute_many( portal_name, state.name, - arg_bufs) # network op + arg_bufs, + return_rows) # network op self.last_query = state.query self.statement = state diff --git a/tests/test_execute.py b/tests/test_execute.py index 78d8c124..f8a0e43a 100644 --- a/tests/test_execute.py +++ b/tests/test_execute.py @@ -139,6 +139,45 @@ async def test_executemany_basic(self): ('a', 1), ('b', 2), ('c', 3), ('d', 4) ]) + async def test_executemany_returning(self): + result = await self.con.fetchmany(''' + INSERT INTO exmany VALUES($1, $2) RETURNING a, b + ''', [ + ('a', 1), ('b', 2), ('c', 3), ('d', 4) + ]) + self.assertEqual(result, [ + ('a', 1), ('b', 2), ('c', 3), ('d', 4) + ]) + result = await self.con.fetch(''' + SELECT * FROM exmany + ''') + self.assertEqual(result, [ + ('a', 1), ('b', 2), ('c', 3), ('d', 4) + ]) + + # Empty set + await self.con.fetchmany(''' + INSERT INTO exmany VALUES($1, $2) RETURNING a, b + ''', ()) + result = await self.con.fetch(''' + SELECT * FROM exmany + ''') + self.assertEqual(result, [ + ('a', 1), ('b', 2), ('c', 3), ('d', 4) + ]) + + # Without "RETURNING" + result = await self.con.fetchmany(''' + INSERT INTO exmany VALUES($1, $2) + ''', [('e', 5), ('f', 6)]) + self.assertEqual(result, []) + result = await self.con.fetch(''' + SELECT * FROM exmany + ''') + self.assertEqual(result, [ + ('a', 1), ('b', 2), ('c', 3), ('d', 4), ('e', 5), ('f', 6) + ]) + async def test_executemany_bad_input(self): with self.assertRaisesRegex( exceptions.DataError, diff --git a/tests/test_prepare.py b/tests/test_prepare.py index 5911ccf2..661021bd 100644 --- a/tests/test_prepare.py +++ b/tests/test_prepare.py @@ -611,3 +611,17 @@ async def test_prepare_explicitly_named(self): 'prepared statement "foobar" already exists', ): await self.con.prepare('select 1', name='foobar') + + async def test_prepare_fetchmany(self): + tr = 
self.con.transaction() + await tr.start() + try: + await self.con.execute('CREATE TABLE fetchmany (a int, b text)') + + stmt = await self.con.prepare( + 'INSERT INTO fetchmany (a, b) VALUES ($1, $2) RETURNING a, b' + ) + result = await stmt.fetchmany([(1, 'a'), (2, 'b'), (3, 'c')]) + self.assertEqual(result, [(1, 'a'), (2, 'b'), (3, 'c')]) + finally: + await tr.rollback() From 3ee19baaba3971f8f21d2190e443b1e5f25a9052 Mon Sep 17 00:00:00 2001 From: Daniel Manson Date: Fri, 18 Oct 2024 19:54:20 +0100 Subject: [PATCH 165/193] Add connect_fn kwarg to Pool to better support GCP's CloudSQL (#1170) Co-authored-by: Elvis Pranskevichus --- asyncpg/_testbase/__init__.py | 13 +++++++--- asyncpg/pool.py | 49 +++++++++++++++++++++++++++++------ tests/test_pool.py | 21 ++++++++++++++- 3 files changed, 71 insertions(+), 12 deletions(-) diff --git a/asyncpg/_testbase/__init__.py b/asyncpg/_testbase/__init__.py index d4e0d43d..2d785dac 100644 --- a/asyncpg/_testbase/__init__.py +++ b/asyncpg/_testbase/__init__.py @@ -274,6 +274,7 @@ def create_pool(dsn=None, *, max_size=10, max_queries=50000, max_inactive_connection_lifetime=60.0, + connect=None, setup=None, init=None, loop=None, @@ -283,12 +284,18 @@ def create_pool(dsn=None, *, **connect_kwargs): return pool_class( dsn, - min_size=min_size, max_size=max_size, - max_queries=max_queries, loop=loop, setup=setup, init=init, + min_size=min_size, + max_size=max_size, + max_queries=max_queries, + loop=loop, + connect=connect, + setup=setup, + init=init, max_inactive_connection_lifetime=max_inactive_connection_lifetime, connection_class=connection_class, record_class=record_class, - **connect_kwargs) + **connect_kwargs, + ) class ClusterTestCase(TestCase): diff --git a/asyncpg/pool.py b/asyncpg/pool.py index 19ced84b..a18dd3b3 100644 --- a/asyncpg/pool.py +++ b/asyncpg/pool.py @@ -313,7 +313,7 @@ class Pool: __slots__ = ( '_queue', '_loop', '_minsize', '_maxsize', - '_init', '_connect_args', '_connect_kwargs', + '_init', '_connect', 
'_connect_args', '_connect_kwargs', '_holders', '_initialized', '_initializing', '_closing', '_closed', '_connection_class', '_record_class', '_generation', '_setup', '_max_queries', '_max_inactive_connection_lifetime' @@ -324,8 +324,9 @@ def __init__(self, *connect_args, max_size, max_queries, max_inactive_connection_lifetime, - setup, - init, + connect=None, + setup=None, + init=None, loop, connection_class, record_class, @@ -385,11 +386,14 @@ def __init__(self, *connect_args, self._closing = False self._closed = False self._generation = 0 - self._init = init + + self._connect = connect if connect is not None else connection.connect self._connect_args = connect_args self._connect_kwargs = connect_kwargs self._setup = setup + self._init = init + self._max_queries = max_queries self._max_inactive_connection_lifetime = \ max_inactive_connection_lifetime @@ -503,13 +507,25 @@ def set_connect_args(self, dsn=None, **connect_kwargs): self._connect_kwargs = connect_kwargs async def _get_new_connection(self): - con = await connection.connect( + con = await self._connect( *self._connect_args, loop=self._loop, connection_class=self._connection_class, record_class=self._record_class, **self._connect_kwargs, ) + if not isinstance(con, self._connection_class): + good = self._connection_class + good_n = f'{good.__module__}.{good.__name__}' + bad = type(con) + if bad.__module__ == "builtins": + bad_n = bad.__name__ + else: + bad_n = f'{bad.__module__}.{bad.__name__}' + raise exceptions.InterfaceError( + "expected pool connect callback to return an instance of " + f"'{good_n}', got " f"'{bad_n}'" + ) if self._init is not None: try: @@ -1017,6 +1033,7 @@ def create_pool(dsn=None, *, max_size=10, max_queries=50000, max_inactive_connection_lifetime=300.0, + connect=None, setup=None, init=None, loop=None, @@ -1099,6 +1116,13 @@ def create_pool(dsn=None, *, Number of seconds after which inactive connections in the pool will be closed. Pass ``0`` to disable this mechanism. 
+ :param coroutine connect: + A coroutine that is called instead of + :func:`~asyncpg.connection.connect` whenever the pool needs to make a + new connection. Must return an instance of type specified by + *connection_class* or :class:`~asyncpg.connection.Connection` if + *connection_class* was not specified. + :param coroutine setup: A coroutine to prepare a connection right before it is returned from :meth:`Pool.acquire() `. An example use @@ -1139,12 +1163,21 @@ def create_pool(dsn=None, *, .. versionchanged:: 0.22.0 Added the *record_class* parameter. + + .. versionchanged:: 0.30.0 + Added the *connect* parameter. """ return Pool( dsn, connection_class=connection_class, record_class=record_class, - min_size=min_size, max_size=max_size, - max_queries=max_queries, loop=loop, setup=setup, init=init, + min_size=min_size, + max_size=max_size, + max_queries=max_queries, + loop=loop, + connect=connect, + setup=setup, + init=init, max_inactive_connection_lifetime=max_inactive_connection_lifetime, - **connect_kwargs) + **connect_kwargs, + ) diff --git a/tests/test_pool.py b/tests/test_pool.py index 2407b817..5bd70bd9 100644 --- a/tests/test_pool.py +++ b/tests/test_pool.py @@ -136,6 +136,12 @@ async def setup(con): async def test_pool_07(self): cons = set() + connect_called = 0 + + async def connect(*args, **kwargs): + nonlocal connect_called + connect_called += 1 + return await pg_connection.connect(*args, **kwargs) async def setup(con): if con._con not in cons: # `con` is `PoolConnectionProxy`. 
@@ -152,13 +158,26 @@ async def user(pool): raise RuntimeError('init was not called') async with self.create_pool(database='postgres', - min_size=2, max_size=5, + min_size=2, + max_size=5, + connect=connect, init=init, setup=setup) as pool: users = asyncio.gather(*[user(pool) for _ in range(10)]) await users self.assertEqual(len(cons), 5) + self.assertEqual(connect_called, 5) + + async def bad_connect(*args, **kwargs): + return 1 + + with self.assertRaisesRegex( + asyncpg.InterfaceError, + "expected pool connect callback to return an instance of " + "'asyncpg\\.connection\\.Connection', got 'int'" + ): + await self.create_pool(database='postgres', connect=bad_connect) async def test_pool_08(self): pool = await self.create_pool(database='postgres', From 3ef884efb58497e92dc54481537042edd2ea5b30 Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Fri, 18 Oct 2024 13:11:11 -0700 Subject: [PATCH 166/193] Upgrade Sphinx --- docs/conf.py | 1 - pyproject.toml | 3 +-- 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index 27924236..50d20ffc 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -26,7 +26,6 @@ 'sphinx.ext.viewcode', 'sphinx.ext.githubpages', 'sphinx.ext.intersphinx', - 'sphinxcontrib.asyncio', ] add_module_names = False diff --git a/pyproject.toml b/pyproject.toml index 15c034f4..4bb9e8f0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -49,8 +49,7 @@ test = [ 'mypy~=1.8.0', ] docs = [ - 'Sphinx~=5.3.0', - 'sphinxcontrib-asyncio~=0.3.0', + 'Sphinx~=8.1.3', 'sphinx_rtd_theme>=1.2.2', ] From f6ec755cab771682c5cc9b881a67097691415636 Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Fri, 18 Oct 2024 13:22:57 -0700 Subject: [PATCH 167/193] Allow customizing connection state reset (#1191) A coroutine can be passed to the new `reset` argument of `create_pool` to control what happens to the connection when it is returned back to the pool by `release()`. By default `Connection.reset()` is called. 
Additionally, `Connection.get_reset_query` is renamed from `Connection._get_reset_query` to enable an alternative way of customizing the reset process via subclassing. Closes: #780 Closes: #1146 --- asyncpg/connection.py | 45 +++++++++++++++++++++++++++++++++++++------ asyncpg/pool.py | 36 ++++++++++++++++++++++++++++++---- tests/test_pool.py | 17 +++++++++++++++- 3 files changed, 87 insertions(+), 11 deletions(-) diff --git a/asyncpg/connection.py b/asyncpg/connection.py index 79711c0c..3a86466c 100644 --- a/asyncpg/connection.py +++ b/asyncpg/connection.py @@ -1515,11 +1515,10 @@ def terminate(self): self._abort() self._cleanup() - async def reset(self, *, timeout=None): + async def _reset(self): self._check_open() self._listeners.clear() self._log_listeners.clear() - reset_query = self._get_reset_query() if self._protocol.is_in_transaction() or self._top_xact is not None: if self._top_xact is None or not self._top_xact._managed: @@ -1531,10 +1530,36 @@ async def reset(self, *, timeout=None): }) self._top_xact = None - reset_query = 'ROLLBACK;\n' + reset_query + await self.execute("ROLLBACK") + + async def reset(self, *, timeout=None): + """Reset the connection state. + + Calling this will reset the connection session state to a state + resembling that of a newly obtained connection. Namely, an open + transaction (if any) is rolled back, open cursors are closed, + all `LISTEN `_ + registrations are removed, all session configuration + variables are reset to their default values, and all advisory locks + are released. + + Note that the above describes the default query returned by + :meth:`Connection.get_reset_query`. If one overloads the method + by subclassing ``Connection``, then this method will do whatever + the overloaded method returns, except open transactions are always + terminated and any callbacks registered by + :meth:`Connection.add_listener` or :meth:`Connection.add_log_listener` + are removed. 
- if reset_query: - await self.execute(reset_query, timeout=timeout) + :param float timeout: + A timeout for resetting the connection. If not specified, defaults + to no timeout. + """ + async with compat.timeout(timeout): + await self._reset() + reset_query = self.get_reset_query() + if reset_query: + await self.execute(reset_query) def _abort(self): # Put the connection into the aborted state. @@ -1695,7 +1720,15 @@ def _unwrap(self): con_ref = self._proxy return con_ref - def _get_reset_query(self): + def get_reset_query(self): + """Return the query sent to server on connection release. + + The query returned by this method is used by :meth:`Connection.reset`, + which is, in turn, used by :class:`~asyncpg.pool.Pool` before making + the connection available to another acquirer. + + .. versionadded:: 0.30.0 + """ if self._reset_query is not None: return self._reset_query diff --git a/asyncpg/pool.py b/asyncpg/pool.py index a18dd3b3..e3898d53 100644 --- a/asyncpg/pool.py +++ b/asyncpg/pool.py @@ -210,7 +210,12 @@ async def release(self, timeout): if budget is not None: budget -= time.monotonic() - started - await self._con.reset(timeout=budget) + if self._pool._reset is not None: + async with compat.timeout(budget): + await self._con._reset() + await self._pool._reset(self._con) + else: + await self._con.reset(timeout=budget) except (Exception, asyncio.CancelledError) as ex: # If the `reset` call failed, terminate the connection. 
# A new one will be created when `acquire` is called @@ -313,7 +318,7 @@ class Pool: __slots__ = ( '_queue', '_loop', '_minsize', '_maxsize', - '_init', '_connect', '_connect_args', '_connect_kwargs', + '_init', '_connect', '_reset', '_connect_args', '_connect_kwargs', '_holders', '_initialized', '_initializing', '_closing', '_closed', '_connection_class', '_record_class', '_generation', '_setup', '_max_queries', '_max_inactive_connection_lifetime' @@ -327,6 +332,7 @@ def __init__(self, *connect_args, connect=None, setup=None, init=None, + reset=None, loop, connection_class, record_class, @@ -393,6 +399,7 @@ def __init__(self, *connect_args, self._setup = setup self._init = init + self._reset = reset self._max_queries = max_queries self._max_inactive_connection_lifetime = \ @@ -1036,6 +1043,7 @@ def create_pool(dsn=None, *, connect=None, setup=None, init=None, + reset=None, loop=None, connection_class=connection.Connection, record_class=protocol.Record, @@ -1125,7 +1133,7 @@ def create_pool(dsn=None, *, :param coroutine setup: A coroutine to prepare a connection right before it is returned - from :meth:`Pool.acquire() `. An example use + from :meth:`Pool.acquire()`. An example use case would be to automatically set up notifications listeners for all connections of a pool. @@ -1137,6 +1145,25 @@ def create_pool(dsn=None, *, or :meth:`Connection.set_type_codec() <\ asyncpg.connection.Connection.set_type_codec>`. + :param coroutine reset: + A coroutine to reset a connection before it is returned to the pool by + :meth:`Pool.release()`. The function is supposed + to reset any changes made to the database session so that the next + acquirer gets the connection in a well-defined state. 
+ + The default implementation calls :meth:`Connection.reset() <\ + asyncpg.connection.Connection.reset>`, which runs the following:: + + SELECT pg_advisory_unlock_all(); + CLOSE ALL; + UNLISTEN *; + RESET ALL; + + The exact reset query is determined by detected server capabilities, + and a custom *reset* implementation can obtain the default query + by calling :meth:`Connection.get_reset_query() <\ + asyncpg.connection.Connection.get_reset_query>`. + :param loop: An asyncio event loop instance. If ``None``, the default event loop will be used. @@ -1165,7 +1192,7 @@ def create_pool(dsn=None, *, Added the *record_class* parameter. .. versionchanged:: 0.30.0 - Added the *connect* parameter. + Added the *connect* and *reset* parameters. """ return Pool( dsn, @@ -1178,6 +1205,7 @@ def create_pool(dsn=None, *, connect=connect, setup=setup, init=init, + reset=reset, max_inactive_connection_lifetime=max_inactive_connection_lifetime, **connect_kwargs, ) diff --git a/tests/test_pool.py b/tests/test_pool.py index 5bd70bd9..3f10ae5c 100644 --- a/tests/test_pool.py +++ b/tests/test_pool.py @@ -137,6 +137,9 @@ async def setup(con): async def test_pool_07(self): cons = set() connect_called = 0 + init_called = 0 + setup_called = 0 + reset_called = 0 async def connect(*args, **kwargs): nonlocal connect_called @@ -144,13 +147,21 @@ async def connect(*args, **kwargs): return await pg_connection.connect(*args, **kwargs) async def setup(con): + nonlocal setup_called if con._con not in cons: # `con` is `PoolConnectionProxy`. 
raise RuntimeError('init was not called before setup') + setup_called += 1 async def init(con): + nonlocal init_called if con in cons: raise RuntimeError('init was called more than once') cons.add(con) + init_called += 1 + + async def reset(con): + nonlocal reset_called + reset_called += 1 async def user(pool): async with pool.acquire() as con: @@ -162,12 +173,16 @@ async def user(pool): max_size=5, connect=connect, init=init, - setup=setup) as pool: + setup=setup, + reset=reset) as pool: users = asyncio.gather(*[user(pool) for _ in range(10)]) await users self.assertEqual(len(cons), 5) self.assertEqual(connect_called, 5) + self.assertEqual(init_called, 5) + self.assertEqual(setup_called, 10) + self.assertEqual(reset_called, 10) async def bad_connect(*args, **kwargs): return 1 From 965fb0825426edf0e4607dd85305c6f4b6c0c9f8 Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Fri, 18 Oct 2024 15:08:02 -0700 Subject: [PATCH 168/193] Fix release workflow Install Kerberos deps when testing wheels and make cibuildwheel tests run properly. --- .github/workflows/install-krb5.sh | 42 ++++++++++-- .github/workflows/install-postgres.sh | 93 +++++++++++++++------------ .github/workflows/release.yml | 29 ++++++--- .github/workflows/tests.yml | 24 +++---- asyncpg/_testbase/__init__.py | 15 ++--- asyncpg/cluster.py | 57 +++++++++++++--- pyproject.toml | 7 +- tests/test_connect.py | 10 ++- 8 files changed, 191 insertions(+), 86 deletions(-) diff --git a/.github/workflows/install-krb5.sh b/.github/workflows/install-krb5.sh index 093b8519..bdb5744d 100755 --- a/.github/workflows/install-krb5.sh +++ b/.github/workflows/install-krb5.sh @@ -1,10 +1,42 @@ #!/bin/bash set -Eexuo pipefail +shopt -s nullglob -if [ "$RUNNER_OS" == "Linux" ]; then - # Assume Ubuntu since this is the only Linux used in CI. 
- sudo apt-get update - sudo apt-get install -y --no-install-recommends \ - libkrb5-dev krb5-user krb5-kdc krb5-admin-server +if [[ $OSTYPE == linux* ]]; then + if [ "$(id -u)" = "0" ]; then + SUDO= + else + SUDO=sudo + fi + + if [ -e /etc/os-release ]; then + source /etc/os-release + elif [ -e /etc/centos-release ]; then + ID="centos" + VERSION_ID=$(cat /etc/centos-release | cut -f3 -d' ' | cut -f1 -d.) + else + echo "install-krb5.sh: cannot determine which Linux distro this is" >&2 + exit 1 + fi + + if [ "${ID}" = "debian" -o "${ID}" = "ubuntu" ]; then + export DEBIAN_FRONTEND=noninteractive + + $SUDO apt-get update + $SUDO apt-get install -y --no-install-recommends \ + libkrb5-dev krb5-user krb5-kdc krb5-admin-server + elif [ "${ID}" = "almalinux" ]; then + $SUDO dnf install -y krb5-server krb5-workstation krb5-libs krb5-devel + elif [ "${ID}" = "centos" ]; then + $SUDO yum install -y krb5-server krb5-workstation krb5-libs krb5-devel + elif [ "${ID}" = "alpine" ]; then + $SUDO apk add krb5 krb5-server krb5-dev + else + echo "install-krb5.sh: Unsupported linux distro: ${distro}" >&2 + exit 1 + fi +else + echo "install-krb5.sh: unsupported OS: ${OSTYPE}" >&2 + exit 1 fi diff --git a/.github/workflows/install-postgres.sh b/.github/workflows/install-postgres.sh index 4ffbb4d6..733c7033 100755 --- a/.github/workflows/install-postgres.sh +++ b/.github/workflows/install-postgres.sh @@ -3,51 +3,60 @@ set -Eexuo pipefail shopt -s nullglob -PGVERSION=${PGVERSION:-12} +if [[ $OSTYPE == linux* ]]; then + PGVERSION=${PGVERSION:-12} -if [ -e /etc/os-release ]; then - source /etc/os-release -elif [ -e /etc/centos-release ]; then - ID="centos" - VERSION_ID=$(cat /etc/centos-release | cut -f3 -d' ' | cut -f1 -d.) 
-else - echo "install-postgres.sh: cannot determine which Linux distro this is" >&2 - exit 1 -fi + if [ -e /etc/os-release ]; then + source /etc/os-release + elif [ -e /etc/centos-release ]; then + ID="centos" + VERSION_ID=$(cat /etc/centos-release | cut -f3 -d' ' | cut -f1 -d.) + else + echo "install-postgres.sh: cannot determine which Linux distro this is" >&2 + exit 1 + fi + + if [ "${ID}" = "debian" -o "${ID}" = "ubuntu" ]; then + export DEBIAN_FRONTEND=noninteractive -if [ "${ID}" = "debian" -o "${ID}" = "ubuntu" ]; then - export DEBIAN_FRONTEND=noninteractive - - apt-get install -y --no-install-recommends curl gnupg ca-certificates - curl https://www.postgresql.org/media/keys/ACCC4CF8.asc | apt-key add - - mkdir -p /etc/apt/sources.list.d/ - echo "deb https://apt.postgresql.org/pub/repos/apt/ ${VERSION_CODENAME}-pgdg main" \ - >> /etc/apt/sources.list.d/pgdg.list - apt-get update - apt-get install -y --no-install-recommends \ - "postgresql-${PGVERSION}" \ - "postgresql-contrib-${PGVERSION}" -elif [ "${ID}" = "almalinux" ]; then - yum install -y \ - "postgresql-server" \ - "postgresql-devel" \ - "postgresql-contrib" -elif [ "${ID}" = "centos" ]; then - el="EL-${VERSION_ID%.*}-$(arch)" - baseurl="https://download.postgresql.org/pub/repos/yum/reporpms" - yum install -y "${baseurl}/${el}/pgdg-redhat-repo-latest.noarch.rpm" - if [ ${VERSION_ID%.*} -ge 8 ]; then - dnf -qy module disable postgresql + apt-get install -y --no-install-recommends curl gnupg ca-certificates + curl https://www.postgresql.org/media/keys/ACCC4CF8.asc | apt-key add - + mkdir -p /etc/apt/sources.list.d/ + echo "deb https://apt.postgresql.org/pub/repos/apt/ ${VERSION_CODENAME}-pgdg main" \ + >> /etc/apt/sources.list.d/pgdg.list + apt-get update + apt-get install -y --no-install-recommends \ + "postgresql-${PGVERSION}" \ + "postgresql-contrib-${PGVERSION}" + elif [ "${ID}" = "almalinux" ]; then + yum install -y \ + "postgresql-server" \ + "postgresql-devel" \ + "postgresql-contrib" + elif [ 
"${ID}" = "centos" ]; then + el="EL-${VERSION_ID%.*}-$(arch)" + baseurl="https://download.postgresql.org/pub/repos/yum/reporpms" + yum install -y "${baseurl}/${el}/pgdg-redhat-repo-latest.noarch.rpm" + if [ ${VERSION_ID%.*} -ge 8 ]; then + dnf -qy module disable postgresql + fi + yum install -y \ + "postgresql${PGVERSION}-server" \ + "postgresql${PGVERSION}-contrib" + ln -s "/usr/pgsql-${PGVERSION}/bin/pg_config" "/usr/local/bin/pg_config" + elif [ "${ID}" = "alpine" ]; then + apk add shadow postgresql postgresql-dev postgresql-contrib + else + echo "install-postgres.sh: unsupported Linux distro: ${distro}" >&2 + exit 1 fi - yum install -y \ - "postgresql${PGVERSION}-server" \ - "postgresql${PGVERSION}-contrib" - ln -s "/usr/pgsql-${PGVERSION}/bin/pg_config" "/usr/local/bin/pg_config" -elif [ "${ID}" = "alpine" ]; then - apk add shadow postgresql postgresql-dev postgresql-contrib + + useradd -m -s /bin/bash apgtest + +elif [[ $OSTYPE == darwin* ]]; then + brew install postgresql + else - echo "install-postgres.sh: Unsupported distro: ${distro}" >&2 + echo "install-postgres.sh: unsupported OS: ${OSTYPE}" >&2 exit 1 fi - -useradd -m -s /bin/bash apgtest diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index a32a3aeb..5ea543eb 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -39,8 +39,8 @@ jobs: - uses: actions/upload-artifact@v4 with: - name: dist - path: dist/ + name: dist-version + path: dist/VERSION build-sdist: needs: validate-release-request @@ -67,7 +67,7 @@ jobs: - uses: actions/upload-artifact@v4 with: - name: dist + name: dist-sdist path: dist/*.tar.* build-wheels-matrix: @@ -127,9 +127,19 @@ jobs: - uses: actions/upload-artifact@v4 with: - name: dist + name: dist-wheels-${{ matrix.only }} path: wheelhouse/*.whl + merge-artifacts: + runs-on: ubuntu-latest + needs: [build-sdist, build-wheels] + steps: + - name: Merge Artifacts + uses: actions/upload-artifact/merge@v4 + with: + name: dist + 
delete-merged: true + publish-docs: needs: [build-sdist, build-wheels] runs-on: ubuntu-latest @@ -180,6 +190,12 @@ jobs: needs: [build-sdist, build-wheels, publish-docs] runs-on: ubuntu-latest + environment: + name: pypi + url: https://pypi.org/p/asyncpg + permissions: + id-token: write + steps: - uses: actions/checkout@v4 with: @@ -223,7 +239,4 @@ jobs: - name: Upload to PyPI uses: pypa/gh-action-pypi-publish@release/v1 with: - user: __token__ - password: ${{ secrets.PYPI_TOKEN }} - # password: ${{ secrets.TEST_PYPI_TOKEN }} - # repository_url: https://test.pypi.org/legacy/ + attestations: true diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index ce06e7f5..a4869312 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -48,28 +48,28 @@ jobs: missing_version_ok: yes version_file: asyncpg/_version.py version_line_pattern: | - __version__\s*=\s*(?:['"])([[:PEP440:]])(?:['"]) + __version__(?:\s*:\s*typing\.Final)?\s*=\s*(?:['"])([[:PEP440:]])(?:['"]) - name: Setup PostgreSQL - if: steps.release.outputs.version == 0 && matrix.os == 'macos-latest' + if: "!steps.release.outputs.is_release && matrix.os == 'macos-latest'" run: | brew install postgresql - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v5 - if: steps.release.outputs.version == 0 + if: "!steps.release.outputs.is_release" with: python-version: ${{ matrix.python-version }} - name: Install Python Deps - if: steps.release.outputs.version == 0 + if: "!steps.release.outputs.is_release" run: | - .github/workflows/install-krb5.sh + [ "$RUNNER_OS" = "Linux" ] && .github/workflows/install-krb5.sh python -m pip install -U pip setuptools wheel python -m pip install -e .[test] - name: Test - if: steps.release.outputs.version == 0 + if: "!steps.release.outputs.is_release" env: LOOP_IMPL: ${{ matrix.loop }} run: | @@ -103,10 +103,10 @@ jobs: missing_version_ok: yes version_file: asyncpg/_version.py version_line_pattern: | - 
__version__\s*=\s*(?:['"])([[:PEP440:]])(?:['"]) + __version__(?:\s*:\s*typing\.Final)?\s*=\s*(?:['"])([[:PEP440:]])(?:['"]) - name: Set up PostgreSQL - if: steps.release.outputs.version == 0 + if: "!steps.release.outputs.is_release" env: PGVERSION: ${{ matrix.postgres-version }} DISTRO_NAME: focal @@ -118,19 +118,19 @@ jobs: - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v5 - if: steps.release.outputs.version == 0 + if: "!steps.release.outputs.is_release" with: python-version: "3.x" - name: Install Python Deps - if: steps.release.outputs.version == 0 + if: "!steps.release.outputs.is_release" run: | - .github/workflows/install-krb5.sh + [ "$RUNNER_OS" = "Linux" ] && .github/workflows/install-krb5.sh python -m pip install -U pip setuptools wheel python -m pip install -e .[test] - name: Test - if: steps.release.outputs.version == 0 + if: "!steps.release.outputs.is_release" env: PGVERSION: ${{ matrix.postgres-version }} run: | diff --git a/asyncpg/_testbase/__init__.py b/asyncpg/_testbase/__init__.py index 2d785dac..95775e11 100644 --- a/asyncpg/_testbase/__init__.py +++ b/asyncpg/_testbase/__init__.py @@ -226,13 +226,6 @@ def _init_cluster(ClusterCls, cluster_kwargs, initdb_options=None): return cluster -def _start_cluster(ClusterCls, cluster_kwargs, server_settings, - initdb_options=None): - cluster = _init_cluster(ClusterCls, cluster_kwargs, initdb_options) - cluster.start(port='dynamic', server_settings=server_settings) - return cluster - - def _get_initdb_options(initdb_options=None): if not initdb_options: initdb_options = {} @@ -256,8 +249,12 @@ def _init_default_cluster(initdb_options=None): _default_cluster = pg_cluster.RunningCluster() else: _default_cluster = _init_cluster( - pg_cluster.TempCluster, cluster_kwargs={}, - initdb_options=_get_initdb_options(initdb_options)) + pg_cluster.TempCluster, + cluster_kwargs={ + "data_dir_suffix": ".apgtest", + }, + initdb_options=_get_initdb_options(initdb_options), + ) return 
_default_cluster diff --git a/asyncpg/cluster.py b/asyncpg/cluster.py index 4467cc2a..606c2eae 100644 --- a/asyncpg/cluster.py +++ b/asyncpg/cluster.py @@ -9,9 +9,11 @@ import os import os.path import platform +import random import re import shutil import socket +import string import subprocess import sys import tempfile @@ -45,6 +47,29 @@ def find_available_port(): sock.close() +def _world_readable_mkdtemp(suffix=None, prefix=None, dir=None): + name = "".join(random.choices(string.ascii_lowercase, k=8)) + if dir is None: + dir = tempfile.gettempdir() + if prefix is None: + prefix = tempfile.gettempprefix() + if suffix is None: + suffix = "" + fn = os.path.join(dir, prefix + name + suffix) + os.mkdir(fn, 0o755) + return fn + + +def _mkdtemp(suffix=None, prefix=None, dir=None): + if _system == 'Windows' and os.environ.get("GITHUB_ACTIONS"): + # Due to mitigations introduced in python/cpython#118486 + # when Python runs in a session created via an SSH connection + # tempfile.mkdtemp creates directories that are not accessible. 
+ return _world_readable_mkdtemp(suffix, prefix, dir) + else: + return tempfile.mkdtemp(suffix, prefix, dir) + + class ClusterError(Exception): pass @@ -122,9 +147,13 @@ def init(self, **settings): else: extra_args = [] + os.makedirs(self._data_dir, exist_ok=True) process = subprocess.run( [self._pg_ctl, 'init', '-D', self._data_dir] + extra_args, - stdout=subprocess.PIPE, stderr=subprocess.STDOUT) + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + cwd=self._data_dir, + ) output = process.stdout @@ -199,7 +228,10 @@ def start(self, wait=60, *, server_settings={}, **opts): process = subprocess.run( [self._pg_ctl, 'start', '-D', self._data_dir, '-o', ' '.join(extra_args)], - stdout=stdout, stderr=subprocess.STDOUT) + stdout=stdout, + stderr=subprocess.STDOUT, + cwd=self._data_dir, + ) if process.returncode != 0: if process.stderr: @@ -218,7 +250,10 @@ def start(self, wait=60, *, server_settings={}, **opts): self._daemon_process = \ subprocess.Popen( [self._postgres, '-D', self._data_dir, *extra_args], - stdout=stdout, stderr=subprocess.STDOUT) + stdout=stdout, + stderr=subprocess.STDOUT, + cwd=self._data_dir, + ) self._daemon_pid = self._daemon_process.pid @@ -232,7 +267,10 @@ def reload(self): process = subprocess.run( [self._pg_ctl, 'reload', '-D', self._data_dir], - stdout=subprocess.PIPE, stderr=subprocess.PIPE) + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + cwd=self._data_dir, + ) stderr = process.stderr @@ -245,7 +283,10 @@ def stop(self, wait=60): process = subprocess.run( [self._pg_ctl, 'stop', '-D', self._data_dir, '-t', str(wait), '-m', 'fast'], - stdout=subprocess.PIPE, stderr=subprocess.PIPE) + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + cwd=self._data_dir, + ) stderr = process.stderr @@ -583,9 +624,9 @@ class TempCluster(Cluster): def __init__(self, *, data_dir_suffix=None, data_dir_prefix=None, data_dir_parent=None, pg_config_path=None): - self._data_dir = tempfile.mkdtemp(suffix=data_dir_suffix, - prefix=data_dir_prefix, - 
dir=data_dir_parent) + self._data_dir = _mkdtemp(suffix=data_dir_suffix, + prefix=data_dir_prefix, + dir=data_dir_parent) super().__init__(self._data_dir, pg_config_path=pg_config_path) diff --git a/pyproject.toml b/pyproject.toml index 4bb9e8f0..dabb7d8b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -42,6 +42,7 @@ gssauth = [ test = [ 'flake8~=6.1', 'flake8-pyi~=24.1.0', + 'distro~=1.9.0', 'uvloop>=0.15.3; platform_system != "Windows" and python_version < "3.14.0"', 'gssapi; platform_system == "Linux"', 'k5test; platform_system == "Linux"', @@ -75,13 +76,17 @@ build-frontend = "build" test-extras = "test" [tool.cibuildwheel.macos] +before-all = ".github/workflows/install-postgres.sh" test-command = "python {project}/tests/__init__.py" [tool.cibuildwheel.windows] test-command = "python {project}\\tests\\__init__.py" [tool.cibuildwheel.linux] -before-all = ".github/workflows/install-postgres.sh" +before-all = """ + .github/workflows/install-postgres.sh \ + && .github/workflows/install-krb5.sh \ + """ test-command = """\ PY=`which python` \ && chmod -R go+rX "$(dirname $(dirname $(dirname $PY)))" \ diff --git a/tests/test_connect.py b/tests/test_connect.py index 517f05f9..0037ee5e 100644 --- a/tests/test_connect.py +++ b/tests/test_connect.py @@ -24,6 +24,8 @@ import warnings import weakref +import distro + import asyncpg from asyncpg import _testbase as tb from asyncpg import connection as pg_connection @@ -388,6 +390,10 @@ async def test_auth_md5_unsupported(self, _): await self.connect(user='md5_user', password=CORRECT_PASSWORD) +@unittest.skipIf( + distro.id() == "alpine", + "Alpine Linux ships PostgreSQL without GSS auth support", +) class TestGssAuthentication(BaseTestAuthentication): @classmethod def setUpClass(cls): @@ -426,10 +432,11 @@ def setup_cluster(cls): cls.start_cluster( cls.cluster, server_settings=cls.get_server_settings()) - async def test_auth_gssapi(self): + async def test_auth_gssapi_ok(self): conn = await 
self.connect(user=self.realm.user_princ) await conn.close() + async def test_auth_gssapi_bad_srvname(self): # Service name mismatch. with self.assertRaisesRegex( exceptions.InternalClientError, @@ -437,6 +444,7 @@ async def test_auth_gssapi(self): ): await self.connect(user=self.realm.user_princ, krbsrvname='wrong') + async def test_auth_gssapi_bad_user(self): # Credentials mismatch. with self.assertRaisesRegex( exceptions.InvalidAuthorizationSpecificationError, From bae282ec79157ca1e0018ea0753edefe14c9d50c Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Fri, 18 Oct 2024 13:25:30 -0700 Subject: [PATCH 169/193] asyncpg v0.30.0 Support Python 3.13 and PostgreSQL 17. Improvements ============ * Implement GSSAPI authentication (by @eltoder in 1d4e5680 for #1122) * Implement SSPI authentication (by @eltoder in 1aab2094 for #1128) * Add initial typings (by @bryanforbes in d42432bf for #1127) * Allow building with Cython 3 (by @musicinmybrain in 258d8a95 for #1101) * docs: fix connection pool close call (#1125) (by @paulovitorweb in e8488149 for #1125) * Add support for the `sslnegotiation` parameter (by @elprans in afdb05c7 for #1187) * Test and build on Python 3.13 (by @elprans in 3aa98944 for #1188) * Support PostgreSQL 17 (by @elprans in cee97e1a for #1189) (by @MeggyCal in aa2d0e69 for #1185) * Add `fetchmany` to execute many *and* return rows (by @rossmacarthur in 73f2209d for #1175) * Add `connect` kwarg to Pool to better support GCP's CloudSQL (by @d1manson in 3ee19baa for #1170) * Allow customizing connection state reset (#1191) (by @elprans in f6ec755c for #1191) Fixes ===== * s/quote/quote_plus/ in the note about DSN part quoting (by @elprans in 1194a8a6 for #1151) * Use asyncio.run() instead of run_until_complete() (by @eltoder in 9fcddfc1 for #1140) * Require async_timeout for python < 3.11 (#1177) (by @Pliner in 327f2a7a for #1177) * Allow testing with uvloop on Python 3.12 (#1182) (by @musicinmybrain in 597fe541 for #1182) * Mark pool-wrapped 
connection coroutine methods as coroutines (by @elprans in 636420b1 for #1134) * handle `None` parameters in `copy_from_query`, returning `NULL` (by @fobispotc in 259d16e5 for #1180) * fix: return the pool from _async_init__ if it's already initialized (#1104) (by @guacs in 7dc58728 for #1104) * Replace obsolete, unsafe `Py_TRASHCAN_SAFE_BEGIN/END` (#1150) (by @musicinmybrain in 11101c6e for #1150) --- .github/workflows/release.yml | 3 +++ asyncpg/_version.py | 2 +- 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 5ea543eb..7c9a366d 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -195,6 +195,9 @@ jobs: url: https://pypi.org/p/asyncpg permissions: id-token: write + attestations: write + contents: write + deployments: write steps: - uses: actions/checkout@v4 diff --git a/asyncpg/_version.py b/asyncpg/_version.py index 383fe4d2..245eee7e 100644 --- a/asyncpg/_version.py +++ b/asyncpg/_version.py @@ -14,4 +14,4 @@ import typing -__version__: typing.Final = '0.30.0.dev0' +__version__: typing.Final = '0.30.0' From a273e0e104f536f9b676ed805f115744a0ae68ab Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Dani=C3=ABl=20van=20Noord?= <13665637+DanielNoord@users.noreply.github.com> Date: Tue, 29 Oct 2024 21:13:12 +0100 Subject: [PATCH 170/193] Add typing to two objects in `connection_utils` (#1198) --- asyncpg/connect_utils.py | 35 ++++++++++++++++++++++++++++++----- 1 file changed, 30 insertions(+), 5 deletions(-) diff --git a/asyncpg/connect_utils.py b/asyncpg/connect_utils.py index 4890d007..c65f68a6 100644 --- a/asyncpg/connect_utils.py +++ b/asyncpg/connect_utils.py @@ -4,9 +4,11 @@ # This module is part of asyncpg and is released under # the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 +from __future__ import annotations import asyncio import collections +from collections.abc import Callable import enum import functools import getpass @@ -764,14 
+766,21 @@ def _parse_connect_arguments(*, dsn, host, port, user, password, passfile, class TLSUpgradeProto(asyncio.Protocol): - def __init__(self, loop, host, port, ssl_context, ssl_is_advisory): + def __init__( + self, + loop: asyncio.AbstractEventLoop, + host: str, + port: int, + ssl_context: ssl_module.SSLContext, + ssl_is_advisory: bool, + ) -> None: self.on_data = _create_future(loop) self.host = host self.port = port self.ssl_context = ssl_context self.ssl_is_advisory = ssl_is_advisory - def data_received(self, data): + def data_received(self, data: bytes) -> None: if data == b'S': self.on_data.set_result(True) elif (self.ssl_is_advisory and @@ -789,15 +798,30 @@ def data_received(self, data): 'rejected SSL upgrade'.format( host=self.host, port=self.port))) - def connection_lost(self, exc): + def connection_lost(self, exc: typing.Optional[Exception]) -> None: if not self.on_data.done(): if exc is None: exc = ConnectionError('unexpected connection_lost() call') self.on_data.set_exception(exc) -async def _create_ssl_connection(protocol_factory, host, port, *, - loop, ssl_context, ssl_is_advisory=False): +_ProctolFactoryR = typing.TypeVar( + "_ProctolFactoryR", bound=asyncio.protocols.Protocol +) + + +async def _create_ssl_connection( + # TODO: The return type is a specific combination of subclasses of + # asyncio.protocols.Protocol that we can't express. 
For now, having the + # return type be dependent on signature of the factory is an improvement + protocol_factory: Callable[[], _ProctolFactoryR], + host: str, + port: int, + *, + loop: asyncio.AbstractEventLoop, + ssl_context: ssl_module.SSLContext, + ssl_is_advisory: bool = False, +) -> typing.Tuple[asyncio.Transport, _ProctolFactoryR]: tr, pr = await loop.create_connection( lambda: TLSUpgradeProto(loop, host, port, @@ -817,6 +841,7 @@ async def _create_ssl_connection(protocol_factory, host, port, *, try: new_tr = await loop.start_tls( tr, pr, ssl_context, server_hostname=host) + assert new_tr is not None except (Exception, asyncio.CancelledError): tr.close() raise From e9bb695413b2ee923310f9106277198cc19c47b3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Dani=C3=ABl=20van=20Noord?= <13665637+DanielNoord@users.noreply.github.com> Date: Mon, 4 Nov 2024 18:37:53 +0100 Subject: [PATCH 171/193] Add typing to auxiliary classes around `Pool` (#1197) --- asyncpg/pool.py | 84 ++++++++++++++++++++++++++++++++----------------- 1 file changed, 55 insertions(+), 29 deletions(-) diff --git a/asyncpg/pool.py b/asyncpg/pool.py index e3898d53..2e4a7b4f 100644 --- a/asyncpg/pool.py +++ b/asyncpg/pool.py @@ -4,12 +4,16 @@ # This module is part of asyncpg and is released under # the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 +from __future__ import annotations import asyncio +from collections.abc import Awaitable, Callable import functools import inspect import logging import time +from types import TracebackType +from typing import Any, Optional, Type import warnings from . 
import compat @@ -23,7 +27,14 @@ class PoolConnectionProxyMeta(type): - def __new__(mcls, name, bases, dct, *, wrap=False): + def __new__( + mcls, + name: str, + bases: tuple[Type[Any], ...], + dct: dict[str, Any], + *, + wrap: bool = False, + ) -> PoolConnectionProxyMeta: if wrap: for attrname in dir(connection.Connection): if attrname.startswith('_') or attrname in dct: @@ -44,8 +55,10 @@ def __new__(mcls, name, bases, dct, *, wrap=False): return super().__new__(mcls, name, bases, dct) @staticmethod - def _wrap_connection_method(meth_name, iscoroutine): - def call_con_method(self, *args, **kwargs): + def _wrap_connection_method( + meth_name: str, iscoroutine: bool + ) -> Callable[..., Any]: + def call_con_method(self: Any, *args: Any, **kwargs: Any) -> Any: # This method will be owned by PoolConnectionProxy class. if self._con is None: raise exceptions.InterfaceError( @@ -68,17 +81,18 @@ class PoolConnectionProxy(connection._ConnectionProxy, __slots__ = ('_con', '_holder') - def __init__(self, holder: 'PoolConnectionHolder', - con: connection.Connection): + def __init__( + self, holder: PoolConnectionHolder, con: connection.Connection + ) -> None: self._con = con self._holder = holder con._set_proxy(self) - def __getattr__(self, attr): + def __getattr__(self, attr: str) -> Any: # Proxy all unresolved attributes to the wrapped Connection object. 
return getattr(self._con, attr) - def _detach(self) -> connection.Connection: + def _detach(self) -> Optional[connection.Connection]: if self._con is None: return @@ -86,7 +100,7 @@ def _detach(self) -> connection.Connection: con._set_proxy(None) return con - def __repr__(self): + def __repr__(self) -> str: if self._con is None: return '<{classname} [released] {id:#x}>'.format( classname=self.__class__.__name__, id=id(self)) @@ -103,27 +117,34 @@ class PoolConnectionHolder: '_inactive_callback', '_timeout', '_generation') - def __init__(self, pool, *, max_queries, setup, max_inactive_time): + def __init__( + self, + pool: "Pool", + *, + max_queries: float, + setup: Optional[Callable[[PoolConnectionProxy], Awaitable[None]]], + max_inactive_time: float, + ) -> None: self._pool = pool - self._con = None - self._proxy = None + self._con: Optional[connection.Connection] = None + self._proxy: Optional[PoolConnectionProxy] = None self._max_queries = max_queries self._max_inactive_time = max_inactive_time self._setup = setup - self._inactive_callback = None - self._in_use = None # type: asyncio.Future - self._timeout = None - self._generation = None + self._inactive_callback: Optional[Callable] = None + self._in_use: Optional[asyncio.Future] = None + self._timeout: Optional[float] = None + self._generation: Optional[int] = None - def is_connected(self): + def is_connected(self) -> bool: return self._con is not None and not self._con.is_closed() - def is_idle(self): + def is_idle(self) -> bool: return not self._in_use - async def connect(self): + async def connect(self) -> None: if self._con is not None: raise exceptions.InternalClientError( 'PoolConnectionHolder.connect() called while another ' @@ -171,7 +192,7 @@ async def acquire(self) -> PoolConnectionProxy: return proxy - async def release(self, timeout): + async def release(self, timeout: Optional[float]) -> None: if self._in_use is None: raise exceptions.InternalClientError( 'PoolConnectionHolder.release() called on 
' @@ -234,25 +255,25 @@ async def release(self, timeout): # Rearm the connection inactivity timer. self._setup_inactive_callback() - async def wait_until_released(self): + async def wait_until_released(self) -> None: if self._in_use is None: return else: await self._in_use - async def close(self): + async def close(self) -> None: if self._con is not None: # Connection.close() will call _release_on_close() to # finish holder cleanup. await self._con.close() - def terminate(self): + def terminate(self) -> None: if self._con is not None: # Connection.terminate() will call _release_on_close() to # finish holder cleanup. self._con.terminate() - def _setup_inactive_callback(self): + def _setup_inactive_callback(self) -> None: if self._inactive_callback is not None: raise exceptions.InternalClientError( 'pool connection inactivity timer already exists') @@ -261,12 +282,12 @@ def _setup_inactive_callback(self): self._inactive_callback = self._pool._loop.call_later( self._max_inactive_time, self._deactivate_inactive_connection) - def _maybe_cancel_inactive_callback(self): + def _maybe_cancel_inactive_callback(self) -> None: if self._inactive_callback is not None: self._inactive_callback.cancel() self._inactive_callback = None - def _deactivate_inactive_connection(self): + def _deactivate_inactive_connection(self) -> None: if self._in_use is not None: raise exceptions.InternalClientError( 'attempting to deactivate an acquired connection') @@ -280,12 +301,12 @@ def _deactivate_inactive_connection(self): # so terminate() above will not call the below. self._release_on_close() - def _release_on_close(self): + def _release_on_close(self) -> None: self._maybe_cancel_inactive_callback() self._release() self._con = None - def _release(self): + def _release(self) -> None: """Release this connection holder.""" if self._in_use is None: # The holder is not checked out. 
@@ -1012,7 +1033,7 @@ class PoolAcquireContext: __slots__ = ('timeout', 'connection', 'done', 'pool') - def __init__(self, pool, timeout): + def __init__(self, pool: Pool, timeout: Optional[float]) -> None: self.pool = pool self.timeout = timeout self.connection = None @@ -1024,7 +1045,12 @@ async def __aenter__(self): self.connection = await self.pool._acquire(self.timeout) return self.connection - async def __aexit__(self, *exc): + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]] = None, + exc_val: Optional[BaseException] = None, + exc_tb: Optional[TracebackType] = None, + ) -> None: self.done = True con = self.connection self.connection = None From d0797f1f7aed2d28253ec27adac39794a09b2690 Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Wed, 18 Dec 2024 16:52:07 -0500 Subject: [PATCH 172/193] Fix typing issues around Range/RangeValue (#1196) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit In order for the `_RangeValue` protocol to type check properly, it needs to reference `Self`, not itself. Both pyright and mypy show a ton of errors when type checking this before these changes. 
(I've omitted non-relevant errors here): ``` % pyright tests/test_types.py /Users/dmcgee/projects/sbl/asyncpg/tests/test_types.py /Users/dmcgee/projects/sbl/asyncpg/tests/test_types.py:18:25 - error: Argument of type "Literal[1]" cannot be assigned to parameter "lower" of type "_RV@Range | None" in function "__init__"   Type "Literal[1]" is not assignable to type "_RV@Range | None"     Type "Literal[1]" is not assignable to type "_RangeValue"       "Literal[1]" is incompatible with protocol "_RangeValue"         "__lt__" is an incompatible type           Type "(value: int, /) -> bool" is not assignable to type "(__other: _RV@__lt__, /) -> bool"         "__gt__" is an incompatible type           Type "(value: int, /) -> bool" is not assignable to type "(__other: _RV@__gt__, /) -> bool"     "Literal[1]" is not assignable to "None" (reportArgumentType) /Users/dmcgee/projects/sbl/asyncpg/tests/test_types.py:18:34 - error: Argument of type "Literal[5]" cannot be assigned to parameter "upper" of type "_RV@Range | None" in function "__init__"   Type "Literal[5]" is not assignable to type "_RV@Range | None"     Type "Literal[5]" is not assignable to type "_RangeValue"       "Literal[5]" is incompatible with protocol "_RangeValue"         "__lt__" is an incompatible type           Type "(value: int, /) -> bool" is not assignable to type "(__other: _RV@__lt__, /) -> bool"         "__gt__" is an incompatible type           Type "(value: int, /) -> bool" is not assignable to type "(__other: _RV@__gt__, /) -> bool"     "Literal[5]" is not assignable to "None" (reportArgumentType) ... % mypy tests/test_types.py | grep arg-type tests/test_types.py:18: error: Argument "lower" to "Range" has incompatible type "int"; expected "None" [arg-type] tests/test_types.py:18: error: Argument "upper" to "Range" has incompatible type "int"; expected "None" [arg-type] ... 
(19 more errors) ``` After this change, the type checking comes back clean: ``` % pyright tests/test_types.py 0 errors, 0 warnings, 0 informations % mypy tests/test_types.py | grep arg-type ``` --- asyncpg/types.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/asyncpg/types.py b/asyncpg/types.py index 7a24e24c..c4b66b4a 100644 --- a/asyncpg/types.py +++ b/asyncpg/types.py @@ -63,10 +63,10 @@ class _RangeValue(typing.Protocol): def __eq__(self, __value: object) -> bool: ... - def __lt__(self, __other: _RangeValue) -> bool: + def __lt__(self, __other: Self, /) -> bool: ... - def __gt__(self, __other: _RangeValue) -> bool: + def __gt__(self, __other: Self, /) -> bool: ... From 5a1ee01f8bc8fc6709ef4f636f7a67029d90e26d Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Fri, 14 Mar 2025 17:58:46 -0700 Subject: [PATCH 173/193] Disable connection URI tests broken by a recent Python fix (#1244) A fix for python/cpython#105704 broke parsing of URIs containing multiple hosts if one or all of the hosts are IPv6 address literals. This blocks CI, so disable those tests for now until this is fixed properly. 
--- tests/test_connect.py | 39 ++++++++++++++++++++------------------- 1 file changed, 20 insertions(+), 19 deletions(-) diff --git a/tests/test_connect.py b/tests/test_connect.py index 0037ee5e..024c29e4 100644 --- a/tests/test_connect.py +++ b/tests/test_connect.py @@ -846,25 +846,26 @@ class TestConnectParams(tb.TestCase): ), }, - { - 'name': 'dsn_ipv6_multi_host', - 'dsn': 'postgresql://user@[2001:db8::1234%25eth0],[::1]/db', - 'result': ([('2001:db8::1234%eth0', 5432), ('::1', 5432)], { - 'database': 'db', - 'user': 'user', - 'target_session_attrs': 'any', - }) - }, - - { - 'name': 'dsn_ipv6_multi_host_port', - 'dsn': 'postgresql://user@[2001:db8::1234]:1111,[::1]:2222/db', - 'result': ([('2001:db8::1234', 1111), ('::1', 2222)], { - 'database': 'db', - 'user': 'user', - 'target_session_attrs': 'any', - }) - }, + # broken by https://github.com/python/cpython/pull/129418 + # { + # 'name': 'dsn_ipv6_multi_host', + # 'dsn': 'postgresql://user@[2001:db8::1234%25eth0],[::1]/db', + # 'result': ([('2001:db8::1234%eth0', 5432), ('::1', 5432)], { + # 'database': 'db', + # 'user': 'user', + # 'target_session_attrs': 'any', + # }) + # }, + + # { + # 'name': 'dsn_ipv6_multi_host_port', + # 'dsn': 'postgresql://user@[2001:db8::1234]:1111,[::1]:2222/db', + # 'result': ([('2001:db8::1234', 1111), ('::1', 2222)], { + # 'database': 'db', + # 'user': 'user', + # 'target_session_attrs': 'any', + # }) + # }, { 'name': 'dsn_ipv6_multi_host_query_part', From 5c9986c4ae4e52a650b6592681ae9ea81ed2d70a Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Fri, 14 Mar 2025 18:29:28 -0700 Subject: [PATCH 174/193] Avoid performing type introspection on known types (#1243) Type codec setup functions will no longer attempt to introspect the type if it's one of the known builtin types. 
Fixes: #1206 Fixes: #1138 Fixes: #1242 --- asyncpg/connection.py | 32 ++++++++++++-------------------- asyncpg/introspection.py | 18 ++++++++---------- 2 files changed, 20 insertions(+), 30 deletions(-) diff --git a/asyncpg/connection.py b/asyncpg/connection.py index 3a86466c..ba8be2ef 100644 --- a/asyncpg/connection.py +++ b/asyncpg/connection.py @@ -534,26 +534,18 @@ async def _introspect_types(self, typeoids, timeout): return result async def _introspect_type(self, typename, schema): - if ( - schema == 'pg_catalog' - and typename.lower() in protocol.BUILTIN_TYPE_NAME_MAP - ): - typeoid = protocol.BUILTIN_TYPE_NAME_MAP[typename.lower()] - rows = await self._execute( - introspection.TYPE_BY_OID, - [typeoid], - limit=0, - timeout=None, - ignore_custom_codec=True, - ) - else: - rows = await self._execute( - introspection.TYPE_BY_NAME, - [typename, schema], - limit=1, - timeout=None, - ignore_custom_codec=True, - ) + if schema == 'pg_catalog' and not typename.endswith("[]"): + typeoid = protocol.BUILTIN_TYPE_NAME_MAP.get(typename.lower()) + if typeoid is not None: + return introspection.TypeRecord((typeoid, None, b"b")) + + rows = await self._execute( + introspection.TYPE_BY_NAME, + [typename, schema], + limit=1, + timeout=None, + ignore_custom_codec=True, + ) if not rows: raise ValueError( diff --git a/asyncpg/introspection.py b/asyncpg/introspection.py index 641cf700..c3b4e60c 100644 --- a/asyncpg/introspection.py +++ b/asyncpg/introspection.py @@ -7,10 +7,12 @@ from __future__ import annotations import typing +from .protocol.protocol import _create_record # type: ignore if typing.TYPE_CHECKING: from . 
import protocol + _TYPEINFO_13: typing.Final = '''\ ( SELECT @@ -267,16 +269,12 @@ ''' -TYPE_BY_OID = '''\ -SELECT - t.oid, - t.typelem AS elemtype, - t.typtype AS kind -FROM - pg_catalog.pg_type AS t -WHERE - t.oid = $1 -''' +def TypeRecord( + rec: typing.Tuple[int, typing.Optional[int], bytes], +) -> protocol.Record: + assert len(rec) == 3 + return _create_record( # type: ignore + {"oid": 0, "elemtype": 1, "kind": 2}, rec) # 'b' for a base type, 'd' for a domain, 'e' for enum. From 07e163f60b26d0e42d8a7bc70d990a3f80d6603c Mon Sep 17 00:00:00 2001 From: Andrew Date: Sun, 16 Mar 2025 12:21:40 +0700 Subject: [PATCH 175/193] Fix "implicit optional", e.g. `arg: int = None` (#1239) --- asyncpg/connection.py | 22 +++++++++++++++++++--- asyncpg/pool.py | 15 +++++++++++++-- asyncpg/prepared_stmt.py | 3 ++- 3 files changed, 34 insertions(+), 6 deletions(-) diff --git a/asyncpg/connection.py b/asyncpg/connection.py index ba8be2ef..d77c2ced 100644 --- a/asyncpg/connection.py +++ b/asyncpg/connection.py @@ -311,7 +311,12 @@ def is_in_transaction(self): """ return self._protocol.is_in_transaction() - async def execute(self, query: str, *args, timeout: float=None) -> str: + async def execute( + self, + query: str, + *args, + timeout: typing.Optional[float]=None, + ) -> str: """Execute an SQL command (or commands). This method can execute many SQL commands at once, when no arguments @@ -358,7 +363,13 @@ async def execute(self, query: str, *args, timeout: float=None) -> str: ) return status.decode() - async def executemany(self, command: str, args, *, timeout: float=None): + async def executemany( + self, + command: str, + args, + *, + timeout: typing.Optional[float]=None, + ): """Execute an SQL *command* for each sequence of arguments in *args*. 
Example: @@ -749,7 +760,12 @@ async def fetchrow( return data[0] async def fetchmany( - self, query, args, *, timeout: float=None, record_class=None + self, + query, + args, + *, + timeout: typing.Optional[float]=None, + record_class=None, ): """Run a query for each sequence of arguments in *args* and return the results as a list of :class:`Record`. diff --git a/asyncpg/pool.py b/asyncpg/pool.py index 2e4a7b4f..5c7ea9ca 100644 --- a/asyncpg/pool.py +++ b/asyncpg/pool.py @@ -574,7 +574,12 @@ async def _get_new_connection(self): return con - async def execute(self, query: str, *args, timeout: float=None) -> str: + async def execute( + self, + query: str, + *args, + timeout: Optional[float]=None, + ) -> str: """Execute an SQL command (or commands). Pool performs this operation using one of its connections. Other than @@ -586,7 +591,13 @@ async def execute(self, query: str, *args, timeout: float=None) -> str: async with self.acquire() as con: return await con.execute(query, *args, timeout=timeout) - async def executemany(self, command: str, args, *, timeout: float=None): + async def executemany( + self, + command: str, + args, + *, + timeout: Optional[float]=None, + ): """Execute an SQL *command* for each sequence of arguments in *args*. Pool performs this operation using one of its connections. Other than diff --git a/asyncpg/prepared_stmt.py b/asyncpg/prepared_stmt.py index d66a5ad3..0c2d335e 100644 --- a/asyncpg/prepared_stmt.py +++ b/asyncpg/prepared_stmt.py @@ -6,6 +6,7 @@ import json +import typing from . import connresource from . import cursor @@ -232,7 +233,7 @@ async def fetchmany(self, args, *, timeout=None): ) @connresource.guarded - async def executemany(self, args, *, timeout: float=None): + async def executemany(self, args, *, timeout: typing.Optional[float]=None): """Execute the statement for each sequence of arguments in *args*. :param args: An iterable containing sequences of arguments. 
From 01c0db7b45dbdcdebf6a4a36acfb02f2f96f0a85 Mon Sep 17 00:00:00 2001 From: Andrew Jackson <46945903+AndrewJackson2020@users.noreply.github.com> Date: Sun, 16 Mar 2025 00:24:45 -0500 Subject: [PATCH 176/193] Fix multi port connection string issue (#1222) Co-authored-by: CommanderKeynes --- asyncpg/connect_utils.py | 4 +++- tests/test_connect.py | 14 ++++++++++++++ 2 files changed, 17 insertions(+), 1 deletion(-) diff --git a/asyncpg/connect_utils.py b/asyncpg/connect_utils.py index c65f68a6..d6c4f051 100644 --- a/asyncpg/connect_utils.py +++ b/asyncpg/connect_utils.py @@ -168,13 +168,15 @@ def _read_password_from_pgpass( def _validate_port_spec(hosts, port): - if isinstance(port, list): + if isinstance(port, list) and len(port) > 1: # If there is a list of ports, its length must # match that of the host list. if len(port) != len(hosts): raise exceptions.ClientConfigurationError( 'could not match {} port numbers to {} hosts'.format( len(port), len(hosts))) + elif isinstance(port, list) and len(port) == 1: + port = [port[0] for _ in range(len(hosts))] else: port = [port for _ in range(len(hosts))] diff --git a/tests/test_connect.py b/tests/test_connect.py index 024c29e4..47948265 100644 --- a/tests/test_connect.py +++ b/tests/test_connect.py @@ -1088,6 +1088,20 @@ class TestConnectParams(tb.TestCase): } ) }, + { + 'name': 'multi_host_single_port', + 'dsn': 'postgres:///postgres?host=127.0.0.1,127.0.0.2&port=5432&user=postgres', + 'result': ( + [ + ('127.0.0.1', 5432), + ('127.0.0.2', 5432) + ], { + 'user': 'postgres', + 'database': 'postgres', + 'target_session_attrs': 'any', + } + ) + }, ] @contextlib.contextmanager From 0525b53d4e9ff1830621e306a9a6cd5b64886a22 Mon Sep 17 00:00:00 2001 From: Anton Myroniuk Date: Wed, 19 Mar 2025 22:24:11 +0200 Subject: [PATCH 177/193] Fix lint in test_connect (#1246) --- tests/test_connect.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tests/test_connect.py b/tests/test_connect.py index 47948265..62cabc47 
100644 --- a/tests/test_connect.py +++ b/tests/test_connect.py @@ -1090,7 +1090,8 @@ class TestConnectParams(tb.TestCase): }, { 'name': 'multi_host_single_port', - 'dsn': 'postgres:///postgres?host=127.0.0.1,127.0.0.2&port=5432&user=postgres', + 'dsn': 'postgres:///postgres?host=127.0.0.1,127.0.0.2&port=5432' + '&user=postgres', 'result': ( [ ('127.0.0.1', 5432), From 5b14653e0b447d956aa01ec658562138e19f0293 Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Wed, 19 Mar 2025 13:24:25 -0700 Subject: [PATCH 178/193] Make `prepare()` not use named statements by default when cache is disabled (#1245) We allow disabling the statement cache to circumvent a proxy's inability to cope with them, however there are still some holes where the use of a named statement is attempted: `prepare()` with the default arguments, and `copy_in()`. Fix both so that the use of a named statement is dependent upon whether the prepared statement cache is enabled. Fixes: #1219 Closes: #1218 --- asyncpg/connection.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/asyncpg/connection.py b/asyncpg/connection.py index d77c2ced..e9354109 100644 --- a/asyncpg/connection.py +++ b/asyncpg/connection.py @@ -405,7 +405,7 @@ async def _get_statement( query, timeout, *, - named=False, + named: typing.Union[str, bool, None] = False, use_cache=True, ignore_custom_codec=False, record_class=None @@ -639,7 +639,6 @@ async def prepare( query, name=name, timeout=timeout, - use_cache=False, record_class=record_class, ) @@ -647,16 +646,18 @@ async def _prepare( self, query, *, - name=None, + name: typing.Union[str, bool, None] = None, timeout=None, use_cache: bool=False, record_class=None ): self._check_open() + if name is None: + name = self._stmt_cache_enabled stmt = await self._get_statement( query, timeout, - named=True if name is None else name, + named=name, use_cache=use_cache, record_class=record_class, ) @@ -1115,7 +1116,7 @@ async def copy_records_to_table(self, 
table_name, *, records, intro_query = 'SELECT {cols} FROM {tab} LIMIT 1'.format( tab=tabname, cols=col_list) - intro_ps = await self._prepare(intro_query, use_cache=True) + intro_ps = await self.prepare(intro_query) cond = self._format_copy_where(where) opts = '(FORMAT binary)' From 1d63bb1597f5a24ff182539de470f9c20026012c Mon Sep 17 00:00:00 2001 From: Andrew Jackson <46945903+AndrewJackson2020@users.noreply.github.com> Date: Sat, 11 Oct 2025 00:14:14 -0500 Subject: [PATCH 179/193] Implement connection service file functionality (#1223) --- asyncpg/connect_utils.py | 155 +++++++++++++++++++++++++++++++++++++-- asyncpg/connection.py | 15 ++++ tests/test_connect.py | 113 +++++++++++++++++++++++++++- 3 files changed, 274 insertions(+), 9 deletions(-) diff --git a/asyncpg/connect_utils.py b/asyncpg/connect_utils.py index d6c4f051..d09acc65 100644 --- a/asyncpg/connect_utils.py +++ b/asyncpg/connect_utils.py @@ -7,6 +7,7 @@ from __future__ import annotations import asyncio +import configparser import collections from collections.abc import Callable import enum @@ -87,6 +88,9 @@ class SSLNegotiation(compat.StrEnum): PGPASSFILE = '.pgpass' +PG_SERVICEFILE = '.pg_service.conf' + + def _read_password_file(passfile: pathlib.Path) \ -> typing.List[typing.Tuple[str, ...]]: @@ -271,6 +275,7 @@ def _dot_postgresql_path(filename) -> typing.Optional[pathlib.Path]: def _parse_connect_dsn_and_args(*, dsn, host, port, user, password, passfile, database, ssl, + service, servicefile, direct_tls, server_settings, target_session_attrs, krbsrvname, gsslib): # `auth_hosts` is the version of host information for the purposes @@ -283,6 +288,32 @@ def _parse_connect_dsn_and_args(*, dsn, host, port, user, if dsn: parsed = urllib.parse.urlparse(dsn) + query = None + if parsed.query: + query = urllib.parse.parse_qs(parsed.query, strict_parsing=True) + for key, val in query.items(): + if isinstance(val, list): + query[key] = val[-1] + + if 'service' in query: + val = query.pop('service') + if 
not service and val: + service = val + + connection_service_file = servicefile + + if connection_service_file is None: + connection_service_file = os.getenv('PGSERVICEFILE') + + if connection_service_file is None: + homedir = compat.get_pg_home_directory() + if homedir: + connection_service_file = homedir / PG_SERVICEFILE + else: + connection_service_file = None + else: + connection_service_file = pathlib.Path(connection_service_file) + if parsed.scheme not in {'postgresql', 'postgres'}: raise exceptions.ClientConfigurationError( 'invalid DSN: scheme is expected to be either ' @@ -317,11 +348,7 @@ def _parse_connect_dsn_and_args(*, dsn, host, port, user, if password is None and dsn_password: password = urllib.parse.unquote(dsn_password) - if parsed.query: - query = urllib.parse.parse_qs(parsed.query, strict_parsing=True) - for key, val in query.items(): - if isinstance(val, list): - query[key] = val[-1] + if query: if 'port' in query: val = query.pop('port') @@ -408,12 +435,124 @@ def _parse_connect_dsn_and_args(*, dsn, host, port, user, if gsslib is None: gsslib = val + if 'service' in query: + val = query.pop('service') + if service is None: + service = val + if query: if server_settings is None: server_settings = query else: server_settings = {**query, **server_settings} + if connection_service_file is not None and service is not None: + pg_service = configparser.ConfigParser() + pg_service.read(connection_service_file) + if service in pg_service.sections(): + service_params = pg_service[service] + if 'port' in service_params: + val = service_params.pop('port') + if not port and val: + port = [int(p) for p in val.split(',')] + + if 'host' in service_params: + val = service_params.pop('host') + if not host and val: + host, port = _parse_hostlist(val, port) + + if 'dbname' in service_params: + val = service_params.pop('dbname') + if database is None: + database = val + + if 'database' in service_params: + val = service_params.pop('database') + if database is None: 
+ database = val + + if 'user' in service_params: + val = service_params.pop('user') + if user is None: + user = val + + if 'password' in service_params: + val = service_params.pop('password') + if password is None: + password = val + + if 'passfile' in service_params: + val = service_params.pop('passfile') + if passfile is None: + passfile = val + + if 'sslmode' in service_params: + val = service_params.pop('sslmode') + if ssl is None: + ssl = val + + if 'sslcert' in service_params: + val = service_params.pop('sslcert') + if sslcert is None: + sslcert = val + + if 'sslkey' in service_params: + val = service_params.pop('sslkey') + if sslkey is None: + sslkey = val + + if 'sslrootcert' in service_params: + val = service_params.pop('sslrootcert') + if sslrootcert is None: + sslrootcert = val + + if 'sslnegotiation' in service_params: + val = service_params.pop('sslnegotiation') + if sslnegotiation is None: + sslnegotiation = val + + if 'sslcrl' in service_params: + val = service_params.pop('sslcrl') + if sslcrl is None: + sslcrl = val + + if 'sslpassword' in service_params: + val = service_params.pop('sslpassword') + if sslpassword is None: + sslpassword = val + + if 'ssl_min_protocol_version' in service_params: + val = service_params.pop( + 'ssl_min_protocol_version' + ) + if ssl_min_protocol_version is None: + ssl_min_protocol_version = val + + if 'ssl_max_protocol_version' in service_params: + val = service_params.pop( + 'ssl_max_protocol_version' + ) + if ssl_max_protocol_version is None: + ssl_max_protocol_version = val + + if 'target_session_attrs' in service_params: + dsn_target_session_attrs = service_params.pop( + 'target_session_attrs' + ) + if target_session_attrs is None: + target_session_attrs = dsn_target_session_attrs + + if 'krbsrvname' in service_params: + val = service_params.pop('krbsrvname') + if krbsrvname is None: + krbsrvname = val + + if 'gsslib' in service_params: + val = service_params.pop('gsslib') + if gsslib is None: + gsslib = val + if 
not service: + service = os.environ.get('PGSERVICE') if not host: hostspec = os.environ.get('PGHOST') if hostspec: @@ -726,7 +865,8 @@ def _parse_connect_arguments(*, dsn, host, port, user, password, passfile, max_cached_statement_lifetime, max_cacheable_statement_size, ssl, direct_tls, server_settings, - target_session_attrs, krbsrvname, gsslib): + target_session_attrs, krbsrvname, gsslib, + service, servicefile): local_vars = locals() for var_name in {'max_cacheable_statement_size', 'max_cached_statement_lifetime', @@ -756,7 +896,8 @@ def _parse_connect_arguments(*, dsn, host, port, user, password, passfile, direct_tls=direct_tls, database=database, server_settings=server_settings, target_session_attrs=target_session_attrs, - krbsrvname=krbsrvname, gsslib=gsslib) + krbsrvname=krbsrvname, gsslib=gsslib, + service=service, servicefile=servicefile) config = _ClientConfiguration( command_timeout=command_timeout, diff --git a/asyncpg/connection.py b/asyncpg/connection.py index e9354109..3e7da7b1 100644 --- a/asyncpg/connection.py +++ b/asyncpg/connection.py @@ -2083,6 +2083,8 @@ async def _do_execute( async def connect(dsn=None, *, host=None, port=None, user=None, password=None, passfile=None, + service=None, + servicefile=None, database=None, loop=None, timeout=60, @@ -2192,6 +2194,14 @@ async def connect(dsn=None, *, (defaults to ``~/.pgpass``, or ``%APPDATA%\postgresql\pgpass.conf`` on Windows). + :param service: + The name of the postgres connection service stored in the postgres + connection service file. + + :param servicefile: + The location of the connnection service file used to store + connection parameters. + :param loop: An asyncio event loop instance. If ``None``, the default event loop will be used. @@ -2404,6 +2414,9 @@ async def connect(dsn=None, *, .. versionchanged:: 0.30.0 Added the *krbsrvname* and *gsslib* parameters. + .. versionchanged:: 0.31.0 + Added the *servicefile* and *service* parameters. + .. 
_SSLContext: https://docs.python.org/3/library/ssl.html#ssl.SSLContext .. _create_default_context: https://docs.python.org/3/library/ssl.html#ssl.create_default_context @@ -2437,6 +2450,8 @@ async def connect(dsn=None, *, user=user, password=password, passfile=passfile, + service=service, + servicefile=servicefile, ssl=ssl, direct_tls=direct_tls, database=database, diff --git a/tests/test_connect.py b/tests/test_connect.py index 62cabc47..955fb825 100644 --- a/tests/test_connect.py +++ b/tests/test_connect.py @@ -1132,7 +1132,8 @@ def run_testcase(self, testcase): env = testcase.get('env', {}) test_env = {'PGHOST': None, 'PGPORT': None, 'PGUSER': None, 'PGPASSWORD': None, - 'PGDATABASE': None, 'PGSSLMODE': None} + 'PGDATABASE': None, 'PGSSLMODE': None, + 'PGSERVICE': None, } test_env.update(env) dsn = testcase.get('dsn') @@ -1148,6 +1149,8 @@ def run_testcase(self, testcase): target_session_attrs = testcase.get('target_session_attrs') krbsrvname = testcase.get('krbsrvname') gsslib = testcase.get('gsslib') + service = testcase.get('service') + servicefile = testcase.get('servicefile') expected = testcase.get('result') expected_error = testcase.get('error') @@ -1173,7 +1176,8 @@ def run_testcase(self, testcase): direct_tls=direct_tls, server_settings=server_settings, target_session_attrs=target_session_attrs, - krbsrvname=krbsrvname, gsslib=gsslib) + krbsrvname=krbsrvname, gsslib=gsslib, + service=service, servicefile=servicefile) params = { k: v for k, v in params._asdict().items() @@ -1252,6 +1256,111 @@ def test_connect_params(self): for testcase in self.TESTS: self.run_testcase(testcase) + def test_connect_connection_service_file(self): + connection_service_file = tempfile.NamedTemporaryFile( + 'w+t', delete=False) + connection_service_file.write(textwrap.dedent(''' +[test_service_dbname] +port=5433 +host=somehost +dbname=test_dbname +user=admin +password=test_password +target_session_attrs=primary +krbsrvname=fakekrbsrvname +gsslib=sspi + 
+[test_service_database] +port=5433 +host=somehost +database=test_dbname +user=admin +password=test_password +target_session_attrs=primary +krbsrvname=fakekrbsrvname +gsslib=sspi + ''')) + connection_service_file.close() + os.chmod(connection_service_file.name, stat.S_IWUSR | stat.S_IRUSR) + try: + # Test connection service file with dbname + self.run_testcase({ + 'dsn': 'postgresql://?service=test_service_dbname', + 'env': { + 'PGSERVICEFILE': connection_service_file.name + }, + 'result': ( + [('somehost', 5433)], + { + 'user': 'admin', + 'password': 'test_password', + 'database': 'test_dbname', + 'target_session_attrs': 'primary', + 'krbsrvname': 'fakekrbsrvname', + 'gsslib': 'sspi', + } + ) + }) + # Test connection service file with database + self.run_testcase({ + 'dsn': 'postgresql://?service=test_service_database', + 'env': { + 'PGSERVICEFILE': connection_service_file.name + }, + 'result': ( + [('somehost', 5433)], + { + 'user': 'admin', + 'password': 'test_password', + 'database': 'test_dbname', + 'target_session_attrs': 'primary', + 'krbsrvname': 'fakekrbsrvname', + 'gsslib': 'sspi', + } + ) + }) + # Test that envvars are overridden by service file + self.run_testcase({ + 'dsn': 'postgresql://?service=test_service_dbname', + 'env': { + 'PGUSER': 'user', + 'PGSERVICEFILE': connection_service_file.name + }, + 'result': ( + [('somehost', 5433)], + { + 'user': 'admin', + 'password': 'test_password', + 'database': 'test_dbname', + 'target_session_attrs': 'primary', + 'krbsrvname': 'fakekrbsrvname', + 'gsslib': 'sspi', + } + ) + }) + # Test that dsn params overwrite service file + self.run_testcase({ + 'dsn': 'postgresql://?service={}&dbname={}'.format( + "test_service_dbname", "test_dbname_dsn" + ), + 'env': { + 'PGSERVICEFILE': connection_service_file.name + }, + 'result': ( + [('somehost', 5433)], + { + 'user': 'admin', + 'password': 'test_password', + 'database': 'test_dbname_dsn', + 'target_session_attrs': 'primary', + 'krbsrvname': 'fakekrbsrvname', + 
'gsslib': 'sspi', + } + ) + }) + finally: + os.unlink(connection_service_file.name) + def test_connect_pgpass_regular(self): passfile = tempfile.NamedTemporaryFile('w+t', delete=False) passfile.write(textwrap.dedent(R''' From e94302d26bdf7397fb22fa432a11e9bee365fc22 Mon Sep 17 00:00:00 2001 From: yuliy-openai Date: Fri, 10 Oct 2025 23:23:34 -0700 Subject: [PATCH 180/193] Avoid leaking connections if _can_use_connection fails (#1269) If _can_use_connection fails (say, because of an asyncio timeout), then we may have a full connection that has been created but would be leaked by this function. (note: `_connect_addr` takes care of cleaning up after itself if it fails partway) This is particularly possible in the case of pgbouncer, where we may succeed at establishing a connection much quicker than even a trivial call to the backing database would take. I believe this failure mode was introduced in https://github.com/MagicStack/asyncpg/pull/987 I'm not certain if we should `await` here or just punt the closing of the other connections to a background task (I don't know how risky `close` is on an established connection). 
--- asyncpg/connect_utils.py | 52 ++++++++++++++++++++++------------------ 1 file changed, 29 insertions(+), 23 deletions(-) diff --git a/asyncpg/connect_utils.py b/asyncpg/connect_utils.py index d09acc65..07c4fdde 100644 --- a/asyncpg/connect_utils.py +++ b/asyncpg/connect_utils.py @@ -1212,30 +1212,36 @@ async def _connect(*, loop, connection_class, record_class, **kwargs): candidates = [] chosen_connection = None last_error = None - for addr in addrs: - try: - conn = await _connect_addr( - addr=addr, - loop=loop, - params=params, - config=config, - connection_class=connection_class, - record_class=record_class, - ) - candidates.append(conn) - if await _can_use_connection(conn, target_attr): - chosen_connection = conn - break - except OSError as ex: - last_error = ex - else: - if target_attr == SessionAttribute.prefer_standby and candidates: - chosen_connection = random.choice(candidates) + try: + for addr in addrs: + try: + conn = await _connect_addr( + addr=addr, + loop=loop, + params=params, + config=config, + connection_class=connection_class, + record_class=record_class, + ) + candidates.append(conn) + if await _can_use_connection(conn, target_attr): + chosen_connection = conn + break + except OSError as ex: + last_error = ex + else: + if target_attr == SessionAttribute.prefer_standby and candidates: + chosen_connection = random.choice(candidates) + finally: - await asyncio.gather( - *(c.close() for c in candidates if c is not chosen_connection), - return_exceptions=True - ) + async def _close_candidates(conns, chosen): + await asyncio.gather( + *(c.close() for c in conns if c is not chosen), + return_exceptions=True + ) + if candidates: + asyncio.create_task( + _close_candidates(candidates, chosen_connection)) if chosen_connection: return chosen_connection From b4d2892174ef5a38bfb82a785aea1804a94c23c3 Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Fri, 10 Oct 2025 23:33:50 -0700 Subject: [PATCH 181/193] Bump timeout in Pool tests GitHub Windows 
runners somehow got slower over time. --- tests/test_pool.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_pool.py b/tests/test_pool.py index 3f10ae5c..695363b7 100644 --- a/tests/test_pool.py +++ b/tests/test_pool.py @@ -24,7 +24,7 @@ _system = platform.uname().system -POOL_NOMINAL_TIMEOUT = 0.1 +POOL_NOMINAL_TIMEOUT = 0.5 class SlowResetConnection(pg_connection.Connection): From 4c60ae890d1f3800f889679657357f91a6923568 Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Thu, 16 Oct 2025 08:54:33 -0700 Subject: [PATCH 182/193] Bump version to 0.31.0.dev0 --- asyncpg/_version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/asyncpg/_version.py b/asyncpg/_version.py index 245eee7e..0254079b 100644 --- a/asyncpg/_version.py +++ b/asyncpg/_version.py @@ -14,4 +14,4 @@ import typing -__version__: typing.Final = '0.30.0' +__version__: typing.Final = '0.31.0.dev0' From 6c2c4904e61b2cd2f502540730a24e246cff2ebd Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Thu, 16 Oct 2025 08:55:14 -0700 Subject: [PATCH 183/193] Drop support for EOL Python 3.8 (#1281) Also, bump github actions while at it. 
--- .github/workflows/release.yml | 37 +++++++++++++++++++++-------------- .github/workflows/tests.yml | 17 +++++++++++----- README.rst | 2 +- docs/index.rst | 2 +- pyproject.toml | 10 +++++----- setup.py | 4 ++-- 6 files changed, 43 insertions(+), 29 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 7c9a366d..31af4d27 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -50,13 +50,14 @@ jobs: PIP_DISABLE_PIP_VERSION_CHECK: 1 steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 with: fetch-depth: 50 submodules: true + persist-credentials: false - name: Set up Python - uses: actions/setup-python@v5 + uses: actions/setup-python@v6 with: python-version: "3.x" @@ -76,18 +77,20 @@ jobs: outputs: include: ${{ steps.set-matrix.outputs.include }} steps: - - uses: actions/checkout@v4 - - uses: actions/setup-python@v5 + - uses: actions/checkout@v5 + with: + persist-credentials: false + - uses: actions/setup-python@v6 with: python-version: "3.x" - - run: pip install cibuildwheel==2.21.3 + - run: pip install cibuildwheel==3.2.1 - id: set-matrix run: | MATRIX_INCLUDE=$( { - cibuildwheel --print-build-identifiers --platform linux --arch x86_64,aarch64 | grep cp | jq -nRc '{"only": inputs, "os": "ubuntu-latest"}' \ - && cibuildwheel --print-build-identifiers --platform macos --arch x86_64,arm64 | grep cp | jq -nRc '{"only": inputs, "os": "macos-latest"}' \ - && cibuildwheel --print-build-identifiers --platform windows --arch x86,AMD64 | grep cp | jq -nRc '{"only": inputs, "os": "windows-latest"}' + cibuildwheel --print-build-identifiers --platform linux --archs x86_64,aarch64 | grep cp | jq -nRc '{"only": inputs, "os": "ubuntu-latest"}' \ + && cibuildwheel --print-build-identifiers --platform macos --archs x86_64,arm64 | grep cp | jq -nRc '{"only": inputs, "os": "macos-latest"}' \ + && cibuildwheel --print-build-identifiers --platform windows --archs x86,AMD64 | grep cp | jq -nRc '{"only": 
inputs, "os": "windows-latest"}' } | jq -sc ) echo "include=$MATRIX_INCLUDE" >> $GITHUB_OUTPUT @@ -110,16 +113,17 @@ jobs: PIP_DISABLE_PIP_VERSION_CHECK: 1 steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 with: fetch-depth: 50 submodules: true + persist-credentials: false - name: Set up QEMU if: runner.os == 'Linux' - uses: docker/setup-qemu-action@v2 + uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 # v3.6.0 - - uses: pypa/cibuildwheel@7940a4c0e76eb2030e473a5f864f291f63ee879b # v2.21.3 + - uses: pypa/cibuildwheel@9c00cb4f6b517705a3794b22395aedc36257242c # v3.2.1 with: only: ${{ matrix.only }} env: @@ -149,13 +153,14 @@ jobs: steps: - name: Checkout source - uses: actions/checkout@v4 + uses: actions/checkout@v5 with: fetch-depth: 5 submodules: true + persist-credentials: false - name: Set up Python - uses: actions/setup-python@v5 + uses: actions/setup-python@v6 with: python-version: "3.x" @@ -165,11 +170,12 @@ jobs: make htmldocs - name: Checkout gh-pages - uses: actions/checkout@v4 + uses: actions/checkout@v5 with: fetch-depth: 5 ref: gh-pages path: docs/gh-pages + persist-credentials: false - name: Sync docs run: | @@ -200,10 +206,11 @@ jobs: deployments: write steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 with: fetch-depth: 5 submodules: false + persist-credentials: false - uses: actions/download-artifact@v4 with: diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index a4869312..1c55524a 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -17,7 +17,7 @@ jobs: # job. 
strategy: matrix: - python-version: ["3.8", "3.9", "3.10", "3.11", "3.12", "3.13"] + python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"] os: [ubuntu-latest, macos-latest, windows-latest] loop: [asyncio, uvloop] exclude: @@ -27,6 +27,8 @@ jobs: runs-on: ${{ matrix.os }} + permissions: {} + defaults: run: shell: bash @@ -35,10 +37,11 @@ jobs: PIP_DISABLE_PIP_VERSION_CHECK: 1 steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 with: fetch-depth: 50 submodules: true + persist-credentials: false - name: Check if release PR. uses: edgedb/action-release/validate-pr@master @@ -56,7 +59,7 @@ jobs: brew install postgresql - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v5 + uses: actions/setup-python@v6 if: "!steps.release.outputs.is_release" with: python-version: ${{ matrix.python-version }} @@ -86,14 +89,17 @@ jobs: runs-on: ubuntu-latest + permissions: {} + env: PIP_DISABLE_PIP_VERSION_CHECK: 1 steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 with: fetch-depth: 50 submodules: true + persist-credentials: false - name: Check if release PR. uses: edgedb/action-release/validate-pr@master @@ -117,7 +123,7 @@ jobs: >> "${GITHUB_ENV}" - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v5 + uses: actions/setup-python@v6 if: "!steps.release.outputs.is_release" with: python-version: "3.x" @@ -142,6 +148,7 @@ jobs: name: "Regression Tests" needs: [test-platforms, test-postgres] runs-on: ubuntu-latest + permissions: {} steps: - run: echo OK diff --git a/README.rst b/README.rst index 32fd1693..70751b50 100644 --- a/README.rst +++ b/README.rst @@ -13,7 +13,7 @@ of PostgreSQL server binary protocol for use with Python's ``asyncio`` framework. You can read more about asyncpg in an introductory `blog post `_. -asyncpg requires Python 3.8 or later and is supported for PostgreSQL +asyncpg requires Python 3.9 or later and is supported for PostgreSQL versions 9.5 to 17. 
Other PostgreSQL versions or other databases implementing the PostgreSQL protocol *may* work, but are not being actively tested. diff --git a/docs/index.rst b/docs/index.rst index e0f91813..bbad4397 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -15,7 +15,7 @@ PostgreSQL and Python/asyncio. asyncpg is an efficient, clean implementation of PostgreSQL server binary protocol for use with Python's ``asyncio`` framework. -**asyncpg** requires Python 3.8 or later and is supported for PostgreSQL +**asyncpg** requires Python 3.9 or later and is supported for PostgreSQL versions 9.5 to 17. Other PostgreSQL versions or other databases implementing the PostgreSQL protocol *may* work, but are not being actively tested. diff --git a/pyproject.toml b/pyproject.toml index dabb7d8b..9bdcc378 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,8 @@ description = "An asyncio PostgreSQL driver" authors = [{name = "MagicStack Inc", email = "hello@magic.io"}] requires-python = '>=3.8.0' readme = "README.rst" -license = {text = "Apache License, Version 2.0"} +license = "Apache-2.0" +license-files = ["LICENSE"] dynamic = ["version"] keywords = [ "database", @@ -14,16 +15,15 @@ classifiers = [ "Development Status :: 5 - Production/Stable", "Framework :: AsyncIO", "Intended Audience :: Developers", - "License :: OSI Approved :: Apache Software License", "Operating System :: POSIX", "Operating System :: MacOS :: MacOS X", "Operating System :: Microsoft :: Windows", "Programming Language :: Python :: 3 :: Only", - "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", "Programming Language :: Python :: Implementation :: CPython", "Topic :: Database :: Front-Ends", ] @@ -56,8 +56,7 @@ docs = [ [build-system] requires = [ - "setuptools>=60", - "wheel", + "setuptools>=77.0.3", 
"Cython(>=0.29.24,<4.0.0)" ] build-backend = "setuptools.build_meta" @@ -74,6 +73,7 @@ include = ["asyncpg", "asyncpg.*"] [tool.cibuildwheel] build-frontend = "build" test-extras = "test" +skip = "cp38-*" [tool.cibuildwheel.macos] before-all = ".github/workflows/install-postgres.sh" diff --git a/setup.py b/setup.py index 5f2709d7..29e9d612 100644 --- a/setup.py +++ b/setup.py @@ -7,8 +7,8 @@ import sys -if sys.version_info < (3, 8): - raise RuntimeError('asyncpg requires Python 3.8 or greater') +if sys.version_info < (3, 9): + raise RuntimeError('asyncpg requires Python 3.9 or greater') import os import os.path From 7a548166c2b23561915c481bd332013e3a415cfb Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Thu, 16 Oct 2025 08:55:45 -0700 Subject: [PATCH 184/193] Fix a couple of missed Python version guards --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 9bdcc378..feeebb14 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -2,7 +2,7 @@ name = "asyncpg" description = "An asyncio PostgreSQL driver" authors = [{name = "MagicStack Inc", email = "hello@magic.io"}] -requires-python = '>=3.8.0' +requires-python = '>=3.9.0' readme = "README.rst" license = "Apache-2.0" license-files = ["LICENSE"] From 6fe1c494ef5c3069fa9149c48bf9f8f2cd69f95e Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Thu, 16 Oct 2025 09:10:47 -0700 Subject: [PATCH 185/193] Move development deps away from extras and into dependency groups (#1280) --- .github/workflows/release.yml | 3 ++- .github/workflows/tests.yml | 6 ++++-- pyproject.toml | 4 +++- 3 files changed, 9 insertions(+), 4 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 31af4d27..263406cb 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -166,7 +166,8 @@ jobs: - name: Build docs run: | - pip install -e .[docs] + pip install --group docs + pip install -e . 
make htmldocs - name: Checkout gh-pages diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 1c55524a..de10a2c9 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -69,7 +69,8 @@ jobs: run: | [ "$RUNNER_OS" = "Linux" ] && .github/workflows/install-krb5.sh python -m pip install -U pip setuptools wheel - python -m pip install -e .[test] + python -m pip install --group test + python -m pip install -e . - name: Test if: "!steps.release.outputs.is_release" @@ -133,7 +134,8 @@ jobs: run: | [ "$RUNNER_OS" = "Linux" ] && .github/workflows/install-krb5.sh python -m pip install -U pip setuptools wheel - python -m pip install -e .[test] + python -m pip install --group test + python -m pip install -e . - name: Test if: "!steps.release.outputs.is_release" diff --git a/pyproject.toml b/pyproject.toml index feeebb14..131a7372 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -39,6 +39,8 @@ gssauth = [ 'gssapi; platform_system != "Windows"', 'sspilib; platform_system == "Windows"', ] + +[dependency-groups] test = [ 'flake8~=6.1', 'flake8-pyi~=24.1.0', @@ -72,7 +74,7 @@ include = ["asyncpg", "asyncpg.*"] [tool.cibuildwheel] build-frontend = "build" -test-extras = "test" +test-groups = "test" skip = "cp38-*" [tool.cibuildwheel.macos] From 9e42642b9110d206706697921b6e697a0972649d Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Tue, 21 Oct 2025 12:20:36 -0700 Subject: [PATCH 186/193] Add Python 3.14 support, experimental subinterpreter/freethreading support (#1279) The bulk of the changes here is a rewrite of `recordobj.c` to use modern CPython API to properly isolate the module (PEP 489, PEP 573, PEP 630). This, along with Cython flags, enables support for safely importing `asyncpg` in subinterpreters. The `Record` freelist is now thread-specific, so asyncpg should be thread-safe *at the C level*. Support for subinterpreters and freethreading is EXPERIMENTAL. 
--- .clang-format | 17 + .clangd | 4 + .github/workflows/tests.yml | 2 +- .gitignore | 2 + asyncpg/connection.py | 4 +- asyncpg/protocol/__init__.py | 3 +- asyncpg/protocol/codecs/base.pxd | 12 + asyncpg/protocol/codecs/base.pyx | 53 +- asyncpg/protocol/coreproto.pyx | 6 +- asyncpg/protocol/encodings.pyx | 2 +- asyncpg/protocol/pgtypes.pxi | 2 +- asyncpg/protocol/prepared_stmt.pyx | 8 +- asyncpg/protocol/protocol.pyi | 22 +- asyncpg/protocol/protocol.pyx | 26 +- asyncpg/protocol/record.pyi | 29 + asyncpg/protocol/record/pythoncapi_compat.h | 2559 +++++++++++++++++ .../record/pythoncapi_compat_extras.h | 72 + asyncpg/protocol/record/recordobj.c | 1094 ++++--- asyncpg/protocol/record/recordobj.h | 24 +- .../{record/__init__.pxd => recordcapi.pxd} | 7 +- pyproject.toml | 9 +- setup.py | 27 +- tests/test_record.py | 7 +- tests/test_subinterpreters.py | 66 + 24 files changed, 3538 insertions(+), 519 deletions(-) create mode 100644 .clang-format create mode 100644 .clangd create mode 100644 asyncpg/protocol/record.pyi create mode 100644 asyncpg/protocol/record/pythoncapi_compat.h create mode 100644 asyncpg/protocol/record/pythoncapi_compat_extras.h rename asyncpg/protocol/{record/__init__.pxd => recordcapi.pxd} (64%) create mode 100644 tests/test_subinterpreters.py diff --git a/.clang-format b/.clang-format new file mode 100644 index 00000000..b2bb93db --- /dev/null +++ b/.clang-format @@ -0,0 +1,17 @@ +# A clang-format style that approximates Python's PEP 7 +BasedOnStyle: Google +AlwaysBreakAfterReturnType: All +AllowShortIfStatementsOnASingleLine: false +AlignAfterOpenBracket: Align +BreakBeforeBraces: Stroustrup +ColumnLimit: 95 +DerivePointerAlignment: false +IndentWidth: 4 +Language: Cpp +PointerAlignment: Right +ReflowComments: true +SpaceBeforeParens: ControlStatements +SpacesInParentheses: false +TabWidth: 4 +UseTab: Never +SortIncludes: false diff --git a/.clangd b/.clangd new file mode 100644 index 00000000..6c88d686 --- /dev/null +++ b/.clangd @@ -0,0 +1,4 @@ 
+Diagnostics: + Includes: + IgnoreHeader: + - "pythoncapi_compat.*\\.h" diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index de10a2c9..451aca9f 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -17,7 +17,7 @@ jobs: # job. strategy: matrix: - python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"] + python-version: ["3.9", "3.10", "3.11", "3.12", "3.13", "3.14", "3.14t"] os: [ubuntu-latest, macos-latest, windows-latest] loop: [asyncio, uvloop] exclude: diff --git a/.gitignore b/.gitignore index 53c0daa1..ec9c96ac 100644 --- a/.gitignore +++ b/.gitignore @@ -33,6 +33,8 @@ docs/_build /.pytest_cache/ /.eggs /.vscode +/.zed /.mypy_cache /.venv* /.tox +/compile_commands.json diff --git a/asyncpg/connection.py b/asyncpg/connection.py index 3e7da7b1..71fb04f8 100644 --- a/asyncpg/connection.py +++ b/asyncpg/connection.py @@ -2751,8 +2751,8 @@ def _check_record_class(record_class): and issubclass(record_class, protocol.Record) ): if ( - record_class.__new__ is not object.__new__ - or record_class.__init__ is not object.__init__ + record_class.__new__ is not protocol.Record.__new__ + or record_class.__init__ is not protocol.Record.__init__ ): raise exceptions.InterfaceError( 'record_class must not redefine __new__ or __init__' diff --git a/asyncpg/protocol/__init__.py b/asyncpg/protocol/__init__.py index af9287bd..043454db 100644 --- a/asyncpg/protocol/__init__.py +++ b/asyncpg/protocol/__init__.py @@ -8,4 +8,5 @@ from __future__ import annotations -from .protocol import Protocol, Record, NO_TIMEOUT, BUILTIN_TYPE_NAME_MAP +from .protocol import Protocol, NO_TIMEOUT, BUILTIN_TYPE_NAME_MAP +from .record import Record diff --git a/asyncpg/protocol/codecs/base.pxd b/asyncpg/protocol/codecs/base.pxd index 1cfed833..f5492590 100644 --- a/asyncpg/protocol/codecs/base.pxd +++ b/asyncpg/protocol/codecs/base.pxd @@ -22,6 +22,18 @@ ctypedef object (*codec_decode_func)(Codec codec, FRBuffer *buf) +cdef class CodecMap: + cdef: + void** 
binary_codec_map + void** text_codec_map + dict extra_codecs + + cdef inline void *get_binary_codec_ptr(self, uint32_t idx) + cdef inline void set_binary_codec_ptr(self, uint32_t idx, void *ptr) + cdef inline void *get_text_codec_ptr(self, uint32_t idx) + cdef inline void set_text_codec_ptr(self, uint32_t idx, void *ptr) + + cdef enum CodecType: CODEC_UNDEFINED = 0 CODEC_C = 1 diff --git a/asyncpg/protocol/codecs/base.pyx b/asyncpg/protocol/codecs/base.pyx index e8b44c74..009598a8 100644 --- a/asyncpg/protocol/codecs/base.pyx +++ b/asyncpg/protocol/codecs/base.pyx @@ -11,9 +11,33 @@ import asyncpg from asyncpg import exceptions -cdef void* binary_codec_map[(MAXSUPPORTEDOID + 1) * 2] -cdef void* text_codec_map[(MAXSUPPORTEDOID + 1) * 2] -cdef dict EXTRA_CODECS = {} +# The class indirection is needed because Cython +# does not (as of 3.1.0) store global cdef variables +# in module state. +@cython.final +cdef class CodecMap: + + def __cinit__(self): + self.extra_codecs = {} + self.binary_codec_map = cpython.PyMem_Calloc( + (MAXSUPPORTEDOID + 1) * 2, sizeof(void *)) + self.text_codec_map = cpython.PyMem_Calloc( + (MAXSUPPORTEDOID + 1) * 2, sizeof(void *)) + + cdef inline void *get_binary_codec_ptr(self, uint32_t idx): + return self.binary_codec_map[idx] + + cdef inline void set_binary_codec_ptr(self, uint32_t idx, void *ptr): + self.binary_codec_map[idx] = ptr + + cdef inline void *get_text_codec_ptr(self, uint32_t idx): + return self.text_codec_map[idx] + + cdef inline void set_text_codec_ptr(self, uint32_t idx, void *ptr): + self.text_codec_map[idx] = ptr + + +codec_map = CodecMap() @cython.final @@ -67,7 +91,7 @@ cdef class Codec: ) if element_names is not None: - self.record_desc = record.ApgRecordDesc_New( + self.record_desc = RecordDescriptor( element_names, tuple(element_names)) else: self.record_desc = None @@ -271,7 +295,7 @@ cdef class Codec: schema=self.schema, data_type=self.name, ) - result = record.ApgRecord_New(asyncpg.Record, self.record_desc, 
elem_count) + result = self.record_desc.make_record(asyncpg.Record, elem_count) for i in range(elem_count): elem_typ = self.element_type_oids[i] received_elem_typ = hton.unpack_int32(frb_read(buf, 4)) @@ -301,7 +325,7 @@ cdef class Codec: settings, frb_slice_from(&elem_buf, buf, elem_len)) cpython.Py_INCREF(elem) - record.ApgRecord_SET_ITEM(result, i, elem) + recordcapi.ApgRecord_SET_ITEM(result, i, elem) return result @@ -811,9 +835,9 @@ cdef inline Codec get_core_codec( if oid > MAXSUPPORTEDOID: return None if format == PG_FORMAT_BINARY: - ptr = binary_codec_map[oid * xformat] + ptr = (codec_map).get_binary_codec_ptr(oid * xformat) elif format == PG_FORMAT_TEXT: - ptr = text_codec_map[oid * xformat] + ptr = (codec_map).get_text_codec_ptr(oid * xformat) if ptr is NULL: return None @@ -839,7 +863,10 @@ cdef inline Codec get_any_core_codec( cdef inline int has_core_codec(uint32_t oid): - return binary_codec_map[oid] != NULL or text_codec_map[oid] != NULL + return ( + (codec_map).get_binary_codec_ptr(oid) != NULL + or (codec_map).get_text_codec_ptr(oid) != NULL + ) cdef register_core_codec(uint32_t oid, @@ -867,9 +894,9 @@ cdef register_core_codec(uint32_t oid, cpython.Py_INCREF(codec) # immortalize if format == PG_FORMAT_BINARY: - binary_codec_map[oid * xformat] = codec + (codec_map).set_binary_codec_ptr(oid * xformat, codec) elif format == PG_FORMAT_TEXT: - text_codec_map[oid * xformat] = codec + (codec_map).set_text_codec_ptr(oid * xformat, codec) else: raise exceptions.InternalClientError( 'invalid data format: {}'.format(format)) @@ -888,8 +915,8 @@ cdef register_extra_codec(str name, codec = Codec(INVALIDOID) codec.init(name, None, kind, CODEC_C, format, PG_XFORMAT_OBJECT, encode, decode, None, None, None, None, None, None, None, 0) - EXTRA_CODECS[name, format] = codec + (codec_map).extra_codecs[name, format] = codec cdef inline Codec get_extra_codec(str name, ServerDataFormat format): - return EXTRA_CODECS.get((name, format)) + return 
(codec_map).extra_codecs.get((name, format)) diff --git a/asyncpg/protocol/coreproto.pyx b/asyncpg/protocol/coreproto.pyx index 19857878..da96c412 100644 --- a/asyncpg/protocol/coreproto.pyx +++ b/asyncpg/protocol/coreproto.pyx @@ -11,7 +11,7 @@ import hashlib include "scram.pyx" -cdef dict AUTH_METHOD_NAME = { +AUTH_METHOD_NAME = { AUTH_REQUIRED_KERBEROS: 'kerberosv5', AUTH_REQUIRED_PASSWORD: 'password', AUTH_REQUIRED_PASSWORDMD5: 'md5', @@ -1229,5 +1229,5 @@ cdef class CoreProtocol: pass -cdef bytes SYNC_MESSAGE = bytes(WriteBuffer.new_message(b'S').end_message()) -cdef bytes FLUSH_MESSAGE = bytes(WriteBuffer.new_message(b'H').end_message()) +SYNC_MESSAGE = bytes(WriteBuffer.new_message(b'S').end_message()) +FLUSH_MESSAGE = bytes(WriteBuffer.new_message(b'H').end_message()) diff --git a/asyncpg/protocol/encodings.pyx b/asyncpg/protocol/encodings.pyx index dcd692b7..1463dbe4 100644 --- a/asyncpg/protocol/encodings.pyx +++ b/asyncpg/protocol/encodings.pyx @@ -10,7 +10,7 @@ https://www.postgresql.org/docs/current/static/multibyte.html#CHARSET-TABLE ''' -cdef dict ENCODINGS_MAP = { +ENCODINGS_MAP = { 'abc': 'cp1258', 'alt': 'cp866', 'euc_cn': 'euccn', diff --git a/asyncpg/protocol/pgtypes.pxi b/asyncpg/protocol/pgtypes.pxi index e9bb782f..86f8e663 100644 --- a/asyncpg/protocol/pgtypes.pxi +++ b/asyncpg/protocol/pgtypes.pxi @@ -113,7 +113,7 @@ DEF ANYCOMPATIBLEARRAYOID = 5078 DEF ANYCOMPATIBLENONARRAYOID = 5079 DEF ANYCOMPATIBLERANGEOID = 5080 -cdef ARRAY_TYPES = (_TEXTOID, _OIDOID,) +ARRAY_TYPES = {_TEXTOID, _OIDOID} BUILTIN_TYPE_OID_MAP = { ABSTIMEOID: 'abstime', diff --git a/asyncpg/protocol/prepared_stmt.pyx b/asyncpg/protocol/prepared_stmt.pyx index cb0afa24..4145c664 100644 --- a/asyncpg/protocol/prepared_stmt.pyx +++ b/asyncpg/protocol/prepared_stmt.pyx @@ -230,7 +230,7 @@ cdef class PreparedStatementState: return if self.cols_num == 0: - self.cols_desc = record.ApgRecordDesc_New({}, ()) + self.cols_desc = RecordDescriptor({}, ()) return cols_mapping = 
collections.OrderedDict() @@ -252,7 +252,7 @@ cdef class PreparedStatementState: codecs.append(codec) - self.cols_desc = record.ApgRecordDesc_New( + self.cols_desc = RecordDescriptor( cols_mapping, tuple(cols_names)) self.rows_codecs = tuple(codecs) @@ -310,7 +310,7 @@ cdef class PreparedStatementState: 'different from what was described ({})'.format( fnum, self.cols_num)) - dec_row = record.ApgRecord_New(self.record_class, self.cols_desc, fnum) + dec_row = self.cols_desc.make_record(self.record_class, fnum) for i in range(fnum): flen = hton.unpack_int32(frb_read(&rbuf, 4)) @@ -333,7 +333,7 @@ cdef class PreparedStatementState: frb_set_len(&rbuf, bl - flen) cpython.Py_INCREF(val) - record.ApgRecord_SET_ITEM(dec_row, i, val) + recordcapi.ApgRecord_SET_ITEM(dec_row, i, val) if frb_get_len(&rbuf) != 0: raise BufferError('unexpected trailing {} bytes in buffer'.format( diff --git a/asyncpg/protocol/protocol.pyi b/asyncpg/protocol/protocol.pyi index b81d13cd..34db6440 100644 --- a/asyncpg/protocol/protocol.pyi +++ b/asyncpg/protocol/protocol.pyi @@ -2,7 +2,7 @@ import asyncio import asyncio.protocols import hmac from codecs import CodecInfo -from collections.abc import Callable, Iterable, Iterator, Sequence +from collections.abc import Callable, Iterable, Sequence from hashlib import md5, sha256 from typing import ( Any, @@ -22,8 +22,8 @@ import asyncpg.pgproto.pgproto from ..connect_utils import _ConnectionParameters from ..pgproto.pgproto import WriteBuffer from ..types import Attribute, Type +from .record import Record -_T = TypeVar('_T') _Record = TypeVar('_Record', bound=Record) _OtherRecord = TypeVar('_OtherRecord', bound=Record) _PreparedStatementState = TypeVar( @@ -254,24 +254,6 @@ class DataCodecConfig: class Protocol(BaseProtocol[_Record], asyncio.protocols.Protocol): ... -class Record: - @overload - def get(self, key: str) -> Any | None: ... - @overload - def get(self, key: str, default: _T) -> Any | _T: ... 
- def items(self) -> Iterator[tuple[str, Any]]: ... - def keys(self) -> Iterator[str]: ... - def values(self) -> Iterator[Any]: ... - @overload - def __getitem__(self, index: str) -> Any: ... - @overload - def __getitem__(self, index: int) -> Any: ... - @overload - def __getitem__(self, index: slice) -> tuple[Any, ...]: ... - def __iter__(self) -> Iterator[Any]: ... - def __contains__(self, x: object) -> bool: ... - def __len__(self) -> int: ... - class Timer: def __init__(self, budget: float | None) -> None: ... def __enter__(self) -> None: ... diff --git a/asyncpg/protocol/protocol.pyx b/asyncpg/protocol/protocol.pyx index bd2ad05c..acce4e9f 100644 --- a/asyncpg/protocol/protocol.pyx +++ b/asyncpg/protocol/protocol.pyx @@ -34,7 +34,7 @@ from asyncpg.pgproto.pgproto cimport ( from asyncpg.pgproto cimport pgproto from asyncpg.protocol cimport cpythonx -from asyncpg.protocol cimport record +from asyncpg.protocol cimport recordcapi from libc.stdint cimport int8_t, uint8_t, int16_t, uint16_t, \ int32_t, uint32_t, int64_t, uint64_t, \ @@ -46,6 +46,7 @@ from asyncpg import types as apg_types from asyncpg import exceptions as apg_exc from asyncpg.pgproto cimport hton +from asyncpg.protocol.record import Record, RecordDescriptor include "consts.pxi" @@ -135,7 +136,6 @@ cdef class BaseProtocol(CoreProtocol): self.is_reading = False self.transport.pause_reading() - @cython.iterable_coroutine async def prepare(self, stmt_name, query, timeout, *, PreparedStatementState state=None, @@ -164,7 +164,6 @@ cdef class BaseProtocol(CoreProtocol): finally: return await waiter - @cython.iterable_coroutine async def bind_execute( self, state: PreparedStatementState, @@ -205,7 +204,6 @@ cdef class BaseProtocol(CoreProtocol): finally: return await waiter - @cython.iterable_coroutine async def bind_execute_many( self, state: PreparedStatementState, @@ -267,7 +265,6 @@ cdef class BaseProtocol(CoreProtocol): finally: return await waiter - @cython.iterable_coroutine async def bind(self, 
PreparedStatementState state, args, str portal_name, timeout): @@ -296,7 +293,6 @@ cdef class BaseProtocol(CoreProtocol): finally: return await waiter - @cython.iterable_coroutine async def execute(self, PreparedStatementState state, str portal_name, int limit, return_extra, timeout): @@ -326,7 +322,6 @@ cdef class BaseProtocol(CoreProtocol): finally: return await waiter - @cython.iterable_coroutine async def close_portal(self, str portal_name, timeout): if self.cancel_waiter is not None: @@ -349,7 +344,6 @@ cdef class BaseProtocol(CoreProtocol): finally: return await waiter - @cython.iterable_coroutine async def query(self, query, timeout): if self.cancel_waiter is not None: await self.cancel_waiter @@ -374,7 +368,6 @@ cdef class BaseProtocol(CoreProtocol): finally: return await waiter - @cython.iterable_coroutine async def copy_out(self, copy_stmt, sink, timeout): if self.cancel_waiter is not None: await self.cancel_waiter @@ -428,7 +421,6 @@ cdef class BaseProtocol(CoreProtocol): return status_msg - @cython.iterable_coroutine async def copy_in(self, copy_stmt, reader, data, records, PreparedStatementState record_stmt, timeout): cdef: @@ -567,7 +559,6 @@ cdef class BaseProtocol(CoreProtocol): return status_msg - @cython.iterable_coroutine async def close_statement(self, PreparedStatementState state, timeout): if self.cancel_waiter is not None: await self.cancel_waiter @@ -608,7 +599,6 @@ cdef class BaseProtocol(CoreProtocol): self.transport.abort() self.transport = None - @cython.iterable_coroutine async def close(self, timeout): if self.closing: return @@ -751,7 +741,6 @@ cdef class BaseProtocol(CoreProtocol): self.cancel_sent_waiter is not None ) - @cython.iterable_coroutine async def _wait_for_cancellation(self): if self.cancel_sent_waiter is not None: await self.cancel_sent_waiter @@ -1049,17 +1038,14 @@ def _create_record(object mapping, tuple elems): int32_t i if mapping is None: - desc = record.ApgRecordDesc_New({}, ()) + desc = RecordDescriptor({}, ()) 
else: - desc = record.ApgRecordDesc_New( + desc = RecordDescriptor( mapping, tuple(mapping) if mapping else ()) - rec = record.ApgRecord_New(Record, desc, len(elems)) + rec = desc.make_record(Record, len(elems)) for i in range(len(elems)): elem = elems[i] cpython.Py_INCREF(elem) - record.ApgRecord_SET_ITEM(rec, i, elem) + recordcapi.ApgRecord_SET_ITEM(rec, i, elem) return rec - - -Record = record.ApgRecord_InitTypes() diff --git a/asyncpg/protocol/record.pyi b/asyncpg/protocol/record.pyi new file mode 100644 index 00000000..308f3109 --- /dev/null +++ b/asyncpg/protocol/record.pyi @@ -0,0 +1,29 @@ +from typing import ( + Any, + TypeVar, + overload, +) + +from collections.abc import Iterator + + +_T = TypeVar("_T") + + +class Record: + @overload + def get(self, key: str) -> Any | None: ... + @overload + def get(self, key: str, default: _T) -> Any | _T: ... + def items(self) -> Iterator[tuple[str, Any]]: ... + def keys(self) -> Iterator[str]: ... + def values(self) -> Iterator[Any]: ... + @overload + def __getitem__(self, index: str) -> Any: ... + @overload + def __getitem__(self, index: int) -> Any: ... + @overload + def __getitem__(self, index: slice) -> tuple[Any, ...]: ... + def __iter__(self) -> Iterator[Any]: ... + def __contains__(self, x: object) -> bool: ... + def __len__(self) -> int: ... diff --git a/asyncpg/protocol/record/pythoncapi_compat.h b/asyncpg/protocol/record/pythoncapi_compat.h new file mode 100644 index 00000000..6a7037ef --- /dev/null +++ b/asyncpg/protocol/record/pythoncapi_compat.h @@ -0,0 +1,2559 @@ +// Header file providing new C API functions to old Python versions. +// +// File distributed under the Zero Clause BSD (0BSD) license. +// Copyright Contributors to the pythoncapi_compat project. 
+//
+// Homepage:
+// https://github.com/python/pythoncapi_compat
+//
+// Latest version:
+// https://raw.githubusercontent.com/python/pythoncapi-compat/main/pythoncapi_compat.h
+//
+// SPDX-License-Identifier: 0BSD
+
+#ifndef PYTHONCAPI_COMPAT
+#define PYTHONCAPI_COMPAT
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#include <Python.h>
+#include <stddef.h>  // offsetof()
+
+// Python 3.11.0b4 added PyFrame_Back() to Python.h
+#if PY_VERSION_HEX < 0x030b00B4 && !defined(PYPY_VERSION)
+# include "frameobject.h"  // PyFrameObject, PyFrame_GetBack()
+#endif
+#if PY_VERSION_HEX < 0x030C00A3
+# include <structmember.h>  // T_SHORT, READONLY
+#endif
+
+
+#ifndef _Py_CAST
+# define _Py_CAST(type, expr) ((type)(expr))
+#endif
+
+// Static inline functions should use _Py_NULL rather than using directly NULL
+// to prevent C++ compiler warnings. On C23 and newer and on C++11 and newer,
+// _Py_NULL is defined as nullptr.
+#ifndef _Py_NULL
+# if (defined (__STDC_VERSION__) && __STDC_VERSION__ > 201710L) \
+        || (defined(__cplusplus) && __cplusplus >= 201103)
+# define _Py_NULL nullptr
+# else
+# define _Py_NULL NULL
+# endif
+#endif
+
+// Cast argument to PyObject* type.
+#ifndef _PyObject_CAST +# define _PyObject_CAST(op) _Py_CAST(PyObject*, op) +#endif + +#ifndef Py_BUILD_ASSERT +# define Py_BUILD_ASSERT(cond) \ + do { \ + (void)sizeof(char [1 - 2 * !(cond)]); \ + } while(0) +#endif + + +// bpo-42262 added Py_NewRef() to Python 3.10.0a3 +#if PY_VERSION_HEX < 0x030A00A3 && !defined(Py_NewRef) +static inline PyObject* _Py_NewRef(PyObject *obj) +{ + Py_INCREF(obj); + return obj; +} +#define Py_NewRef(obj) _Py_NewRef(_PyObject_CAST(obj)) +#endif + + +// bpo-42262 added Py_XNewRef() to Python 3.10.0a3 +#if PY_VERSION_HEX < 0x030A00A3 && !defined(Py_XNewRef) +static inline PyObject* _Py_XNewRef(PyObject *obj) +{ + Py_XINCREF(obj); + return obj; +} +#define Py_XNewRef(obj) _Py_XNewRef(_PyObject_CAST(obj)) +#endif + + +// bpo-39573 added Py_SET_REFCNT() to Python 3.9.0a4 +#if PY_VERSION_HEX < 0x030900A4 && !defined(Py_SET_REFCNT) +static inline void _Py_SET_REFCNT(PyObject *ob, Py_ssize_t refcnt) +{ + ob->ob_refcnt = refcnt; +} +#define Py_SET_REFCNT(ob, refcnt) _Py_SET_REFCNT(_PyObject_CAST(ob), refcnt) +#endif + + +// Py_SETREF() and Py_XSETREF() were added to Python 3.5.2. +// It is excluded from the limited C API. +#if (PY_VERSION_HEX < 0x03050200 && !defined(Py_SETREF)) && !defined(Py_LIMITED_API) +#define Py_SETREF(dst, src) \ + do { \ + PyObject **_tmp_dst_ptr = _Py_CAST(PyObject**, &(dst)); \ + PyObject *_tmp_dst = (*_tmp_dst_ptr); \ + *_tmp_dst_ptr = _PyObject_CAST(src); \ + Py_DECREF(_tmp_dst); \ + } while (0) + +#define Py_XSETREF(dst, src) \ + do { \ + PyObject **_tmp_dst_ptr = _Py_CAST(PyObject**, &(dst)); \ + PyObject *_tmp_dst = (*_tmp_dst_ptr); \ + *_tmp_dst_ptr = _PyObject_CAST(src); \ + Py_XDECREF(_tmp_dst); \ + } while (0) +#endif + + +// bpo-43753 added Py_Is(), Py_IsNone(), Py_IsTrue() and Py_IsFalse() +// to Python 3.10.0b1. 
+#if PY_VERSION_HEX < 0x030A00B1 && !defined(Py_Is) +# define Py_Is(x, y) ((x) == (y)) +#endif +#if PY_VERSION_HEX < 0x030A00B1 && !defined(Py_IsNone) +# define Py_IsNone(x) Py_Is(x, Py_None) +#endif +#if (PY_VERSION_HEX < 0x030A00B1 || defined(PYPY_VERSION)) && !defined(Py_IsTrue) +# define Py_IsTrue(x) Py_Is(x, Py_True) +#endif +#if (PY_VERSION_HEX < 0x030A00B1 || defined(PYPY_VERSION)) && !defined(Py_IsFalse) +# define Py_IsFalse(x) Py_Is(x, Py_False) +#endif + + +// bpo-39573 added Py_SET_TYPE() to Python 3.9.0a4 +#if PY_VERSION_HEX < 0x030900A4 && !defined(Py_SET_TYPE) +static inline void _Py_SET_TYPE(PyObject *ob, PyTypeObject *type) +{ + ob->ob_type = type; +} +#define Py_SET_TYPE(ob, type) _Py_SET_TYPE(_PyObject_CAST(ob), type) +#endif + + +// bpo-39573 added Py_SET_SIZE() to Python 3.9.0a4 +#if PY_VERSION_HEX < 0x030900A4 && !defined(Py_SET_SIZE) +static inline void _Py_SET_SIZE(PyVarObject *ob, Py_ssize_t size) +{ + ob->ob_size = size; +} +#define Py_SET_SIZE(ob, size) _Py_SET_SIZE((PyVarObject*)(ob), size) +#endif + + +// bpo-40421 added PyFrame_GetCode() to Python 3.9.0b1 +#if PY_VERSION_HEX < 0x030900B1 || defined(PYPY_VERSION) +static inline PyCodeObject* PyFrame_GetCode(PyFrameObject *frame) +{ + assert(frame != _Py_NULL); + assert(frame->f_code != _Py_NULL); + return _Py_CAST(PyCodeObject*, Py_NewRef(frame->f_code)); +} +#endif + +static inline PyCodeObject* _PyFrame_GetCodeBorrow(PyFrameObject *frame) +{ + PyCodeObject *code = PyFrame_GetCode(frame); + Py_DECREF(code); + return code; +} + + +// bpo-40421 added PyFrame_GetBack() to Python 3.9.0b1 +#if PY_VERSION_HEX < 0x030900B1 && !defined(PYPY_VERSION) +static inline PyFrameObject* PyFrame_GetBack(PyFrameObject *frame) +{ + assert(frame != _Py_NULL); + return _Py_CAST(PyFrameObject*, Py_XNewRef(frame->f_back)); +} +#endif + +#if !defined(PYPY_VERSION) +static inline PyFrameObject* _PyFrame_GetBackBorrow(PyFrameObject *frame) +{ + PyFrameObject *back = PyFrame_GetBack(frame); + Py_XDECREF(back); + 
return back; +} +#endif + + +// bpo-40421 added PyFrame_GetLocals() to Python 3.11.0a7 +#if PY_VERSION_HEX < 0x030B00A7 && !defined(PYPY_VERSION) +static inline PyObject* PyFrame_GetLocals(PyFrameObject *frame) +{ +#if PY_VERSION_HEX >= 0x030400B1 + if (PyFrame_FastToLocalsWithError(frame) < 0) { + return NULL; + } +#else + PyFrame_FastToLocals(frame); +#endif + return Py_NewRef(frame->f_locals); +} +#endif + + +// bpo-40421 added PyFrame_GetGlobals() to Python 3.11.0a7 +#if PY_VERSION_HEX < 0x030B00A7 && !defined(PYPY_VERSION) +static inline PyObject* PyFrame_GetGlobals(PyFrameObject *frame) +{ + return Py_NewRef(frame->f_globals); +} +#endif + + +// bpo-40421 added PyFrame_GetBuiltins() to Python 3.11.0a7 +#if PY_VERSION_HEX < 0x030B00A7 && !defined(PYPY_VERSION) +static inline PyObject* PyFrame_GetBuiltins(PyFrameObject *frame) +{ + return Py_NewRef(frame->f_builtins); +} +#endif + + +// bpo-40421 added PyFrame_GetLasti() to Python 3.11.0b1 +#if PY_VERSION_HEX < 0x030B00B1 && !defined(PYPY_VERSION) +static inline int PyFrame_GetLasti(PyFrameObject *frame) +{ +#if PY_VERSION_HEX >= 0x030A00A7 + // bpo-27129: Since Python 3.10.0a7, f_lasti is an instruction offset, + // not a bytes offset anymore. Python uses 16-bit "wordcode" (2 bytes) + // instructions. 
+ if (frame->f_lasti < 0) { + return -1; + } + return frame->f_lasti * 2; +#else + return frame->f_lasti; +#endif +} +#endif + + +// gh-91248 added PyFrame_GetVar() to Python 3.12.0a2 +#if PY_VERSION_HEX < 0x030C00A2 && !defined(PYPY_VERSION) +static inline PyObject* PyFrame_GetVar(PyFrameObject *frame, PyObject *name) +{ + PyObject *locals, *value; + + locals = PyFrame_GetLocals(frame); + if (locals == NULL) { + return NULL; + } +#if PY_VERSION_HEX >= 0x03000000 + value = PyDict_GetItemWithError(locals, name); +#else + value = _PyDict_GetItemWithError(locals, name); +#endif + Py_DECREF(locals); + + if (value == NULL) { + if (PyErr_Occurred()) { + return NULL; + } +#if PY_VERSION_HEX >= 0x03000000 + PyErr_Format(PyExc_NameError, "variable %R does not exist", name); +#else + PyErr_SetString(PyExc_NameError, "variable does not exist"); +#endif + return NULL; + } + return Py_NewRef(value); +} +#endif + + +// gh-91248 added PyFrame_GetVarString() to Python 3.12.0a2 +#if PY_VERSION_HEX < 0x030C00A2 && !defined(PYPY_VERSION) +static inline PyObject* +PyFrame_GetVarString(PyFrameObject *frame, const char *name) +{ + PyObject *name_obj, *value; +#if PY_VERSION_HEX >= 0x03000000 + name_obj = PyUnicode_FromString(name); +#else + name_obj = PyString_FromString(name); +#endif + if (name_obj == NULL) { + return NULL; + } + value = PyFrame_GetVar(frame, name_obj); + Py_DECREF(name_obj); + return value; +} +#endif + + +// bpo-39947 added PyThreadState_GetInterpreter() to Python 3.9.0a5 +#if PY_VERSION_HEX < 0x030900A5 || (defined(PYPY_VERSION) && PY_VERSION_HEX < 0x030B0000) +static inline PyInterpreterState * +PyThreadState_GetInterpreter(PyThreadState *tstate) +{ + assert(tstate != _Py_NULL); + return tstate->interp; +} +#endif + + +// bpo-40429 added PyThreadState_GetFrame() to Python 3.9.0b1 +#if PY_VERSION_HEX < 0x030900B1 && !defined(PYPY_VERSION) +static inline PyFrameObject* PyThreadState_GetFrame(PyThreadState *tstate) +{ + assert(tstate != _Py_NULL); + return 
_Py_CAST(PyFrameObject *, Py_XNewRef(tstate->frame)); +} +#endif + +#if !defined(PYPY_VERSION) +static inline PyFrameObject* +_PyThreadState_GetFrameBorrow(PyThreadState *tstate) +{ + PyFrameObject *frame = PyThreadState_GetFrame(tstate); + Py_XDECREF(frame); + return frame; +} +#endif + + +// bpo-39947 added PyInterpreterState_Get() to Python 3.9.0a5 +#if PY_VERSION_HEX < 0x030900A5 || defined(PYPY_VERSION) +static inline PyInterpreterState* PyInterpreterState_Get(void) +{ + PyThreadState *tstate; + PyInterpreterState *interp; + + tstate = PyThreadState_GET(); + if (tstate == _Py_NULL) { + Py_FatalError("GIL released (tstate is NULL)"); + } + interp = tstate->interp; + if (interp == _Py_NULL) { + Py_FatalError("no current interpreter"); + } + return interp; +} +#endif + + +// bpo-39947 added PyInterpreterState_Get() to Python 3.9.0a6 +#if 0x030700A1 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x030900A6 && !defined(PYPY_VERSION) +static inline uint64_t PyThreadState_GetID(PyThreadState *tstate) +{ + assert(tstate != _Py_NULL); + return tstate->id; +} +#endif + +// bpo-43760 added PyThreadState_EnterTracing() to Python 3.11.0a2 +#if PY_VERSION_HEX < 0x030B00A2 && !defined(PYPY_VERSION) +static inline void PyThreadState_EnterTracing(PyThreadState *tstate) +{ + tstate->tracing++; +#if PY_VERSION_HEX >= 0x030A00A1 + tstate->cframe->use_tracing = 0; +#else + tstate->use_tracing = 0; +#endif +} +#endif + +// bpo-43760 added PyThreadState_LeaveTracing() to Python 3.11.0a2 +#if PY_VERSION_HEX < 0x030B00A2 && !defined(PYPY_VERSION) +static inline void PyThreadState_LeaveTracing(PyThreadState *tstate) +{ + int use_tracing = (tstate->c_tracefunc != _Py_NULL + || tstate->c_profilefunc != _Py_NULL); + tstate->tracing--; +#if PY_VERSION_HEX >= 0x030A00A1 + tstate->cframe->use_tracing = use_tracing; +#else + tstate->use_tracing = use_tracing; +#endif +} +#endif + + +// bpo-37194 added PyObject_CallNoArgs() to Python 3.9.0a1 +// PyObject_CallNoArgs() added to PyPy 3.9.16-v7.3.11 +#if 
!defined(PyObject_CallNoArgs) && PY_VERSION_HEX < 0x030900A1 +static inline PyObject* PyObject_CallNoArgs(PyObject *func) +{ + return PyObject_CallFunctionObjArgs(func, NULL); +} +#endif + + +// bpo-39245 made PyObject_CallOneArg() public (previously called +// _PyObject_CallOneArg) in Python 3.9.0a4 +// PyObject_CallOneArg() added to PyPy 3.9.16-v7.3.11 +#if !defined(PyObject_CallOneArg) && PY_VERSION_HEX < 0x030900A4 +static inline PyObject* PyObject_CallOneArg(PyObject *func, PyObject *arg) +{ + return PyObject_CallFunctionObjArgs(func, arg, NULL); +} +#endif + + +// bpo-1635741 added PyModule_AddObjectRef() to Python 3.10.0a3 +#if PY_VERSION_HEX < 0x030A00A3 +static inline int +PyModule_AddObjectRef(PyObject *module, const char *name, PyObject *value) +{ + int res; + + if (!value && !PyErr_Occurred()) { + // PyModule_AddObject() raises TypeError in this case + PyErr_SetString(PyExc_SystemError, + "PyModule_AddObjectRef() must be called " + "with an exception raised if value is NULL"); + return -1; + } + + Py_XINCREF(value); + res = PyModule_AddObject(module, name, value); + if (res < 0) { + Py_XDECREF(value); + } + return res; +} +#endif + + +// bpo-40024 added PyModule_AddType() to Python 3.9.0a5 +#if PY_VERSION_HEX < 0x030900A5 +static inline int PyModule_AddType(PyObject *module, PyTypeObject *type) +{ + const char *name, *dot; + + if (PyType_Ready(type) < 0) { + return -1; + } + + // inline _PyType_Name() + name = type->tp_name; + assert(name != _Py_NULL); + dot = strrchr(name, '.'); + if (dot != _Py_NULL) { + name = dot + 1; + } + + return PyModule_AddObjectRef(module, name, _PyObject_CAST(type)); +} +#endif + + +// bpo-40241 added PyObject_GC_IsTracked() to Python 3.9.0a6. +// bpo-4688 added _PyObject_GC_IS_TRACKED() to Python 2.7.0a2. 
+#if PY_VERSION_HEX < 0x030900A6 && !defined(PYPY_VERSION) +static inline int PyObject_GC_IsTracked(PyObject* obj) +{ + return (PyObject_IS_GC(obj) && _PyObject_GC_IS_TRACKED(obj)); +} +#endif + +// bpo-40241 added PyObject_GC_IsFinalized() to Python 3.9.0a6. +// bpo-18112 added _PyGCHead_FINALIZED() to Python 3.4.0 final. +#if PY_VERSION_HEX < 0x030900A6 && PY_VERSION_HEX >= 0x030400F0 && !defined(PYPY_VERSION) +static inline int PyObject_GC_IsFinalized(PyObject *obj) +{ + PyGC_Head *gc = _Py_CAST(PyGC_Head*, obj) - 1; + return (PyObject_IS_GC(obj) && _PyGCHead_FINALIZED(gc)); +} +#endif + + +// bpo-39573 added Py_IS_TYPE() to Python 3.9.0a4 +#if PY_VERSION_HEX < 0x030900A4 && !defined(Py_IS_TYPE) +static inline int _Py_IS_TYPE(PyObject *ob, PyTypeObject *type) { + return Py_TYPE(ob) == type; +} +#define Py_IS_TYPE(ob, type) _Py_IS_TYPE(_PyObject_CAST(ob), type) +#endif + + +// bpo-46906 added PyFloat_Pack2() and PyFloat_Unpack2() to Python 3.11a7. +// bpo-11734 added _PyFloat_Pack2() and _PyFloat_Unpack2() to Python 3.6.0b1. +// Python 3.11a2 moved _PyFloat_Pack2() and _PyFloat_Unpack2() to the internal +// C API: Python 3.11a2-3.11a6 versions are not supported. +#if 0x030600B1 <= PY_VERSION_HEX && PY_VERSION_HEX <= 0x030B00A1 && !defined(PYPY_VERSION) +static inline int PyFloat_Pack2(double x, char *p, int le) +{ return _PyFloat_Pack2(x, (unsigned char*)p, le); } + +static inline double PyFloat_Unpack2(const char *p, int le) +{ return _PyFloat_Unpack2((const unsigned char *)p, le); } +#endif + + +// bpo-46906 added PyFloat_Pack4(), PyFloat_Pack8(), PyFloat_Unpack4() and +// PyFloat_Unpack8() to Python 3.11a7. +// Python 3.11a2 moved _PyFloat_Pack4(), _PyFloat_Pack8(), _PyFloat_Unpack4() +// and _PyFloat_Unpack8() to the internal C API: Python 3.11a2-3.11a6 versions +// are not supported. 
+#if PY_VERSION_HEX <= 0x030B00A1 && !defined(PYPY_VERSION) +static inline int PyFloat_Pack4(double x, char *p, int le) +{ return _PyFloat_Pack4(x, (unsigned char*)p, le); } + +static inline int PyFloat_Pack8(double x, char *p, int le) +{ return _PyFloat_Pack8(x, (unsigned char*)p, le); } + +static inline double PyFloat_Unpack4(const char *p, int le) +{ return _PyFloat_Unpack4((const unsigned char *)p, le); } + +static inline double PyFloat_Unpack8(const char *p, int le) +{ return _PyFloat_Unpack8((const unsigned char *)p, le); } +#endif + + +// gh-92154 added PyCode_GetCode() to Python 3.11.0b1 +#if PY_VERSION_HEX < 0x030B00B1 && !defined(PYPY_VERSION) +static inline PyObject* PyCode_GetCode(PyCodeObject *code) +{ + return Py_NewRef(code->co_code); +} +#endif + + +// gh-95008 added PyCode_GetVarnames() to Python 3.11.0rc1 +#if PY_VERSION_HEX < 0x030B00C1 && !defined(PYPY_VERSION) +static inline PyObject* PyCode_GetVarnames(PyCodeObject *code) +{ + return Py_NewRef(code->co_varnames); +} +#endif + +// gh-95008 added PyCode_GetFreevars() to Python 3.11.0rc1 +#if PY_VERSION_HEX < 0x030B00C1 && !defined(PYPY_VERSION) +static inline PyObject* PyCode_GetFreevars(PyCodeObject *code) +{ + return Py_NewRef(code->co_freevars); +} +#endif + +// gh-95008 added PyCode_GetCellvars() to Python 3.11.0rc1 +#if PY_VERSION_HEX < 0x030B00C1 && !defined(PYPY_VERSION) +static inline PyObject* PyCode_GetCellvars(PyCodeObject *code) +{ + return Py_NewRef(code->co_cellvars); +} +#endif + + +// Py_UNUSED() was added to Python 3.4.0b2. 
+#if PY_VERSION_HEX < 0x030400B2 && !defined(Py_UNUSED) +# if defined(__GNUC__) || defined(__clang__) +# define Py_UNUSED(name) _unused_ ## name __attribute__((unused)) +# else +# define Py_UNUSED(name) _unused_ ## name +# endif +#endif + + +// gh-105922 added PyImport_AddModuleRef() to Python 3.13.0a1 +#if PY_VERSION_HEX < 0x030D00A0 +static inline PyObject* PyImport_AddModuleRef(const char *name) +{ + return Py_XNewRef(PyImport_AddModule(name)); +} +#endif + + +// gh-105927 added PyWeakref_GetRef() to Python 3.13.0a1 +#if PY_VERSION_HEX < 0x030D0000 +static inline int PyWeakref_GetRef(PyObject *ref, PyObject **pobj) +{ + PyObject *obj; + if (ref != NULL && !PyWeakref_Check(ref)) { + *pobj = NULL; + PyErr_SetString(PyExc_TypeError, "expected a weakref"); + return -1; + } + obj = PyWeakref_GetObject(ref); + if (obj == NULL) { + // SystemError if ref is NULL + *pobj = NULL; + return -1; + } + if (obj == Py_None) { + *pobj = NULL; + return 0; + } + *pobj = Py_NewRef(obj); + return 1; +} +#endif + + +// bpo-36974 added PY_VECTORCALL_ARGUMENTS_OFFSET to Python 3.8b1 +#ifndef PY_VECTORCALL_ARGUMENTS_OFFSET +# define PY_VECTORCALL_ARGUMENTS_OFFSET (_Py_CAST(size_t, 1) << (8 * sizeof(size_t) - 1)) +#endif + +// bpo-36974 added PyVectorcall_NARGS() to Python 3.8b1 +#if PY_VERSION_HEX < 0x030800B1 +static inline Py_ssize_t PyVectorcall_NARGS(size_t n) +{ + return n & ~PY_VECTORCALL_ARGUMENTS_OFFSET; +} +#endif + + +// gh-105922 added PyObject_Vectorcall() to Python 3.9.0a4 +#if PY_VERSION_HEX < 0x030900A4 +static inline PyObject* +PyObject_Vectorcall(PyObject *callable, PyObject *const *args, + size_t nargsf, PyObject *kwnames) +{ +#if PY_VERSION_HEX >= 0x030800B1 && !defined(PYPY_VERSION) + // bpo-36974 added _PyObject_Vectorcall() to Python 3.8.0b1 + return _PyObject_Vectorcall(callable, args, nargsf, kwnames); +#else + PyObject *posargs = NULL, *kwargs = NULL; + PyObject *res; + Py_ssize_t nposargs, nkwargs, i; + + if (nargsf != 0 && args == NULL) { + 
PyErr_BadInternalCall(); + goto error; + } + if (kwnames != NULL && !PyTuple_Check(kwnames)) { + PyErr_BadInternalCall(); + goto error; + } + + nposargs = (Py_ssize_t)PyVectorcall_NARGS(nargsf); + if (kwnames) { + nkwargs = PyTuple_GET_SIZE(kwnames); + } + else { + nkwargs = 0; + } + + posargs = PyTuple_New(nposargs); + if (posargs == NULL) { + goto error; + } + if (nposargs) { + for (i=0; i < nposargs; i++) { + PyTuple_SET_ITEM(posargs, i, Py_NewRef(*args)); + args++; + } + } + + if (nkwargs) { + kwargs = PyDict_New(); + if (kwargs == NULL) { + goto error; + } + + for (i = 0; i < nkwargs; i++) { + PyObject *key = PyTuple_GET_ITEM(kwnames, i); + PyObject *value = *args; + args++; + if (PyDict_SetItem(kwargs, key, value) < 0) { + goto error; + } + } + } + else { + kwargs = NULL; + } + + res = PyObject_Call(callable, posargs, kwargs); + Py_DECREF(posargs); + Py_XDECREF(kwargs); + return res; + +error: + Py_DECREF(posargs); + Py_XDECREF(kwargs); + return NULL; +#endif +} +#endif + + +// gh-106521 added PyObject_GetOptionalAttr() and +// PyObject_GetOptionalAttrString() to Python 3.13.0a1 +#if PY_VERSION_HEX < 0x030D00A1 +static inline int +PyObject_GetOptionalAttr(PyObject *obj, PyObject *attr_name, PyObject **result) +{ + // bpo-32571 added _PyObject_LookupAttr() to Python 3.7.0b1 +#if PY_VERSION_HEX >= 0x030700B1 && !defined(PYPY_VERSION) + return _PyObject_LookupAttr(obj, attr_name, result); +#else + *result = PyObject_GetAttr(obj, attr_name); + if (*result != NULL) { + return 1; + } + if (!PyErr_Occurred()) { + return 0; + } + if (PyErr_ExceptionMatches(PyExc_AttributeError)) { + PyErr_Clear(); + return 0; + } + return -1; +#endif +} + +static inline int +PyObject_GetOptionalAttrString(PyObject *obj, const char *attr_name, PyObject **result) +{ + PyObject *name_obj; + int rc; +#if PY_VERSION_HEX >= 0x03000000 + name_obj = PyUnicode_FromString(attr_name); +#else + name_obj = PyString_FromString(attr_name); +#endif + if (name_obj == NULL) { + *result = NULL; + 
return -1; + } + rc = PyObject_GetOptionalAttr(obj, name_obj, result); + Py_DECREF(name_obj); + return rc; +} +#endif + + +// gh-106307 added PyObject_GetOptionalAttr() and +// PyMapping_GetOptionalItemString() to Python 3.13.0a1 +#if PY_VERSION_HEX < 0x030D00A1 +static inline int +PyMapping_GetOptionalItem(PyObject *obj, PyObject *key, PyObject **result) +{ + *result = PyObject_GetItem(obj, key); + if (*result) { + return 1; + } + if (!PyErr_ExceptionMatches(PyExc_KeyError)) { + return -1; + } + PyErr_Clear(); + return 0; +} + +static inline int +PyMapping_GetOptionalItemString(PyObject *obj, const char *key, PyObject **result) +{ + PyObject *key_obj; + int rc; +#if PY_VERSION_HEX >= 0x03000000 + key_obj = PyUnicode_FromString(key); +#else + key_obj = PyString_FromString(key); +#endif + if (key_obj == NULL) { + *result = NULL; + return -1; + } + rc = PyMapping_GetOptionalItem(obj, key_obj, result); + Py_DECREF(key_obj); + return rc; +} +#endif + +// gh-108511 added PyMapping_HasKeyWithError() and +// PyMapping_HasKeyStringWithError() to Python 3.13.0a1 +#if PY_VERSION_HEX < 0x030D00A1 +static inline int +PyMapping_HasKeyWithError(PyObject *obj, PyObject *key) +{ + PyObject *res; + int rc = PyMapping_GetOptionalItem(obj, key, &res); + Py_XDECREF(res); + return rc; +} + +static inline int +PyMapping_HasKeyStringWithError(PyObject *obj, const char *key) +{ + PyObject *res; + int rc = PyMapping_GetOptionalItemString(obj, key, &res); + Py_XDECREF(res); + return rc; +} +#endif + + +// gh-108511 added PyObject_HasAttrWithError() and +// PyObject_HasAttrStringWithError() to Python 3.13.0a1 +#if PY_VERSION_HEX < 0x030D00A1 +static inline int +PyObject_HasAttrWithError(PyObject *obj, PyObject *attr) +{ + PyObject *res; + int rc = PyObject_GetOptionalAttr(obj, attr, &res); + Py_XDECREF(res); + return rc; +} + +static inline int +PyObject_HasAttrStringWithError(PyObject *obj, const char *attr) +{ + PyObject *res; + int rc = PyObject_GetOptionalAttrString(obj, attr, &res); + 
Py_XDECREF(res); + return rc; +} +#endif + + +// gh-106004 added PyDict_GetItemRef() and PyDict_GetItemStringRef() +// to Python 3.13.0a1 +#if PY_VERSION_HEX < 0x030D00A1 +static inline int +PyDict_GetItemRef(PyObject *mp, PyObject *key, PyObject **result) +{ +#if PY_VERSION_HEX >= 0x03000000 + PyObject *item = PyDict_GetItemWithError(mp, key); +#else + PyObject *item = _PyDict_GetItemWithError(mp, key); +#endif + if (item != NULL) { + *result = Py_NewRef(item); + return 1; // found + } + if (!PyErr_Occurred()) { + *result = NULL; + return 0; // not found + } + *result = NULL; + return -1; +} + +static inline int +PyDict_GetItemStringRef(PyObject *mp, const char *key, PyObject **result) +{ + int res; +#if PY_VERSION_HEX >= 0x03000000 + PyObject *key_obj = PyUnicode_FromString(key); +#else + PyObject *key_obj = PyString_FromString(key); +#endif + if (key_obj == NULL) { + *result = NULL; + return -1; + } + res = PyDict_GetItemRef(mp, key_obj, result); + Py_DECREF(key_obj); + return res; +} +#endif + + +// gh-106307 added PyModule_Add() to Python 3.13.0a1 +#if PY_VERSION_HEX < 0x030D00A1 +static inline int +PyModule_Add(PyObject *mod, const char *name, PyObject *value) +{ + int res = PyModule_AddObjectRef(mod, name, value); + Py_XDECREF(value); + return res; +} +#endif + + +// gh-108014 added Py_IsFinalizing() to Python 3.13.0a1 +// bpo-1856 added _Py_Finalizing to Python 3.2.1b1. +// _Py_IsFinalizing() was added to PyPy 7.3.0. +#if (0x030201B1 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x030D00A1) \ + && (!defined(PYPY_VERSION_NUM) || PYPY_VERSION_NUM >= 0x7030000) +static inline int Py_IsFinalizing(void) +{ +#if PY_VERSION_HEX >= 0x030700A1 + // _Py_IsFinalizing() was added to Python 3.7.0a1. 
+ return _Py_IsFinalizing(); +#else + return (_Py_Finalizing != NULL); +#endif +} +#endif + + +// gh-108323 added PyDict_ContainsString() to Python 3.13.0a1 +#if PY_VERSION_HEX < 0x030D00A1 +static inline int PyDict_ContainsString(PyObject *op, const char *key) +{ + PyObject *key_obj = PyUnicode_FromString(key); + if (key_obj == NULL) { + return -1; + } + int res = PyDict_Contains(op, key_obj); + Py_DECREF(key_obj); + return res; +} +#endif + + +// gh-108445 added PyLong_AsInt() to Python 3.13.0a1 +#if PY_VERSION_HEX < 0x030D00A1 +static inline int PyLong_AsInt(PyObject *obj) +{ +#ifdef PYPY_VERSION + long value = PyLong_AsLong(obj); + if (value == -1 && PyErr_Occurred()) { + return -1; + } + if (value < (long)INT_MIN || (long)INT_MAX < value) { + PyErr_SetString(PyExc_OverflowError, + "Python int too large to convert to C int"); + return -1; + } + return (int)value; +#else + return _PyLong_AsInt(obj); +#endif +} +#endif + + +// gh-107073 added PyObject_VisitManagedDict() to Python 3.13.0a1 +#if PY_VERSION_HEX < 0x030D00A1 +static inline int +PyObject_VisitManagedDict(PyObject *obj, visitproc visit, void *arg) +{ + PyObject **dict = _PyObject_GetDictPtr(obj); + if (dict == NULL || *dict == NULL) { + return -1; + } + Py_VISIT(*dict); + return 0; +} + +static inline void +PyObject_ClearManagedDict(PyObject *obj) +{ + PyObject **dict = _PyObject_GetDictPtr(obj); + if (dict == NULL || *dict == NULL) { + return; + } + Py_CLEAR(*dict); +} +#endif + +// gh-108867 added PyThreadState_GetUnchecked() to Python 3.13.0a1 +// Python 3.5.2 added _PyThreadState_UncheckedGet(). 
+#if PY_VERSION_HEX >= 0x03050200 && PY_VERSION_HEX < 0x030D00A1 +static inline PyThreadState* +PyThreadState_GetUnchecked(void) +{ + return _PyThreadState_UncheckedGet(); +} +#endif + +// gh-110289 added PyUnicode_EqualToUTF8() and PyUnicode_EqualToUTF8AndSize() +// to Python 3.13.0a1 +#if PY_VERSION_HEX < 0x030D00A1 +static inline int +PyUnicode_EqualToUTF8AndSize(PyObject *unicode, const char *str, Py_ssize_t str_len) +{ + Py_ssize_t len; + const void *utf8; + PyObject *exc_type, *exc_value, *exc_tb; + int res; + + // API cannot report errors so save/restore the exception + PyErr_Fetch(&exc_type, &exc_value, &exc_tb); + + // Python 3.3.0a1 added PyUnicode_AsUTF8AndSize() +#if PY_VERSION_HEX >= 0x030300A1 + if (PyUnicode_IS_ASCII(unicode)) { + utf8 = PyUnicode_DATA(unicode); + len = PyUnicode_GET_LENGTH(unicode); + } + else { + utf8 = PyUnicode_AsUTF8AndSize(unicode, &len); + if (utf8 == NULL) { + // Memory allocation failure. The API cannot report error, + // so ignore the exception and return 0. + res = 0; + goto done; + } + } + + if (len != str_len) { + res = 0; + goto done; + } + res = (memcmp(utf8, str, (size_t)len) == 0); +#else + PyObject *bytes = PyUnicode_AsUTF8String(unicode); + if (bytes == NULL) { + // Memory allocation failure. The API cannot report error, + // so ignore the exception and return 0. 
+ res = 0; + goto done; + } + +#if PY_VERSION_HEX >= 0x03000000 + len = PyBytes_GET_SIZE(bytes); + utf8 = PyBytes_AS_STRING(bytes); +#else + len = PyString_GET_SIZE(bytes); + utf8 = PyString_AS_STRING(bytes); +#endif + if (len != str_len) { + Py_DECREF(bytes); + res = 0; + goto done; + } + + res = (memcmp(utf8, str, (size_t)len) == 0); + Py_DECREF(bytes); +#endif + +done: + PyErr_Restore(exc_type, exc_value, exc_tb); + return res; +} + +static inline int +PyUnicode_EqualToUTF8(PyObject *unicode, const char *str) +{ + return PyUnicode_EqualToUTF8AndSize(unicode, str, (Py_ssize_t)strlen(str)); +} +#endif + + +// gh-111138 added PyList_Extend() and PyList_Clear() to Python 3.13.0a2 +#if PY_VERSION_HEX < 0x030D00A2 +static inline int +PyList_Extend(PyObject *list, PyObject *iterable) +{ + return PyList_SetSlice(list, PY_SSIZE_T_MAX, PY_SSIZE_T_MAX, iterable); +} + +static inline int +PyList_Clear(PyObject *list) +{ + return PyList_SetSlice(list, 0, PY_SSIZE_T_MAX, NULL); +} +#endif + +// gh-111262 added PyDict_Pop() and PyDict_PopString() to Python 3.13.0a2 +#if PY_VERSION_HEX < 0x030D00A2 +static inline int +PyDict_Pop(PyObject *dict, PyObject *key, PyObject **result) +{ + PyObject *value; + + if (!PyDict_Check(dict)) { + PyErr_BadInternalCall(); + if (result) { + *result = NULL; + } + return -1; + } + + // bpo-16991 added _PyDict_Pop() to Python 3.5.0b2. + // Python 3.6.0b3 changed _PyDict_Pop() first argument type to PyObject*. + // Python 3.13.0a1 removed _PyDict_Pop(). 
+#if defined(PYPY_VERSION) || PY_VERSION_HEX < 0x030500b2 || PY_VERSION_HEX >= 0x030D0000 + value = PyObject_CallMethod(dict, "pop", "O", key); +#elif PY_VERSION_HEX < 0x030600b3 + value = _PyDict_Pop(_Py_CAST(PyDictObject*, dict), key, NULL); +#else + value = _PyDict_Pop(dict, key, NULL); +#endif + if (value == NULL) { + if (result) { + *result = NULL; + } + if (PyErr_Occurred() && !PyErr_ExceptionMatches(PyExc_KeyError)) { + return -1; + } + PyErr_Clear(); + return 0; + } + if (result) { + *result = value; + } + else { + Py_DECREF(value); + } + return 1; +} + +static inline int +PyDict_PopString(PyObject *dict, const char *key, PyObject **result) +{ + PyObject *key_obj = PyUnicode_FromString(key); + if (key_obj == NULL) { + if (result != NULL) { + *result = NULL; + } + return -1; + } + + int res = PyDict_Pop(dict, key_obj, result); + Py_DECREF(key_obj); + return res; +} +#endif + + +#if PY_VERSION_HEX < 0x030200A4 +// Python 3.2.0a4 added Py_hash_t type +typedef Py_ssize_t Py_hash_t; +#endif + + +// gh-111545 added Py_HashPointer() to Python 3.13.0a3 +#if PY_VERSION_HEX < 0x030D00A3 +static inline Py_hash_t Py_HashPointer(const void *ptr) +{ +#if PY_VERSION_HEX >= 0x030900A4 && !defined(PYPY_VERSION) + return _Py_HashPointer(ptr); +#else + return _Py_HashPointer(_Py_CAST(void*, ptr)); +#endif +} +#endif + + +// Python 3.13a4 added a PyTime API. +// Use the private API added to Python 3.5. 
+#if PY_VERSION_HEX < 0x030D00A4 && PY_VERSION_HEX >= 0x03050000 +typedef _PyTime_t PyTime_t; +#define PyTime_MIN _PyTime_MIN +#define PyTime_MAX _PyTime_MAX + +static inline double PyTime_AsSecondsDouble(PyTime_t t) +{ return _PyTime_AsSecondsDouble(t); } + +static inline int PyTime_Monotonic(PyTime_t *result) +{ return _PyTime_GetMonotonicClockWithInfo(result, NULL); } + +static inline int PyTime_Time(PyTime_t *result) +{ return _PyTime_GetSystemClockWithInfo(result, NULL); } + +static inline int PyTime_PerfCounter(PyTime_t *result) +{ +#if PY_VERSION_HEX >= 0x03070000 && !defined(PYPY_VERSION) + return _PyTime_GetPerfCounterWithInfo(result, NULL); +#elif PY_VERSION_HEX >= 0x03070000 + // Call time.perf_counter_ns() and convert Python int object to PyTime_t. + // Cache time.perf_counter_ns() function for best performance. + static PyObject *func = NULL; + if (func == NULL) { + PyObject *mod = PyImport_ImportModule("time"); + if (mod == NULL) { + return -1; + } + + func = PyObject_GetAttrString(mod, "perf_counter_ns"); + Py_DECREF(mod); + if (func == NULL) { + return -1; + } + } + + PyObject *res = PyObject_CallNoArgs(func); + if (res == NULL) { + return -1; + } + long long value = PyLong_AsLongLong(res); + Py_DECREF(res); + + if (value == -1 && PyErr_Occurred()) { + return -1; + } + + Py_BUILD_ASSERT(sizeof(value) >= sizeof(PyTime_t)); + *result = (PyTime_t)value; + return 0; +#else + // Call time.perf_counter() and convert C double to PyTime_t. + // Cache time.perf_counter() function for best performance. 
+ static PyObject *func = NULL; + if (func == NULL) { + PyObject *mod = PyImport_ImportModule("time"); + if (mod == NULL) { + return -1; + } + + func = PyObject_GetAttrString(mod, "perf_counter"); + Py_DECREF(mod); + if (func == NULL) { + return -1; + } + } + + PyObject *res = PyObject_CallNoArgs(func); + if (res == NULL) { + return -1; + } + double d = PyFloat_AsDouble(res); + Py_DECREF(res); + + if (d == -1.0 && PyErr_Occurred()) { + return -1; + } + + // Avoid floor() to avoid having to link to libm + *result = (PyTime_t)(d * 1e9); + return 0; +#endif +} + +#endif + +// gh-111389 added hash constants to Python 3.13.0a5. These constants were +// added first as private macros to Python 3.4.0b1 and PyPy 7.3.8. +#if (!defined(PyHASH_BITS) \ + && ((!defined(PYPY_VERSION) && PY_VERSION_HEX >= 0x030400B1) \ + || (defined(PYPY_VERSION) && PY_VERSION_HEX >= 0x03070000 \ + && PYPY_VERSION_NUM >= 0x07030800))) +# define PyHASH_BITS _PyHASH_BITS +# define PyHASH_MODULUS _PyHASH_MODULUS +# define PyHASH_INF _PyHASH_INF +# define PyHASH_IMAG _PyHASH_IMAG +#endif + + +// gh-111545 added Py_GetConstant() and Py_GetConstantBorrowed() +// to Python 3.13.0a6 +#if PY_VERSION_HEX < 0x030D00A6 && !defined(Py_CONSTANT_NONE) + +#define Py_CONSTANT_NONE 0 +#define Py_CONSTANT_FALSE 1 +#define Py_CONSTANT_TRUE 2 +#define Py_CONSTANT_ELLIPSIS 3 +#define Py_CONSTANT_NOT_IMPLEMENTED 4 +#define Py_CONSTANT_ZERO 5 +#define Py_CONSTANT_ONE 6 +#define Py_CONSTANT_EMPTY_STR 7 +#define Py_CONSTANT_EMPTY_BYTES 8 +#define Py_CONSTANT_EMPTY_TUPLE 9 + +static inline PyObject* Py_GetConstant(unsigned int constant_id) +{ + static PyObject* constants[Py_CONSTANT_EMPTY_TUPLE + 1] = {NULL}; + + if (constants[Py_CONSTANT_NONE] == NULL) { + constants[Py_CONSTANT_NONE] = Py_None; + constants[Py_CONSTANT_FALSE] = Py_False; + constants[Py_CONSTANT_TRUE] = Py_True; + constants[Py_CONSTANT_ELLIPSIS] = Py_Ellipsis; + constants[Py_CONSTANT_NOT_IMPLEMENTED] = Py_NotImplemented; + + constants[Py_CONSTANT_ZERO] = 
PyLong_FromLong(0); + if (constants[Py_CONSTANT_ZERO] == NULL) { + goto fatal_error; + } + + constants[Py_CONSTANT_ONE] = PyLong_FromLong(1); + if (constants[Py_CONSTANT_ONE] == NULL) { + goto fatal_error; + } + + constants[Py_CONSTANT_EMPTY_STR] = PyUnicode_FromStringAndSize("", 0); + if (constants[Py_CONSTANT_EMPTY_STR] == NULL) { + goto fatal_error; + } + + constants[Py_CONSTANT_EMPTY_BYTES] = PyBytes_FromStringAndSize("", 0); + if (constants[Py_CONSTANT_EMPTY_BYTES] == NULL) { + goto fatal_error; + } + + constants[Py_CONSTANT_EMPTY_TUPLE] = PyTuple_New(0); + if (constants[Py_CONSTANT_EMPTY_TUPLE] == NULL) { + goto fatal_error; + } + // goto dance to avoid compiler warnings about Py_FatalError() + goto init_done; + +fatal_error: + // This case should never happen + Py_FatalError("Py_GetConstant() failed to get constants"); + } + +init_done: + if (constant_id <= Py_CONSTANT_EMPTY_TUPLE) { + return Py_NewRef(constants[constant_id]); + } + else { + PyErr_BadInternalCall(); + return NULL; + } +} + +static inline PyObject* Py_GetConstantBorrowed(unsigned int constant_id) +{ + PyObject *obj = Py_GetConstant(constant_id); + Py_XDECREF(obj); + return obj; +} +#endif + + +// gh-114329 added PyList_GetItemRef() to Python 3.13.0a4 +#if PY_VERSION_HEX < 0x030D00A4 +static inline PyObject * +PyList_GetItemRef(PyObject *op, Py_ssize_t index) +{ + PyObject *item = PyList_GetItem(op, index); + Py_XINCREF(item); + return item; +} +#endif + + +// gh-114329 added PyList_GetItemRef() to Python 3.13.0a4 +#if PY_VERSION_HEX < 0x030D00A4 +static inline int +PyDict_SetDefaultRef(PyObject *d, PyObject *key, PyObject *default_value, + PyObject **result) +{ + PyObject *value; + if (PyDict_GetItemRef(d, key, &value) < 0) { + // get error + if (result) { + *result = NULL; + } + return -1; + } + if (value != NULL) { + // present + if (result) { + *result = value; + } + else { + Py_DECREF(value); + } + return 1; + } + + // missing: set the item + if (PyDict_SetItem(d, key, default_value) < 0) 
{ + // set error + if (result) { + *result = NULL; + } + return -1; + } + if (result) { + *result = Py_NewRef(default_value); + } + return 0; +} +#endif + +#if PY_VERSION_HEX < 0x030D00B3 +# define Py_BEGIN_CRITICAL_SECTION(op) { +# define Py_END_CRITICAL_SECTION() } +# define Py_BEGIN_CRITICAL_SECTION2(a, b) { +# define Py_END_CRITICAL_SECTION2() } +#endif + +#if PY_VERSION_HEX < 0x030E0000 && PY_VERSION_HEX >= 0x03060000 && !defined(PYPY_VERSION) +typedef struct PyUnicodeWriter PyUnicodeWriter; + +static inline void PyUnicodeWriter_Discard(PyUnicodeWriter *writer) +{ + _PyUnicodeWriter_Dealloc((_PyUnicodeWriter*)writer); + PyMem_Free(writer); +} + +static inline PyUnicodeWriter* PyUnicodeWriter_Create(Py_ssize_t length) +{ + if (length < 0) { + PyErr_SetString(PyExc_ValueError, + "length must be positive"); + return NULL; + } + + const size_t size = sizeof(_PyUnicodeWriter); + PyUnicodeWriter *pub_writer = (PyUnicodeWriter *)PyMem_Malloc(size); + if (pub_writer == _Py_NULL) { + PyErr_NoMemory(); + return _Py_NULL; + } + _PyUnicodeWriter *writer = (_PyUnicodeWriter *)pub_writer; + + _PyUnicodeWriter_Init(writer); + if (_PyUnicodeWriter_Prepare(writer, length, 127) < 0) { + PyUnicodeWriter_Discard(pub_writer); + return NULL; + } + writer->overallocate = 1; + return pub_writer; +} + +static inline PyObject* PyUnicodeWriter_Finish(PyUnicodeWriter *writer) +{ + PyObject *str = _PyUnicodeWriter_Finish((_PyUnicodeWriter*)writer); + assert(((_PyUnicodeWriter*)writer)->buffer == NULL); + PyMem_Free(writer); + return str; +} + +static inline int +PyUnicodeWriter_WriteChar(PyUnicodeWriter *writer, Py_UCS4 ch) +{ + if (ch > 0x10ffff) { + PyErr_SetString(PyExc_ValueError, + "character must be in range(0x110000)"); + return -1; + } + + return _PyUnicodeWriter_WriteChar((_PyUnicodeWriter*)writer, ch); +} + +static inline int +PyUnicodeWriter_WriteStr(PyUnicodeWriter *writer, PyObject *obj) +{ + PyObject *str = PyObject_Str(obj); + if (str == NULL) { + return -1; + } + + int res 
= _PyUnicodeWriter_WriteStr((_PyUnicodeWriter*)writer, str); + Py_DECREF(str); + return res; +} + +static inline int +PyUnicodeWriter_WriteRepr(PyUnicodeWriter *writer, PyObject *obj) +{ + PyObject *str = PyObject_Repr(obj); + if (str == NULL) { + return -1; + } + + int res = _PyUnicodeWriter_WriteStr((_PyUnicodeWriter*)writer, str); + Py_DECREF(str); + return res; +} + +static inline int +PyUnicodeWriter_WriteUTF8(PyUnicodeWriter *writer, + const char *str, Py_ssize_t size) +{ + if (size < 0) { + size = (Py_ssize_t)strlen(str); + } + + PyObject *str_obj = PyUnicode_FromStringAndSize(str, size); + if (str_obj == _Py_NULL) { + return -1; + } + + int res = _PyUnicodeWriter_WriteStr((_PyUnicodeWriter*)writer, str_obj); + Py_DECREF(str_obj); + return res; +} + +static inline int +PyUnicodeWriter_WriteASCII(PyUnicodeWriter *writer, + const char *str, Py_ssize_t size) +{ + if (size < 0) { + size = (Py_ssize_t)strlen(str); + } + + return _PyUnicodeWriter_WriteASCIIString((_PyUnicodeWriter*)writer, + str, size); +} + +static inline int +PyUnicodeWriter_WriteWideChar(PyUnicodeWriter *writer, + const wchar_t *str, Py_ssize_t size) +{ + if (size < 0) { + size = (Py_ssize_t)wcslen(str); + } + + PyObject *str_obj = PyUnicode_FromWideChar(str, size); + if (str_obj == _Py_NULL) { + return -1; + } + + int res = _PyUnicodeWriter_WriteStr((_PyUnicodeWriter*)writer, str_obj); + Py_DECREF(str_obj); + return res; +} + +static inline int +PyUnicodeWriter_WriteSubstring(PyUnicodeWriter *writer, PyObject *str, + Py_ssize_t start, Py_ssize_t end) +{ + if (!PyUnicode_Check(str)) { + PyErr_Format(PyExc_TypeError, "expect str, not %s", + Py_TYPE(str)->tp_name); + return -1; + } + if (start < 0 || start > end) { + PyErr_Format(PyExc_ValueError, "invalid start argument"); + return -1; + } + if (end > PyUnicode_GET_LENGTH(str)) { + PyErr_Format(PyExc_ValueError, "invalid end argument"); + return -1; + } + + return _PyUnicodeWriter_WriteSubstring((_PyUnicodeWriter*)writer, str, + start, end); +} 
+ +static inline int +PyUnicodeWriter_Format(PyUnicodeWriter *writer, const char *format, ...) +{ + va_list vargs; + va_start(vargs, format); + PyObject *str = PyUnicode_FromFormatV(format, vargs); + va_end(vargs); + if (str == _Py_NULL) { + return -1; + } + + int res = _PyUnicodeWriter_WriteStr((_PyUnicodeWriter*)writer, str); + Py_DECREF(str); + return res; +} +#endif // PY_VERSION_HEX < 0x030E0000 + +// gh-116560 added PyLong_GetSign() to Python 3.14.0a0 +#if PY_VERSION_HEX < 0x030E00A0 +static inline int PyLong_GetSign(PyObject *obj, int *sign) +{ + if (!PyLong_Check(obj)) { + PyErr_Format(PyExc_TypeError, "expect int, got %s", Py_TYPE(obj)->tp_name); + return -1; + } + + *sign = _PyLong_Sign(obj); + return 0; +} +#endif + +// gh-126061 added PyLong_IsPositive/Negative/Zero() to Python in 3.14.0a2 +#if PY_VERSION_HEX < 0x030E00A2 +static inline int PyLong_IsPositive(PyObject *obj) +{ + if (!PyLong_Check(obj)) { + PyErr_Format(PyExc_TypeError, "expected int, got %s", Py_TYPE(obj)->tp_name); + return -1; + } + return _PyLong_Sign(obj) == 1; +} + +static inline int PyLong_IsNegative(PyObject *obj) +{ + if (!PyLong_Check(obj)) { + PyErr_Format(PyExc_TypeError, "expected int, got %s", Py_TYPE(obj)->tp_name); + return -1; + } + return _PyLong_Sign(obj) == -1; +} + +static inline int PyLong_IsZero(PyObject *obj) +{ + if (!PyLong_Check(obj)) { + PyErr_Format(PyExc_TypeError, "expected int, got %s", Py_TYPE(obj)->tp_name); + return -1; + } + return _PyLong_Sign(obj) == 0; +} +#endif + + +// gh-124502 added PyUnicode_Equal() to Python 3.14.0a0 +#if PY_VERSION_HEX < 0x030E00A0 +static inline int PyUnicode_Equal(PyObject *str1, PyObject *str2) +{ + if (!PyUnicode_Check(str1)) { + PyErr_Format(PyExc_TypeError, "first argument must be str, not %s", + Py_TYPE(str1)->tp_name); + return -1; + } + if (!PyUnicode_Check(str2)) { + PyErr_Format(PyExc_TypeError, "second argument must be str, not %s", + Py_TYPE(str2)->tp_name); + return -1; + } + +#if PY_VERSION_HEX >= 0x030d0000 && 
!defined(PYPY_VERSION) + PyAPI_FUNC(int) _PyUnicode_Equal(PyObject *str1, PyObject *str2); + + return _PyUnicode_Equal(str1, str2); +#elif PY_VERSION_HEX >= 0x03060000 && !defined(PYPY_VERSION) + return _PyUnicode_EQ(str1, str2); +#elif PY_VERSION_HEX >= 0x03090000 && defined(PYPY_VERSION) + return _PyUnicode_EQ(str1, str2); +#else + return (PyUnicode_Compare(str1, str2) == 0); +#endif +} +#endif + + +// gh-121645 added PyBytes_Join() to Python 3.14.0a0 +#if PY_VERSION_HEX < 0x030E00A0 +static inline PyObject* PyBytes_Join(PyObject *sep, PyObject *iterable) +{ + return _PyBytes_Join(sep, iterable); +} +#endif + + +#if PY_VERSION_HEX < 0x030E00A0 +static inline Py_hash_t Py_HashBuffer(const void *ptr, Py_ssize_t len) +{ +#if PY_VERSION_HEX >= 0x03000000 && !defined(PYPY_VERSION) + PyAPI_FUNC(Py_hash_t) _Py_HashBytes(const void *src, Py_ssize_t len); + + return _Py_HashBytes(ptr, len); +#else + Py_hash_t hash; + PyObject *bytes = PyBytes_FromStringAndSize((const char*)ptr, len); + if (bytes == NULL) { + return -1; + } + hash = PyObject_Hash(bytes); + Py_DECREF(bytes); + return hash; +#endif +} +#endif + + +#if PY_VERSION_HEX < 0x030E00A0 +static inline int PyIter_NextItem(PyObject *iter, PyObject **item) +{ + iternextfunc tp_iternext; + + assert(iter != NULL); + assert(item != NULL); + + tp_iternext = Py_TYPE(iter)->tp_iternext; + if (tp_iternext == NULL) { + *item = NULL; + PyErr_Format(PyExc_TypeError, "expected an iterator, got '%s'", + Py_TYPE(iter)->tp_name); + return -1; + } + + if ((*item = tp_iternext(iter))) { + return 1; + } + if (!PyErr_Occurred()) { + return 0; + } + if (PyErr_ExceptionMatches(PyExc_StopIteration)) { + PyErr_Clear(); + return 0; + } + return -1; +} +#endif + + +#if PY_VERSION_HEX < 0x030E00A0 +static inline PyObject* PyLong_FromInt32(int32_t value) +{ + Py_BUILD_ASSERT(sizeof(long) >= 4); + return PyLong_FromLong(value); +} + +static inline PyObject* PyLong_FromInt64(int64_t value) +{ + Py_BUILD_ASSERT(sizeof(long long) >= 8); + return 
PyLong_FromLongLong(value); +} + +static inline PyObject* PyLong_FromUInt32(uint32_t value) +{ + Py_BUILD_ASSERT(sizeof(unsigned long) >= 4); + return PyLong_FromUnsignedLong(value); +} + +static inline PyObject* PyLong_FromUInt64(uint64_t value) +{ + Py_BUILD_ASSERT(sizeof(unsigned long long) >= 8); + return PyLong_FromUnsignedLongLong(value); +} + +static inline int PyLong_AsInt32(PyObject *obj, int32_t *pvalue) +{ + Py_BUILD_ASSERT(sizeof(int) == 4); + int value = PyLong_AsInt(obj); + if (value == -1 && PyErr_Occurred()) { + return -1; + } + *pvalue = (int32_t)value; + return 0; +} + +static inline int PyLong_AsInt64(PyObject *obj, int64_t *pvalue) +{ + Py_BUILD_ASSERT(sizeof(long long) == 8); + long long value = PyLong_AsLongLong(obj); + if (value == -1 && PyErr_Occurred()) { + return -1; + } + *pvalue = (int64_t)value; + return 0; +} + +static inline int PyLong_AsUInt32(PyObject *obj, uint32_t *pvalue) +{ + Py_BUILD_ASSERT(sizeof(long) >= 4); + unsigned long value = PyLong_AsUnsignedLong(obj); + if (value == (unsigned long)-1 && PyErr_Occurred()) { + return -1; + } +#if SIZEOF_LONG > 4 + if ((unsigned long)UINT32_MAX < value) { + PyErr_SetString(PyExc_OverflowError, + "Python int too large to convert to C uint32_t"); + return -1; + } +#endif + *pvalue = (uint32_t)value; + return 0; +} + +static inline int PyLong_AsUInt64(PyObject *obj, uint64_t *pvalue) +{ + Py_BUILD_ASSERT(sizeof(long long) == 8); + unsigned long long value = PyLong_AsUnsignedLongLong(obj); + if (value == (unsigned long long)-1 && PyErr_Occurred()) { + return -1; + } + *pvalue = (uint64_t)value; + return 0; +} +#endif + + +// gh-102471 added import and export API for integers to 3.14.0a2. +#if PY_VERSION_HEX < 0x030E00A2 && PY_VERSION_HEX >= 0x03000000 && !defined(PYPY_VERSION) +// Helpers to access PyLongObject internals. 
+static inline void +_PyLong_SetSignAndDigitCount(PyLongObject *op, int sign, Py_ssize_t size) +{ +#if PY_VERSION_HEX >= 0x030C0000 + op->long_value.lv_tag = (uintptr_t)(1 - sign) | ((uintptr_t)(size) << 3); +#elif PY_VERSION_HEX >= 0x030900A4 + Py_SET_SIZE(op, sign * size); +#else + Py_SIZE(op) = sign * size; +#endif +} + +static inline Py_ssize_t +_PyLong_DigitCount(const PyLongObject *op) +{ +#if PY_VERSION_HEX >= 0x030C0000 + return (Py_ssize_t)(op->long_value.lv_tag >> 3); +#else + return _PyLong_Sign((PyObject*)op) < 0 ? -Py_SIZE(op) : Py_SIZE(op); +#endif +} + +static inline digit* +_PyLong_GetDigits(const PyLongObject *op) +{ +#if PY_VERSION_HEX >= 0x030C0000 + return (digit*)(op->long_value.ob_digit); +#else + return (digit*)(op->ob_digit); +#endif +} + +typedef struct PyLongLayout { + uint8_t bits_per_digit; + uint8_t digit_size; + int8_t digits_order; + int8_t digit_endianness; +} PyLongLayout; + +typedef struct PyLongExport { + int64_t value; + uint8_t negative; + Py_ssize_t ndigits; + const void *digits; + Py_uintptr_t _reserved; +} PyLongExport; + +typedef struct PyLongWriter PyLongWriter; + +static inline const PyLongLayout* +PyLong_GetNativeLayout(void) +{ + static const PyLongLayout PyLong_LAYOUT = { + PyLong_SHIFT, + sizeof(digit), + -1, // least significant first + PY_LITTLE_ENDIAN ? 
-1 : 1, + }; + + return &PyLong_LAYOUT; +} + +static inline int +PyLong_Export(PyObject *obj, PyLongExport *export_long) +{ + if (!PyLong_Check(obj)) { + memset(export_long, 0, sizeof(*export_long)); + PyErr_Format(PyExc_TypeError, "expected int, got %s", + Py_TYPE(obj)->tp_name); + return -1; + } + + // Fast-path: try to convert to a int64_t + PyLongObject *self = (PyLongObject*)obj; + int overflow; +#if SIZEOF_LONG == 8 + long value = PyLong_AsLongAndOverflow(obj, &overflow); +#else + // Windows has 32-bit long, so use 64-bit long long instead + long long value = PyLong_AsLongLongAndOverflow(obj, &overflow); +#endif + Py_BUILD_ASSERT(sizeof(value) == sizeof(int64_t)); + // the function cannot fail since obj is a PyLongObject + assert(!(value == -1 && PyErr_Occurred())); + + if (!overflow) { + export_long->value = value; + export_long->negative = 0; + export_long->ndigits = 0; + export_long->digits = 0; + export_long->_reserved = 0; + } + else { + export_long->value = 0; + export_long->negative = _PyLong_Sign(obj) < 0; + export_long->ndigits = _PyLong_DigitCount(self); + if (export_long->ndigits == 0) { + export_long->ndigits = 1; + } + export_long->digits = _PyLong_GetDigits(self); + export_long->_reserved = (Py_uintptr_t)Py_NewRef(obj); + } + return 0; +} + +static inline void +PyLong_FreeExport(PyLongExport *export_long) +{ + PyObject *obj = (PyObject*)export_long->_reserved; + + if (obj) { + export_long->_reserved = 0; + Py_DECREF(obj); + } +} + +static inline PyLongWriter* +PyLongWriter_Create(int negative, Py_ssize_t ndigits, void **digits) +{ + if (ndigits <= 0) { + PyErr_SetString(PyExc_ValueError, "ndigits must be positive"); + return NULL; + } + assert(digits != NULL); + + PyLongObject *obj = _PyLong_New(ndigits); + if (obj == NULL) { + return NULL; + } + _PyLong_SetSignAndDigitCount(obj, negative?-1:1, ndigits); + + *digits = _PyLong_GetDigits(obj); + return (PyLongWriter*)obj; +} + +static inline void +PyLongWriter_Discard(PyLongWriter *writer) +{ + 
PyLongObject *obj = (PyLongObject *)writer; + + assert(Py_REFCNT(obj) == 1); + Py_DECREF(obj); +} + +static inline PyObject* +PyLongWriter_Finish(PyLongWriter *writer) +{ + PyObject *obj = (PyObject *)writer; + PyLongObject *self = (PyLongObject*)obj; + Py_ssize_t j = _PyLong_DigitCount(self); + Py_ssize_t i = j; + int sign = _PyLong_Sign(obj); + + assert(Py_REFCNT(obj) == 1); + + // Normalize and get singleton if possible + while (i > 0 && _PyLong_GetDigits(self)[i-1] == 0) { + --i; + } + if (i != j) { + if (i == 0) { + sign = 0; + } + _PyLong_SetSignAndDigitCount(self, sign, i); + } + if (i <= 1) { + long val = sign * (long)(_PyLong_GetDigits(self)[0]); + Py_DECREF(obj); + return PyLong_FromLong(val); + } + + return obj; +} +#endif + + +#if PY_VERSION_HEX < 0x030C00A3 +# define Py_T_SHORT T_SHORT +# define Py_T_INT T_INT +# define Py_T_LONG T_LONG +# define Py_T_FLOAT T_FLOAT +# define Py_T_DOUBLE T_DOUBLE +# define Py_T_STRING T_STRING +# define _Py_T_OBJECT T_OBJECT +# define Py_T_CHAR T_CHAR +# define Py_T_BYTE T_BYTE +# define Py_T_UBYTE T_UBYTE +# define Py_T_USHORT T_USHORT +# define Py_T_UINT T_UINT +# define Py_T_ULONG T_ULONG +# define Py_T_STRING_INPLACE T_STRING_INPLACE +# define Py_T_BOOL T_BOOL +# define Py_T_OBJECT_EX T_OBJECT_EX +# define Py_T_LONGLONG T_LONGLONG +# define Py_T_ULONGLONG T_ULONGLONG +# define Py_T_PYSSIZET T_PYSSIZET + +# if PY_VERSION_HEX >= 0x03000000 && !defined(PYPY_VERSION) +# define _Py_T_NONE T_NONE +# endif + +# define Py_READONLY READONLY +# define Py_AUDIT_READ READ_RESTRICTED +# define _Py_WRITE_RESTRICTED PY_WRITE_RESTRICTED +#endif + + +// gh-127350 added Py_fopen() and Py_fclose() to Python 3.14a4 +#if PY_VERSION_HEX < 0x030E00A4 +static inline FILE* Py_fopen(PyObject *path, const char *mode) +{ +#if 0x030400A2 <= PY_VERSION_HEX && !defined(PYPY_VERSION) + PyAPI_FUNC(FILE*) _Py_fopen_obj(PyObject *path, const char *mode); + + return _Py_fopen_obj(path, mode); +#else + FILE *f; + PyObject *bytes; +#if PY_VERSION_HEX >= 
0x03000000 + if (!PyUnicode_FSConverter(path, &bytes)) { + return NULL; + } +#else + if (!PyString_Check(path)) { + PyErr_SetString(PyExc_TypeError, "except str"); + return NULL; + } + bytes = Py_NewRef(path); +#endif + const char *path_bytes = PyBytes_AS_STRING(bytes); + + f = fopen(path_bytes, mode); + Py_DECREF(bytes); + + if (f == NULL) { + PyErr_SetFromErrnoWithFilenameObject(PyExc_OSError, path); + return NULL; + } + return f; +#endif +} + +static inline int Py_fclose(FILE *file) +{ + return fclose(file); +} +#endif + + +#if 0x03090000 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x030E0000 && !defined(PYPY_VERSION) +static inline PyObject* +PyConfig_Get(const char *name) +{ + typedef enum { + _PyConfig_MEMBER_INT, + _PyConfig_MEMBER_UINT, + _PyConfig_MEMBER_ULONG, + _PyConfig_MEMBER_BOOL, + _PyConfig_MEMBER_WSTR, + _PyConfig_MEMBER_WSTR_OPT, + _PyConfig_MEMBER_WSTR_LIST, + } PyConfigMemberType; + + typedef struct { + const char *name; + size_t offset; + PyConfigMemberType type; + const char *sys_attr; + } PyConfigSpec; + +#define PYTHONCAPI_COMPAT_SPEC(MEMBER, TYPE, sys_attr) \ + {#MEMBER, offsetof(PyConfig, MEMBER), \ + _PyConfig_MEMBER_##TYPE, sys_attr} + + static const PyConfigSpec config_spec[] = { + PYTHONCAPI_COMPAT_SPEC(argv, WSTR_LIST, "argv"), + PYTHONCAPI_COMPAT_SPEC(base_exec_prefix, WSTR_OPT, "base_exec_prefix"), + PYTHONCAPI_COMPAT_SPEC(base_executable, WSTR_OPT, "_base_executable"), + PYTHONCAPI_COMPAT_SPEC(base_prefix, WSTR_OPT, "base_prefix"), + PYTHONCAPI_COMPAT_SPEC(bytes_warning, UINT, _Py_NULL), + PYTHONCAPI_COMPAT_SPEC(exec_prefix, WSTR_OPT, "exec_prefix"), + PYTHONCAPI_COMPAT_SPEC(executable, WSTR_OPT, "executable"), + PYTHONCAPI_COMPAT_SPEC(inspect, BOOL, _Py_NULL), +#if 0x030C0000 <= PY_VERSION_HEX + PYTHONCAPI_COMPAT_SPEC(int_max_str_digits, UINT, _Py_NULL), +#endif + PYTHONCAPI_COMPAT_SPEC(interactive, BOOL, _Py_NULL), + PYTHONCAPI_COMPAT_SPEC(module_search_paths, WSTR_LIST, "path"), + PYTHONCAPI_COMPAT_SPEC(optimization_level, UINT, 
_Py_NULL), + PYTHONCAPI_COMPAT_SPEC(parser_debug, BOOL, _Py_NULL), + PYTHONCAPI_COMPAT_SPEC(platlibdir, WSTR, "platlibdir"), + PYTHONCAPI_COMPAT_SPEC(prefix, WSTR_OPT, "prefix"), + PYTHONCAPI_COMPAT_SPEC(pycache_prefix, WSTR_OPT, "pycache_prefix"), + PYTHONCAPI_COMPAT_SPEC(quiet, BOOL, _Py_NULL), +#if 0x030B0000 <= PY_VERSION_HEX + PYTHONCAPI_COMPAT_SPEC(stdlib_dir, WSTR_OPT, "_stdlib_dir"), +#endif + PYTHONCAPI_COMPAT_SPEC(use_environment, BOOL, _Py_NULL), + PYTHONCAPI_COMPAT_SPEC(verbose, UINT, _Py_NULL), + PYTHONCAPI_COMPAT_SPEC(warnoptions, WSTR_LIST, "warnoptions"), + PYTHONCAPI_COMPAT_SPEC(write_bytecode, BOOL, _Py_NULL), + PYTHONCAPI_COMPAT_SPEC(xoptions, WSTR_LIST, "_xoptions"), + PYTHONCAPI_COMPAT_SPEC(buffered_stdio, BOOL, _Py_NULL), + PYTHONCAPI_COMPAT_SPEC(check_hash_pycs_mode, WSTR, _Py_NULL), +#if 0x030B0000 <= PY_VERSION_HEX + PYTHONCAPI_COMPAT_SPEC(code_debug_ranges, BOOL, _Py_NULL), +#endif + PYTHONCAPI_COMPAT_SPEC(configure_c_stdio, BOOL, _Py_NULL), +#if 0x030D0000 <= PY_VERSION_HEX + PYTHONCAPI_COMPAT_SPEC(cpu_count, INT, _Py_NULL), +#endif + PYTHONCAPI_COMPAT_SPEC(dev_mode, BOOL, _Py_NULL), + PYTHONCAPI_COMPAT_SPEC(dump_refs, BOOL, _Py_NULL), +#if 0x030B0000 <= PY_VERSION_HEX + PYTHONCAPI_COMPAT_SPEC(dump_refs_file, WSTR_OPT, _Py_NULL), +#endif +#ifdef Py_GIL_DISABLED + PYTHONCAPI_COMPAT_SPEC(enable_gil, INT, _Py_NULL), +#endif + PYTHONCAPI_COMPAT_SPEC(faulthandler, BOOL, _Py_NULL), + PYTHONCAPI_COMPAT_SPEC(filesystem_encoding, WSTR, _Py_NULL), + PYTHONCAPI_COMPAT_SPEC(filesystem_errors, WSTR, _Py_NULL), + PYTHONCAPI_COMPAT_SPEC(hash_seed, ULONG, _Py_NULL), + PYTHONCAPI_COMPAT_SPEC(home, WSTR_OPT, _Py_NULL), + PYTHONCAPI_COMPAT_SPEC(import_time, BOOL, _Py_NULL), + PYTHONCAPI_COMPAT_SPEC(install_signal_handlers, BOOL, _Py_NULL), + PYTHONCAPI_COMPAT_SPEC(isolated, BOOL, _Py_NULL), +#ifdef MS_WINDOWS + PYTHONCAPI_COMPAT_SPEC(legacy_windows_stdio, BOOL, _Py_NULL), +#endif + PYTHONCAPI_COMPAT_SPEC(malloc_stats, BOOL, _Py_NULL), +#if 0x030A0000 <= 
PY_VERSION_HEX + PYTHONCAPI_COMPAT_SPEC(orig_argv, WSTR_LIST, "orig_argv"), +#endif + PYTHONCAPI_COMPAT_SPEC(parse_argv, BOOL, _Py_NULL), + PYTHONCAPI_COMPAT_SPEC(pathconfig_warnings, BOOL, _Py_NULL), +#if 0x030C0000 <= PY_VERSION_HEX + PYTHONCAPI_COMPAT_SPEC(perf_profiling, UINT, _Py_NULL), +#endif + PYTHONCAPI_COMPAT_SPEC(program_name, WSTR, _Py_NULL), + PYTHONCAPI_COMPAT_SPEC(run_command, WSTR_OPT, _Py_NULL), + PYTHONCAPI_COMPAT_SPEC(run_filename, WSTR_OPT, _Py_NULL), + PYTHONCAPI_COMPAT_SPEC(run_module, WSTR_OPT, _Py_NULL), +#if 0x030B0000 <= PY_VERSION_HEX + PYTHONCAPI_COMPAT_SPEC(safe_path, BOOL, _Py_NULL), +#endif + PYTHONCAPI_COMPAT_SPEC(show_ref_count, BOOL, _Py_NULL), + PYTHONCAPI_COMPAT_SPEC(site_import, BOOL, _Py_NULL), + PYTHONCAPI_COMPAT_SPEC(skip_source_first_line, BOOL, _Py_NULL), + PYTHONCAPI_COMPAT_SPEC(stdio_encoding, WSTR, _Py_NULL), + PYTHONCAPI_COMPAT_SPEC(stdio_errors, WSTR, _Py_NULL), + PYTHONCAPI_COMPAT_SPEC(tracemalloc, UINT, _Py_NULL), +#if 0x030B0000 <= PY_VERSION_HEX + PYTHONCAPI_COMPAT_SPEC(use_frozen_modules, BOOL, _Py_NULL), +#endif + PYTHONCAPI_COMPAT_SPEC(use_hash_seed, BOOL, _Py_NULL), + PYTHONCAPI_COMPAT_SPEC(user_site_directory, BOOL, _Py_NULL), +#if 0x030A0000 <= PY_VERSION_HEX + PYTHONCAPI_COMPAT_SPEC(warn_default_encoding, BOOL, _Py_NULL), +#endif + }; + +#undef PYTHONCAPI_COMPAT_SPEC + + const PyConfigSpec *spec; + int found = 0; + for (size_t i=0; i < sizeof(config_spec) / sizeof(config_spec[0]); i++) { + spec = &config_spec[i]; + if (strcmp(spec->name, name) == 0) { + found = 1; + break; + } + } + if (found) { + if (spec->sys_attr != NULL) { + PyObject *value = PySys_GetObject(spec->sys_attr); + if (value == NULL) { + PyErr_Format(PyExc_RuntimeError, "lost sys.%s", spec->sys_attr); + return NULL; + } + return Py_NewRef(value); + } + + PyAPI_FUNC(const PyConfig*) _Py_GetConfig(void); + + const PyConfig *config = _Py_GetConfig(); + void *member = (char *)config + spec->offset; + switch (spec->type) { + case 
_PyConfig_MEMBER_INT: + case _PyConfig_MEMBER_UINT: + { + int value = *(int *)member; + return PyLong_FromLong(value); + } + case _PyConfig_MEMBER_BOOL: + { + int value = *(int *)member; + return PyBool_FromLong(value != 0); + } + case _PyConfig_MEMBER_ULONG: + { + unsigned long value = *(unsigned long *)member; + return PyLong_FromUnsignedLong(value); + } + case _PyConfig_MEMBER_WSTR: + case _PyConfig_MEMBER_WSTR_OPT: + { + wchar_t *wstr = *(wchar_t **)member; + if (wstr != NULL) { + return PyUnicode_FromWideChar(wstr, -1); + } + else { + return Py_NewRef(Py_None); + } + } + case _PyConfig_MEMBER_WSTR_LIST: + { + const PyWideStringList *list = (const PyWideStringList *)member; + PyObject *tuple = PyTuple_New(list->length); + if (tuple == NULL) { + return NULL; + } + + for (Py_ssize_t i = 0; i < list->length; i++) { + PyObject *item = PyUnicode_FromWideChar(list->items[i], -1); + if (item == NULL) { + Py_DECREF(tuple); + return NULL; + } + PyTuple_SET_ITEM(tuple, i, item); + } + return tuple; + } + default: + Py_UNREACHABLE(); + } + } + + PyErr_Format(PyExc_ValueError, "unknown config option name: %s", name); + return NULL; +} + +static inline int +PyConfig_GetInt(const char *name, int *value) +{ + PyObject *obj = PyConfig_Get(name); + if (obj == NULL) { + return -1; + } + + if (!PyLong_Check(obj)) { + Py_DECREF(obj); + PyErr_Format(PyExc_TypeError, "config option %s is not an int", name); + return -1; + } + + int as_int = PyLong_AsInt(obj); + Py_DECREF(obj); + if (as_int == -1 && PyErr_Occurred()) { + PyErr_Format(PyExc_OverflowError, + "config option %s value does not fit into a C int", name); + return -1; + } + + *value = as_int; + return 0; +} +#endif // PY_VERSION_HEX > 0x03090000 && !defined(PYPY_VERSION) + +// gh-133144 added PyUnstable_Object_IsUniquelyReferenced() to Python 3.14.0b1. +// Adapted from _PyObject_IsUniquelyReferenced() implementation. 
+#if PY_VERSION_HEX < 0x030E00B0 +static inline int PyUnstable_Object_IsUniquelyReferenced(PyObject *obj) +{ +#if !defined(Py_GIL_DISABLED) + return Py_REFCNT(obj) == 1; +#else + // NOTE: the entire ob_ref_shared field must be zero, including flags, to + // ensure that other threads cannot concurrently create new references to + // this object. + return (_Py_IsOwnedByCurrentThread(obj) && + _Py_atomic_load_uint32_relaxed(&obj->ob_ref_local) == 1 && + _Py_atomic_load_ssize_relaxed(&obj->ob_ref_shared) == 0); +#endif +} +#endif + + +#if PY_VERSION_HEX < 0x030F0000 +static inline PyObject* +PySys_GetAttrString(const char *name) +{ +#if PY_VERSION_HEX >= 0x03000000 + PyObject *value = Py_XNewRef(PySys_GetObject(name)); +#else + PyObject *value = Py_XNewRef(PySys_GetObject((char*)name)); +#endif + if (value != NULL) { + return value; + } + if (!PyErr_Occurred()) { + PyErr_Format(PyExc_RuntimeError, "lost sys.%s", name); + } + return NULL; +} + +static inline PyObject* +PySys_GetAttr(PyObject *name) +{ +#if PY_VERSION_HEX >= 0x03000000 + const char *name_str = PyUnicode_AsUTF8(name); +#else + const char *name_str = PyString_AsString(name); +#endif + if (name_str == NULL) { + return NULL; + } + + return PySys_GetAttrString(name_str); +} + +static inline int +PySys_GetOptionalAttrString(const char *name, PyObject **value) +{ +#if PY_VERSION_HEX >= 0x03000000 + *value = Py_XNewRef(PySys_GetObject(name)); +#else + *value = Py_XNewRef(PySys_GetObject((char*)name)); +#endif + if (*value != NULL) { + return 1; + } + return 0; +} + +static inline int +PySys_GetOptionalAttr(PyObject *name, PyObject **value) +{ +#if PY_VERSION_HEX >= 0x03000000 + const char *name_str = PyUnicode_AsUTF8(name); +#else + const char *name_str = PyString_AsString(name); +#endif + if (name_str == NULL) { + *value = NULL; + return -1; + } + + return PySys_GetOptionalAttrString(name_str, value); +} +#endif // PY_VERSION_HEX < 0x030F00A1 + + +#if PY_VERSION_HEX < 0x030F00A1 +typedef struct PyBytesWriter { 
+ char small_buffer[256]; + PyObject *obj; + Py_ssize_t size; +} PyBytesWriter; + +static inline Py_ssize_t +_PyBytesWriter_GetAllocated(PyBytesWriter *writer) +{ + if (writer->obj == NULL) { + return sizeof(writer->small_buffer); + } + else { + return PyBytes_GET_SIZE(writer->obj); + } +} + + +static inline int +_PyBytesWriter_Resize_impl(PyBytesWriter *writer, Py_ssize_t size, + int resize) +{ + int overallocate = resize; + assert(size >= 0); + + if (size <= _PyBytesWriter_GetAllocated(writer)) { + return 0; + } + + if (overallocate) { +#ifdef MS_WINDOWS + /* On Windows, overallocate by 50% is the best factor */ + if (size <= (PY_SSIZE_T_MAX - size / 2)) { + size += size / 2; + } +#else + /* On Linux, overallocate by 25% is the best factor */ + if (size <= (PY_SSIZE_T_MAX - size / 4)) { + size += size / 4; + } +#endif + } + + if (writer->obj != NULL) { + if (_PyBytes_Resize(&writer->obj, size)) { + return -1; + } + assert(writer->obj != NULL); + } + else { + writer->obj = PyBytes_FromStringAndSize(NULL, size); + if (writer->obj == NULL) { + return -1; + } + + if (resize) { + assert((size_t)size > sizeof(writer->small_buffer)); + memcpy(PyBytes_AS_STRING(writer->obj), + writer->small_buffer, + sizeof(writer->small_buffer)); + } + } + return 0; +} + +static inline void* +PyBytesWriter_GetData(PyBytesWriter *writer) +{ + if (writer->obj == NULL) { + return writer->small_buffer; + } + else { + return PyBytes_AS_STRING(writer->obj); + } +} + +static inline Py_ssize_t +PyBytesWriter_GetSize(PyBytesWriter *writer) +{ + return writer->size; +} + +static inline void +PyBytesWriter_Discard(PyBytesWriter *writer) +{ + if (writer == NULL) { + return; + } + + Py_XDECREF(writer->obj); + PyMem_Free(writer); +} + +static inline PyBytesWriter* +PyBytesWriter_Create(Py_ssize_t size) +{ + if (size < 0) { + PyErr_SetString(PyExc_ValueError, "size must be >= 0"); + return NULL; + } + + PyBytesWriter *writer = (PyBytesWriter*)PyMem_Malloc(sizeof(PyBytesWriter)); + if (writer == NULL) 
{ + PyErr_NoMemory(); + return NULL; + } + + writer->obj = NULL; + writer->size = 0; + + if (size >= 1) { + if (_PyBytesWriter_Resize_impl(writer, size, 0) < 0) { + PyBytesWriter_Discard(writer); + return NULL; + } + writer->size = size; + } + return writer; +} + +static inline PyObject* +PyBytesWriter_FinishWithSize(PyBytesWriter *writer, Py_ssize_t size) +{ + PyObject *result; + if (size == 0) { + result = PyBytes_FromStringAndSize("", 0); + } + else if (writer->obj != NULL) { + if (size != PyBytes_GET_SIZE(writer->obj)) { + if (_PyBytes_Resize(&writer->obj, size)) { + goto error; + } + } + result = writer->obj; + writer->obj = NULL; + } + else { + result = PyBytes_FromStringAndSize(writer->small_buffer, size); + } + PyBytesWriter_Discard(writer); + return result; + +error: + PyBytesWriter_Discard(writer); + return NULL; +} + +static inline PyObject* +PyBytesWriter_Finish(PyBytesWriter *writer) +{ + return PyBytesWriter_FinishWithSize(writer, writer->size); +} + +static inline PyObject* +PyBytesWriter_FinishWithPointer(PyBytesWriter *writer, void *buf) +{ + Py_ssize_t size = (char*)buf - (char*)PyBytesWriter_GetData(writer); + if (size < 0 || size > _PyBytesWriter_GetAllocated(writer)) { + PyBytesWriter_Discard(writer); + PyErr_SetString(PyExc_ValueError, "invalid end pointer"); + return NULL; + } + + return PyBytesWriter_FinishWithSize(writer, size); +} + +static inline int +PyBytesWriter_Resize(PyBytesWriter *writer, Py_ssize_t size) +{ + if (size < 0) { + PyErr_SetString(PyExc_ValueError, "size must be >= 0"); + return -1; + } + if (_PyBytesWriter_Resize_impl(writer, size, 1) < 0) { + return -1; + } + writer->size = size; + return 0; +} + +static inline int +PyBytesWriter_Grow(PyBytesWriter *writer, Py_ssize_t size) +{ + if (size < 0 && writer->size + size < 0) { + PyErr_SetString(PyExc_ValueError, "invalid size"); + return -1; + } + if (size > PY_SSIZE_T_MAX - writer->size) { + PyErr_NoMemory(); + return -1; + } + size = writer->size + size; + + if 
(_PyBytesWriter_Resize_impl(writer, size, 1) < 0) { + return -1; + } + writer->size = size; + return 0; +} + +static inline void* +PyBytesWriter_GrowAndUpdatePointer(PyBytesWriter *writer, + Py_ssize_t size, void *buf) +{ + Py_ssize_t pos = (char*)buf - (char*)PyBytesWriter_GetData(writer); + if (PyBytesWriter_Grow(writer, size) < 0) { + return NULL; + } + return (char*)PyBytesWriter_GetData(writer) + pos; +} + +static inline int +PyBytesWriter_WriteBytes(PyBytesWriter *writer, + const void *bytes, Py_ssize_t size) +{ + if (size < 0) { + size_t len = strlen((const char*)bytes); + if (len > (size_t)PY_SSIZE_T_MAX) { + PyErr_NoMemory(); + return -1; + } + size = (Py_ssize_t)len; + } + + Py_ssize_t pos = writer->size; + if (PyBytesWriter_Grow(writer, size) < 0) { + return -1; + } + char *buf = (char*)PyBytesWriter_GetData(writer); + memcpy(buf + pos, bytes, (size_t)size); + return 0; +} + +static inline int +PyBytesWriter_Format(PyBytesWriter *writer, const char *format, ...) + Py_GCC_ATTRIBUTE((format(printf, 2, 3))); + +static inline int +PyBytesWriter_Format(PyBytesWriter *writer, const char *format, ...) 
+{ + va_list vargs; + va_start(vargs, format); + PyObject *str = PyBytes_FromFormatV(format, vargs); + va_end(vargs); + + if (str == NULL) { + return -1; + } + int res = PyBytesWriter_WriteBytes(writer, + PyBytes_AS_STRING(str), + PyBytes_GET_SIZE(str)); + Py_DECREF(str); + return res; +} +#endif // PY_VERSION_HEX < 0x030F00A1 + + +#ifdef __cplusplus +} +#endif +#endif // PYTHONCAPI_COMPAT diff --git a/asyncpg/protocol/record/pythoncapi_compat_extras.h b/asyncpg/protocol/record/pythoncapi_compat_extras.h new file mode 100644 index 00000000..3db97665 --- /dev/null +++ b/asyncpg/protocol/record/pythoncapi_compat_extras.h @@ -0,0 +1,72 @@ +#ifndef PYTHONCAPI_COMPAT_EXTRAS +#define PYTHONCAPI_COMPAT_EXTRAS + +#ifdef __cplusplus +extern "C" { +#endif + +#include + +// Python 3.11.0a6 added PyType_GetModuleByDef() to Python.h +#if PY_VERSION_HEX < 0x030b00A6 +PyObject * +PyType_GetModuleByDef(PyTypeObject *type, PyModuleDef *def) +{ + assert(PyType_Check(type)); + + if (!PyType_HasFeature(type, Py_TPFLAGS_HEAPTYPE)) { + // type_ready_mro() ensures that no heap type is + // contained in a static type MRO. + goto error; + } + else { + PyHeapTypeObject *ht = (PyHeapTypeObject*)type; + PyObject *module = ht->ht_module; + if (module && PyModule_GetDef(module) == def) { + return module; + } + } + + PyObject *res = NULL; + PyObject *mro = type->tp_mro; + // The type must be ready + assert(mro != NULL); + assert(PyTuple_Check(mro)); + // mro_invoke() ensures that the type MRO cannot be empty. + assert(PyTuple_GET_SIZE(mro) >= 1); + // Also, the first item in the MRO is the type itself, which + // we already checked above. We skip it in the loop. 
+ assert(PyTuple_GET_ITEM(mro, 0) == (PyObject *)type); + + Py_ssize_t n = PyTuple_GET_SIZE(mro); + for (Py_ssize_t i = 1; i < n; i++) { + PyObject *super = PyTuple_GET_ITEM(mro, i); + if (!PyType_HasFeature((PyTypeObject *)super, Py_TPFLAGS_HEAPTYPE)) { + // Static types in the MRO need to be skipped + continue; + } + + PyHeapTypeObject *ht = (PyHeapTypeObject*)super; + PyObject *module = ht->ht_module; + if (module && PyModule_GetDef(module) == def) { + res = module; + break; + } + } + + if (res != NULL) { + return res; + } +error: + PyErr_Format( + PyExc_TypeError, + "PyType_GetModuleByDef: No superclass of '%s' has the given module", + type->tp_name); + return NULL; +} +#endif + +#ifdef __cplusplus +} +#endif +#endif // PYTHONCAPI_COMPAT_EXTRAS diff --git a/asyncpg/protocol/record/recordobj.c b/asyncpg/protocol/record/recordobj.c index 9767f43b..58c66662 100644 --- a/asyncpg/protocol/record/recordobj.c +++ b/asyncpg/protocol/record/recordobj.c @@ -6,68 +6,150 @@ License: PSFL v2; see CPython/LICENSE for details. 
*/ +#include +#include +#include "pythoncapi_compat.h" +#include "pythoncapi_compat_extras.h" + #include "recordobj.h" -#ifdef _PyObject_GC_IS_TRACKED -# define _ApgObject_GC_IS_TRACKED _PyObject_GC_IS_TRACKED -#else -# define _ApgObject_GC_IS_TRACKED PyObject_GC_IsTracked +#ifndef _PyCFunction_CAST +#define _PyCFunction_CAST(func) ((PyCFunction)(void (*)(void))(func)) #endif -static PyObject * record_iter(PyObject *); -static PyObject * record_new_items_iter(PyObject *); +static size_t ApgRecord_MAXSIZE = + (((size_t)PY_SSIZE_T_MAX - sizeof(ApgRecordObject) - sizeof(PyObject *)) / + sizeof(PyObject *)); + +/* Largest record to save on free list */ +#define ApgRecord_MAXSAVESIZE 20 + +/* Maximum number of records of each size to save */ +#define ApgRecord_MAXFREELIST 2000 + +typedef struct { + ApgRecordObject *freelist[ApgRecord_MAXSAVESIZE]; + int numfree[ApgRecord_MAXSAVESIZE]; +} record_freelist_state; + +typedef struct { + PyTypeObject *ApgRecord_Type; + PyTypeObject *ApgRecordDesc_Type; + PyTypeObject *ApgRecordIter_Type; + PyTypeObject *ApgRecordItems_Type; + + Py_tss_t freelist_key; // TSS key for per-thread record_freelist_state +} record_module_state; + +static inline record_module_state * +get_module_state(PyObject *module) +{ + void *state = PyModule_GetState(module); + if (state == NULL) { + PyErr_SetString(PyExc_SystemError, "failed to get record module state"); + return NULL; + } + return (record_module_state *)state; +} + +static inline record_module_state * +get_module_state_from_type(PyTypeObject *type) +{ + void *state = PyType_GetModuleState(type); + if (state != NULL) { + return (record_module_state *)state; + } + + PyErr_Format(PyExc_SystemError, "could not get record module state from '%.100s'", + type->tp_name); + return NULL; +} -static ApgRecordObject *free_list[ApgRecord_MAXSAVESIZE]; -static int numfree[ApgRecord_MAXSAVESIZE]; +static struct PyModuleDef _recordmodule; -static size_t MAX_RECORD_SIZE = ( - ((size_t)PY_SSIZE_T_MAX - 
sizeof(ApgRecordObject) - sizeof(PyObject *)) - / sizeof(PyObject *) -); +static inline record_module_state * +find_module_state_by_def(PyTypeObject *type) +{ + PyObject *mod = PyType_GetModuleByDef(type, &_recordmodule); + if (mod == NULL) + return NULL; + return get_module_state(mod); +} +static inline record_freelist_state * +get_freelist_state(record_module_state *state) +{ + record_freelist_state *freelist; + + freelist = (record_freelist_state *)PyThread_tss_get(&state->freelist_key); + if (freelist == NULL) { + freelist = (record_freelist_state *)PyMem_Calloc( + 1, sizeof(record_freelist_state)); + if (freelist == NULL) { + PyErr_NoMemory(); + return NULL; + } + if (PyThread_tss_set(&state->freelist_key, (void *)freelist) != 0) { + PyMem_Free(freelist); + PyErr_SetString( + PyExc_SystemError, "failed to set thread-specific data"); + return NULL; + } + } + return freelist; +} PyObject * -ApgRecord_New(PyTypeObject *type, PyObject *desc, Py_ssize_t size) +make_record(PyTypeObject *type, PyObject *desc, Py_ssize_t size, + record_module_state *state) { ApgRecordObject *o; Py_ssize_t i; int need_gc_track = 0; - if (size < 0 || desc == NULL || !ApgRecordDesc_CheckExact(desc)) { + if (size < 0 || desc == NULL || + Py_TYPE(desc) != state->ApgRecordDesc_Type) { PyErr_BadInternalCall(); return NULL; } - if (type == &ApgRecord_Type) { - if (size < ApgRecord_MAXSAVESIZE && (o = free_list[size]) != NULL) { - free_list[size] = (ApgRecordObject *) o->ob_item[0]; - numfree[size]--; - _Py_NewReference((PyObject *)o); + if (type == state->ApgRecord_Type) { + record_freelist_state *freelist = NULL; + + if (size < ApgRecord_MAXSAVESIZE) { + freelist = get_freelist_state(state); + if (freelist != NULL && freelist->freelist[size] != NULL) { + o = freelist->freelist[size]; + freelist->freelist[size] = (ApgRecordObject *)o->ob_item[0]; + freelist->numfree[size]--; + _Py_NewReference((PyObject *)o); + } + else { + freelist = NULL; + } } - else { - /* Check for overflow */ - if 
((size_t)size > MAX_RECORD_SIZE) { + + if (freelist == NULL) { + if ((size_t)size > ApgRecord_MAXSIZE) { return PyErr_NoMemory(); } - o = PyObject_GC_NewVar(ApgRecordObject, &ApgRecord_Type, size); + o = PyObject_GC_NewVar(ApgRecordObject, state->ApgRecord_Type, size); if (o == NULL) { return NULL; } } need_gc_track = 1; - } else { - assert(PyType_IsSubtype(type, &ApgRecord_Type)); + } + else { + assert(PyType_IsSubtype(type, state->ApgRecord_Type)); - if ((size_t)size > MAX_RECORD_SIZE) { + if ((size_t)size > ApgRecord_MAXSIZE) { return PyErr_NoMemory(); } o = (ApgRecordObject *)type->tp_alloc(type, size); - if (!_ApgObject_GC_IS_TRACKED((PyObject *)o)) { - PyErr_SetString( - PyExc_TypeError, - "record subclass is not tracked by GC" - ); + if (!PyObject_GC_IsTracked((PyObject *)o)) { + PyErr_SetString(PyExc_TypeError, "record subclass is not tracked by GC"); return NULL; } } @@ -77,20 +159,28 @@ ApgRecord_New(PyTypeObject *type, PyObject *desc, Py_ssize_t size) } Py_INCREF(desc); - o->desc = (ApgRecordDescObject*)desc; + o->desc = (ApgRecordDescObject *)desc; o->self_hash = -1; if (need_gc_track) { PyObject_GC_Track(o); } - return (PyObject *) o; + return (PyObject *)o; } - static void -record_dealloc(ApgRecordObject *o) +record_dealloc(PyObject *self) { + ApgRecordObject *o = (ApgRecordObject *)self; Py_ssize_t i; Py_ssize_t len = Py_SIZE(o); + PyTypeObject *tp = Py_TYPE(o); + record_module_state *state; + int skip_dealloc = 0; + + state = find_module_state_by_def(tp); + if (state == NULL) { + return; + } PyObject_GC_UnTrack(o); @@ -99,131 +189,114 @@ record_dealloc(ApgRecordObject *o) Py_CLEAR(o->desc); Py_TRASHCAN_BEGIN(o, record_dealloc) - if (len > 0) { - i = len; - while (--i >= 0) { - Py_CLEAR(o->ob_item[i]); - } - if (len < ApgRecord_MAXSAVESIZE && - numfree[len] < ApgRecord_MAXFREELIST && - ApgRecord_CheckExact(o)) - { - o->ob_item[0] = (PyObject *) free_list[len]; - numfree[len]++; - free_list[len] = o; - goto done; /* return */ + i = len; + while (--i 
>= 0) { + Py_XDECREF(o->ob_item[i]); + } + + if (len < ApgRecord_MAXSAVESIZE && tp == state->ApgRecord_Type) { + record_freelist_state *freelist = get_freelist_state(state); + if (freelist != NULL && freelist->numfree[len] < ApgRecord_MAXFREELIST) { + o->ob_item[0] = (PyObject *)freelist->freelist[len]; + freelist->numfree[len]++; + freelist->freelist[len] = o; + skip_dealloc = 1; } } - Py_TYPE(o)->tp_free((PyObject *)o); -done: + + if (!skip_dealloc) { + tp->tp_free(self); + Py_DECREF(tp); + } + Py_TRASHCAN_END } - static int -record_traverse(ApgRecordObject *o, visitproc visit, void *arg) +record_traverse(PyObject *self, visitproc visit, void *arg) { - Py_ssize_t i; - - Py_VISIT(o->desc); - - for (i = Py_SIZE(o); --i >= 0;) { - if (o->ob_item[i] != NULL) { - Py_VISIT(o->ob_item[i]); - } + ApgRecordObject *o = (ApgRecordObject *)self; + for (Py_ssize_t i = Py_SIZE(o); --i >= 0;) { + Py_VISIT(o->ob_item[i]); } - return 0; } - -static Py_ssize_t -record_length(ApgRecordObject *o) -{ - return Py_SIZE(o); -} - - -#if PY_VERSION_HEX >= 0x03080000 - +/* Below are the official constants from the xxHash specification. Optimizing + compilers should emit a single "rotate" instruction for the + _PyTuple_HASH_XXROTATE() expansion. If that doesn't happen for some important + platform, the macro could be changed to expand to a platform-specific rotate + spelling instead. 
+*/ #if SIZEOF_PY_UHASH_T > 4 -#define _PyHASH_XXPRIME_1 ((Py_uhash_t)11400714785074694791ULL) -#define _PyHASH_XXPRIME_2 ((Py_uhash_t)14029467366897019727ULL) -#define _PyHASH_XXPRIME_5 ((Py_uhash_t)2870177450012600261ULL) -#define _PyHASH_XXROTATE(x) ((x << 31) | (x >> 33)) /* Rotate left 31 bits */ +#define _ApgRecord_HASH_XXPRIME_1 ((Py_uhash_t)11400714785074694791ULL) +#define _ApgRecord_HASH_XXPRIME_2 ((Py_uhash_t)14029467366897019727ULL) +#define _ApgRecord_HASH_XXPRIME_5 ((Py_uhash_t)2870177450012600261ULL) +#define _ApgRecord_HASH_XXROTATE(x) ((x << 31) | (x >> 33)) /* Rotate left 31 bits */ #else -#define _PyHASH_XXPRIME_1 ((Py_uhash_t)2654435761UL) -#define _PyHASH_XXPRIME_2 ((Py_uhash_t)2246822519UL) -#define _PyHASH_XXPRIME_5 ((Py_uhash_t)374761393UL) -#define _PyHASH_XXROTATE(x) ((x << 13) | (x >> 19)) /* Rotate left 13 bits */ +#define _ApgRecord_HASH_XXPRIME_1 ((Py_uhash_t)2654435761UL) +#define _ApgRecord_HASH_XXPRIME_2 ((Py_uhash_t)2246822519UL) +#define _ApgRecord_HASH_XXPRIME_5 ((Py_uhash_t)374761393UL) +#define _ApgRecord_HASH_XXROTATE(x) ((x << 13) | (x >> 19)) /* Rotate left 13 bits */ #endif static Py_hash_t -record_hash(ApgRecordObject *v) +record_hash(PyObject *op) { - Py_uhash_t acc = _PyHASH_XXPRIME_5; - size_t i, len = (size_t)Py_SIZE(v); - PyObject **els = v->ob_item; - for (i = 0; i < len; i++) { - Py_uhash_t lane = (Py_uhash_t)PyObject_Hash(els[i]); + ApgRecordObject *v = (ApgRecordObject *)op; + Py_uhash_t acc; + Py_ssize_t len = Py_SIZE(v); + PyObject **item = v->ob_item; + acc = _ApgRecord_HASH_XXPRIME_5; + for (Py_ssize_t i = 0; i < len; i++) { + Py_uhash_t lane = (Py_uhash_t)PyObject_Hash(item[i]); if (lane == (Py_uhash_t)-1) { return -1; } - acc += lane * _PyHASH_XXPRIME_2; - acc = _PyHASH_XXROTATE(acc); - acc *= _PyHASH_XXPRIME_1; + acc += lane * _ApgRecord_HASH_XXPRIME_2; + acc = _ApgRecord_HASH_XXROTATE(acc); + acc *= _ApgRecord_HASH_XXPRIME_1; } /* Add input length, mangled to keep the historical value of hash(()). 
*/ - acc += len ^ (_PyHASH_XXPRIME_5 ^ 3527539UL); + acc += (Py_uhash_t)len ^ (_ApgRecord_HASH_XXPRIME_5 ^ 3527539UL); if (acc == (Py_uhash_t)-1) { - return 1546275796; + acc = 1546275796; } + return (Py_hash_t)acc; } -#else - -static Py_hash_t -record_hash(ApgRecordObject *v) +static Py_ssize_t +record_length(PyObject *self) { - Py_uhash_t x; /* Unsigned for defined overflow behavior. */ - Py_hash_t y; - Py_ssize_t len; - PyObject **p; - Py_uhash_t mult; + ApgRecordObject *a = (ApgRecordObject *)self; + return Py_SIZE(a); +} - if (v->self_hash != -1) { - return v->self_hash; +static int +record_contains(PyObject *self, PyObject *el) +{ + ApgRecordObject *a = (ApgRecordObject *)self; + if (a->desc == NULL || a->desc->keys == NULL) { + return 0; } + return PySequence_Contains(a->desc->keys, el); +} - len = Py_SIZE(v); - mult = _PyHASH_MULTIPLIER; - - x = 0x345678UL; - p = v->ob_item; - while (--len >= 0) { - y = PyObject_Hash(*p++); - if (y == -1) { - return -1; - } - x = (x ^ (Py_uhash_t)y) * mult; - /* the cast might truncate len; that doesn't change hash stability */ - mult += (Py_uhash_t)(82520UL + (size_t)len + (size_t)len); - } - x += 97531UL; - if (x == (Py_uhash_t)-1) { - x = (Py_uhash_t)-2; +static PyObject * +record_item(ApgRecordObject *op, Py_ssize_t i) +{ + ApgRecordObject *a = (ApgRecordObject *)op; + if (i < 0 || i >= Py_SIZE(a)) { + PyErr_SetString(PyExc_IndexError, "record index out of range"); + return NULL; } - v->self_hash = (Py_hash_t)x; - return (Py_hash_t)x; + return Py_NewRef(a->ob_item[i]); } -#endif - - static PyObject * record_richcompare(PyObject *v, PyObject *w, int op) { @@ -234,36 +307,42 @@ record_richcompare(PyObject *v, PyObject *w, int op) int v_is_record = 0; int w_is_record = 0; int comp; + PyTypeObject *v_type = Py_TYPE(v); + PyTypeObject *w_type = Py_TYPE(w); + + record_module_state *state; + state = find_module_state_by_def(v_type); + if (state == NULL) { + PyErr_Clear(); + state = find_module_state_by_def(w_type); + } if 
(PyTuple_Check(v)) { v_is_tuple = 1; } - else if (ApgRecord_CheckExact(v)) { + else if (v_type == state->ApgRecord_Type) { v_is_record = 1; } - else if (!ApgRecord_Check(v)) { + else if (!PyObject_TypeCheck(v, state->ApgRecord_Type)) { Py_RETURN_NOTIMPLEMENTED; } if (PyTuple_Check(w)) { w_is_tuple = 1; } - else if (ApgRecord_CheckExact(w)) { + else if (w_type == state->ApgRecord_Type) { w_is_record = 1; } - else if (!ApgRecord_Check(w)) { + else if (!PyObject_TypeCheck(w, state->ApgRecord_Type)) { Py_RETURN_NOTIMPLEMENTED; } - -#define V_ITEM(i) \ - (v_is_tuple ? \ - PyTuple_GET_ITEM(v, i) \ - : (v_is_record ? ApgRecord_GET_ITEM(v, i) : PySequence_GetItem(v, i))) -#define W_ITEM(i) \ - (w_is_tuple ? \ - PyTuple_GET_ITEM(w, i) \ - : (w_is_record ? ApgRecord_GET_ITEM(w, i) : PySequence_GetItem(w, i))) +#define V_ITEM(i) \ + (v_is_tuple ? PyTuple_GET_ITEM(v, i) \ + : (v_is_record ? ApgRecord_GET_ITEM(v, i) : PySequence_GetItem(v, i))) +#define W_ITEM(i) \ + (w_is_tuple ? PyTuple_GET_ITEM(w, i) \ + : (w_is_record ? 
ApgRecord_GET_ITEM(w, i) : PySequence_GetItem(w, i))) vlen = Py_SIZE(v); wlen = Py_SIZE(w); @@ -296,13 +375,26 @@ record_richcompare(PyObject *v, PyObject *w, int op) /* No more items to compare -- compare sizes */ int cmp; switch (op) { - case Py_LT: cmp = vlen < wlen; break; - case Py_LE: cmp = vlen <= wlen; break; - case Py_EQ: cmp = vlen == wlen; break; - case Py_NE: cmp = vlen != wlen; break; - case Py_GT: cmp = vlen > wlen; break; - case Py_GE: cmp = vlen >= wlen; break; - default: return NULL; /* cannot happen */ + case Py_LT: + cmp = vlen < wlen; + break; + case Py_LE: + cmp = vlen <= wlen; + break; + case Py_EQ: + cmp = vlen == wlen; + break; + case Py_NE: + cmp = vlen != wlen; + break; + case Py_GT: + cmp = vlen > wlen; + break; + case Py_GE: + cmp = vlen >= wlen; + break; + default: + Py_UNREACHABLE(); } if (cmp) { Py_RETURN_TRUE; @@ -327,26 +419,12 @@ record_richcompare(PyObject *v, PyObject *w, int op) #undef W_ITEM } - -static PyObject * -record_item(ApgRecordObject *o, Py_ssize_t i) -{ - if (i < 0 || i >= Py_SIZE(o)) { - PyErr_SetString(PyExc_IndexError, "record index out of range"); - return NULL; - } - Py_INCREF(o->ob_item[i]); - return o->ob_item[i]; -} - - typedef enum item_by_name_result { APG_ITEM_FOUND = 0, APG_ERROR = -1, APG_ITEM_NOT_FOUND = -2 } item_by_name_result_t; - /* Lookup a record value by its name. Return 0 on success, -2 if the * value was not found (with KeyError set), and -1 on all other errors. 
*/ @@ -395,62 +473,62 @@ record_item_by_name(ApgRecordObject *o, PyObject *item, PyObject **result) return APG_ERROR; } - static PyObject * -record_subscript(ApgRecordObject* o, PyObject* item) +record_subscript(PyObject *op, PyObject *item) { + ApgRecordObject *self = (ApgRecordObject *)op; + if (PyIndex_Check(item)) { Py_ssize_t i = PyNumber_AsSsize_t(item, PyExc_IndexError); if (i == -1 && PyErr_Occurred()) return NULL; if (i < 0) { - i += Py_SIZE(o); + i += Py_SIZE(self); } - return record_item(o, i); + return record_item(self, i); } else if (PySlice_Check(item)) { - Py_ssize_t start, stop, step, slicelength, cur, i; - PyObject* result; - PyObject* it; + Py_ssize_t start, stop, step, cur, slicelength, i; + PyObject *it; PyObject **src, **dest; - if (PySlice_GetIndicesEx( - item, - Py_SIZE(o), - &start, &stop, &step, &slicelength) < 0) - { + if (PySlice_Unpack(item, &start, &stop, &step) < 0) { return NULL; } + slicelength = PySlice_AdjustIndices(Py_SIZE(self), &start, &stop, step); if (slicelength <= 0) { return PyTuple_New(0); } + else if (start == 0 && step == 1 && slicelength == Py_SIZE(self) && + PyTuple_CheckExact(self)) { + return Py_NewRef(self); + } else { - result = PyTuple_New(slicelength); - if (!result) return NULL; + PyTupleObject *result = (PyTupleObject *)PyTuple_New(slicelength); + if (!result) + return NULL; - src = o->ob_item; - dest = ((PyTupleObject *)result)->ob_item; + src = self->ob_item; + dest = result->ob_item; for (cur = start, i = 0; i < slicelength; cur += step, i++) { - it = src[cur]; - Py_INCREF(it); + it = Py_NewRef(src[cur]); dest[i] = it; } - return result; + return (PyObject *)result; } } else { - PyObject* result; + PyObject *result; - if (record_item_by_name(o, item, &result) < 0) + if (record_item_by_name(self, item, &result) < 0) return NULL; else return result; } } - static const char * get_typename(PyTypeObject *type) { @@ -465,13 +543,13 @@ get_typename(PyTypeObject *type) return s; } - static PyObject * 
-record_repr(ApgRecordObject *v) +record_repr(PyObject *self) { + ApgRecordObject *v = (ApgRecordObject *)self; Py_ssize_t i, n; - PyObject *keys_iter, *type_prefix; - _PyUnicodeWriter writer; + PyObject *keys_iter; + PyUnicodeWriter *writer; n = Py_SIZE(v); if (n == 0) { @@ -491,144 +569,137 @@ record_repr(ApgRecordObject *v) } return NULL; } + writer = PyUnicodeWriter_Create(12); /* */ - _PyUnicodeWriter_Init(&writer); - writer.overallocate = 1; - writer.min_length = 12; /* */ - - type_prefix = PyUnicode_FromFormat("<%s ", get_typename(Py_TYPE(v))); - if (_PyUnicodeWriter_WriteStr(&writer, type_prefix) < 0) { - Py_DECREF(type_prefix); + if (PyUnicodeWriter_Format(writer, "<%s ", get_typename(Py_TYPE(v))) < 0) { goto error; } - Py_DECREF(type_prefix); for (i = 0; i < n; ++i) { + int res; PyObject *key; - PyObject *key_repr; - PyObject *val_repr; - if (i > 0) { - if (_PyUnicodeWriter_WriteChar(&writer, ' ') < 0) { + if (i > 0) + if (PyUnicodeWriter_WriteChar(writer, ' ') < 0) goto error; - } - } - - if (Py_EnterRecursiveCall(" while getting the repr of a record")) { - goto error; - } - val_repr = PyObject_Repr(v->ob_item[i]); - Py_LeaveRecursiveCall(); - if (val_repr == NULL) { - goto error; - } key = PyIter_Next(keys_iter); if (key == NULL) { - Py_DECREF(val_repr); PyErr_SetString(PyExc_RuntimeError, "invalid record mapping"); goto error; } - key_repr = PyObject_Str(key); + res = PyUnicodeWriter_WriteStr(writer, key); Py_DECREF(key); - if (key_repr == NULL) { - Py_DECREF(val_repr); + if (res < 0) goto error; - } - if (_PyUnicodeWriter_WriteStr(&writer, key_repr) < 0) { - Py_DECREF(key_repr); - Py_DECREF(val_repr); + if (PyUnicodeWriter_WriteChar(writer, '=') < 0) goto error; - } - Py_DECREF(key_repr); - if (_PyUnicodeWriter_WriteChar(&writer, '=') < 0) { - Py_DECREF(val_repr); + if (Py_EnterRecursiveCall(" while getting the repr of a record")) goto error; - } - - if (_PyUnicodeWriter_WriteStr(&writer, val_repr) < 0) { - Py_DECREF(val_repr); + res = 
PyUnicodeWriter_WriteRepr(writer, v->ob_item[i]); + Py_LeaveRecursiveCall(); + if (res < 0) goto error; - } - Py_DECREF(val_repr); } - writer.overallocate = 0; - if (_PyUnicodeWriter_WriteChar(&writer, '>') < 0) { + if (PyUnicodeWriter_WriteChar(writer, '>') < 0) goto error; - } Py_DECREF(keys_iter); Py_ReprLeave((PyObject *)v); - return _PyUnicodeWriter_Finish(&writer); + return PyUnicodeWriter_Finish(writer); error: Py_DECREF(keys_iter); - _PyUnicodeWriter_Dealloc(&writer); + PyUnicodeWriter_Discard(writer); Py_ReprLeave((PyObject *)v); return NULL; } - - static PyObject * -record_values(PyObject *o, PyObject *args) -{ - return record_iter(o); -} - +record_new_iter(ApgRecordObject *, const record_module_state *); static PyObject * -record_keys(PyObject *o, PyObject *args) +record_iter(PyObject *seq) { - if (!ApgRecord_Check(o)) { - PyErr_BadInternalCall(); + ApgRecordObject *r = (ApgRecordObject *)seq; + record_module_state *state; + + state = find_module_state_by_def(Py_TYPE(seq)); + if (state == NULL) { return NULL; } - return PyObject_GetIter(((ApgRecordObject*)o)->desc->keys); + return record_new_iter(r, state); } - static PyObject * -record_items(PyObject *o, PyObject *args) +record_values(PyObject *self, PyTypeObject *defcls, PyObject *const *args, + size_t nargsf, PyObject *kwnames) { - if (!ApgRecord_Check(o)) { - PyErr_BadInternalCall(); + ApgRecordObject *r = (ApgRecordObject *)self; + record_module_state *state = get_module_state_from_type(defcls); + + if (state == NULL) return NULL; - } - return record_new_items_iter(o); + return record_new_iter(r, state); } +static PyObject * +record_keys(PyObject *self, PyTypeObject *defcls, PyObject *const *args, + size_t nargsf, PyObject *kwnames) +{ + ApgRecordObject *r = (ApgRecordObject *)self; + return PyObject_GetIter(r->desc->keys); +} -static int -record_contains(ApgRecordObject *o, PyObject *arg) +static PyObject * +record_new_items_iter(ApgRecordObject *, const record_module_state *); + +static PyObject * 
+record_items(PyObject *self, PyTypeObject *defcls, PyObject *const *args, + size_t nargsf, PyObject *kwnames) { - if (!ApgRecord_Check(o)) { - PyErr_BadInternalCall(); - return -1; - } + ApgRecordObject *r = (ApgRecordObject *)self; + record_module_state *state = get_module_state_from_type(defcls); - return PySequence_Contains(o->desc->mapping, arg); -} + if (state == NULL) + return NULL; + return record_new_items_iter(r, state); +} static PyObject * -record_get(ApgRecordObject* o, PyObject* args) +record_get(PyObject *self, PyTypeObject *defcls, PyObject *const *args, + size_t nargsf, PyObject *kwnames) { + Py_ssize_t nargs = PyVectorcall_NARGS(nargsf); PyObject *key; PyObject *defval = Py_None; PyObject *val = NULL; int res; - if (!PyArg_UnpackTuple(args, "get", 1, 2, &key, &defval)) + if (nargs == 2) { + key = args[0]; + defval = args[1]; + } else if (nargs == 1) { + key = args[0]; + } else { + PyErr_Format(PyExc_TypeError, + "Record.get() expected 1 or 2 arguments, got %zd", + nargs); + } + + if (kwnames != NULL && PyTuple_GET_SIZE(kwnames) != 0) { + PyErr_SetString(PyExc_TypeError, "Record.get() takes no keyword arguments"); return NULL; + } - res = record_item_by_name(o, key, &val); + res = record_item_by_name((ApgRecordObject *)self, key, &val); if (res == APG_ITEM_NOT_FOUND) { PyErr_Clear(); Py_INCREF(defval); @@ -638,74 +709,82 @@ record_get(ApgRecordObject* o, PyObject* args) return val; } +static PyObject * +record_new(PyTypeObject *type, PyObject *args, PyObject *kwargs) +{ + record_module_state *state; -static PySequenceMethods record_as_sequence = { - (lenfunc)record_length, /* sq_length */ - 0, /* sq_concat */ - 0, /* sq_repeat */ - (ssizeargfunc)record_item, /* sq_item */ - 0, /* sq_slice */ - 0, /* sq_ass_item */ - 0, /* sq_ass_slice */ - (objobjproc)record_contains, /* sq_contains */ -}; - + state = get_module_state_from_type(type); + if (state == NULL) { + return NULL; + } -static PyMappingMethods record_as_mapping = { - (lenfunc)record_length, 
/* mp_length */ - (binaryfunc)record_subscript, /* mp_subscript */ - 0 /* mp_ass_subscript */ -}; + if (type == state->ApgRecord_Type) { + PyErr_Format(PyExc_TypeError, "cannot create '%.100s' instances", type->tp_name); + return NULL; + } + /* For subclasses, use the default allocation */ + return type->tp_alloc(type, 0); +} static PyMethodDef record_methods[] = { - {"values", (PyCFunction)record_values, METH_NOARGS}, - {"keys", (PyCFunction)record_keys, METH_NOARGS}, - {"items", (PyCFunction)record_items, METH_NOARGS}, - {"get", (PyCFunction)record_get, METH_VARARGS}, - {NULL, NULL} /* sentinel */ + {"values", _PyCFunction_CAST(record_values), METH_METHOD | METH_FASTCALL | METH_KEYWORDS}, + {"keys", _PyCFunction_CAST(record_keys), METH_METHOD | METH_FASTCALL | METH_KEYWORDS}, + {"items", _PyCFunction_CAST(record_items), METH_METHOD | METH_FASTCALL | METH_KEYWORDS}, + {"get", _PyCFunction_CAST(record_get), METH_METHOD | METH_FASTCALL | METH_KEYWORDS}, + {NULL, NULL} /* sentinel */ }; - -PyTypeObject ApgRecord_Type = { - PyVarObject_HEAD_INIT(NULL, 0) - .tp_name = "asyncpg.Record", - .tp_basicsize = sizeof(ApgRecordObject) - sizeof(PyObject *), - .tp_itemsize = sizeof(PyObject *), - .tp_dealloc = (destructor)record_dealloc, - .tp_repr = (reprfunc)record_repr, - .tp_as_sequence = &record_as_sequence, - .tp_as_mapping = &record_as_mapping, - .tp_hash = (hashfunc)record_hash, - .tp_getattro = PyObject_GenericGetAttr, - .tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | Py_TPFLAGS_BASETYPE, - .tp_traverse = (traverseproc)record_traverse, - .tp_richcompare = record_richcompare, - .tp_iter = record_iter, - .tp_methods = record_methods, - .tp_free = PyObject_GC_Del, +static PyType_Slot ApgRecord_TypeSlots[] = { + {Py_tp_dealloc, record_dealloc}, + {Py_tp_repr, record_repr}, + {Py_tp_hash, record_hash}, + {Py_tp_getattro, PyObject_GenericGetAttr}, + {Py_tp_traverse, record_traverse}, + {Py_tp_richcompare, record_richcompare}, + {Py_tp_iter, record_iter}, + 
{Py_tp_methods, record_methods}, + {Py_tp_new, record_new}, + {Py_tp_free, PyObject_GC_Del}, + {Py_sq_length, record_length}, + {Py_sq_item, record_item}, + {Py_sq_contains, record_contains}, + {Py_mp_length, record_length}, + {Py_mp_subscript, record_subscript}, + {0, NULL}, }; +#ifndef Py_TPFLAGS_IMMUTABLETYPE +#define Py_TPFLAGS_IMMUTABLETYPE 0 +#endif + +static PyType_Spec ApgRecord_TypeSpec = { + .name = "asyncpg.protocol.record.Record", + .basicsize = sizeof(ApgRecordObject) - sizeof(PyObject *), + .itemsize = sizeof(PyObject *), + .flags = (Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | Py_TPFLAGS_BASETYPE | + Py_TPFLAGS_IMMUTABLETYPE), + .slots = ApgRecord_TypeSlots, +}; /* Record Iterator */ - typedef struct { - PyObject_HEAD - Py_ssize_t it_index; + PyObject_HEAD Py_ssize_t it_index; ApgRecordObject *it_seq; /* Set to NULL when iterator is exhausted */ } ApgRecordIterObject; - static void record_iter_dealloc(ApgRecordIterObject *it) { + PyTypeObject *tp = Py_TYPE(it); PyObject_GC_UnTrack(it); Py_CLEAR(it->it_seq); PyObject_GC_Del(it); + Py_DECREF(tp); } - static int record_iter_traverse(ApgRecordIterObject *it, visitproc visit, void *arg) { @@ -713,7 +792,6 @@ record_iter_traverse(ApgRecordIterObject *it, visitproc visit, void *arg) return 0; } - static PyObject * record_iter_next(ApgRecordIterObject *it) { @@ -724,7 +802,6 @@ record_iter_next(ApgRecordIterObject *it) seq = it->it_seq; if (seq == NULL) return NULL; - assert(ApgRecord_Check(seq)); if (it->it_index < Py_SIZE(seq)) { item = ApgRecord_GET_ITEM(seq, it->it_index); @@ -738,7 +815,6 @@ record_iter_next(ApgRecordIterObject *it) return NULL; } - static PyObject * record_iter_len(ApgRecordIterObject *it) { @@ -749,73 +825,63 @@ record_iter_len(ApgRecordIterObject *it) return PyLong_FromSsize_t(len); } - -PyDoc_STRVAR(record_iter_len_doc, - "Private method returning an estimate of len(list(it))."); - +PyDoc_STRVAR(record_iter_len_doc, "Private method returning an estimate of len(list(it))."); static 
PyMethodDef record_iter_methods[] = { - {"__length_hint__", (PyCFunction)record_iter_len, METH_NOARGS, - record_iter_len_doc}, - {NULL, NULL} /* sentinel */ + {"__length_hint__", (PyCFunction)record_iter_len, METH_NOARGS, record_iter_len_doc}, + {NULL, NULL} /* sentinel */ }; - -PyTypeObject ApgRecordIter_Type = { - PyVarObject_HEAD_INIT(NULL, 0) - .tp_name = "RecordIterator", - .tp_basicsize = sizeof(ApgRecordIterObject), - .tp_dealloc = (destructor)record_iter_dealloc, - .tp_getattro = PyObject_GenericGetAttr, - .tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC, - .tp_traverse = (traverseproc)record_iter_traverse, - .tp_iter = PyObject_SelfIter, - .tp_iternext = (iternextfunc)record_iter_next, - .tp_methods = record_iter_methods, +static PyType_Slot ApgRecordIter_TypeSlots[] = { + {Py_tp_dealloc, (destructor)record_iter_dealloc}, + {Py_tp_getattro, PyObject_GenericGetAttr}, + {Py_tp_traverse, (traverseproc)record_iter_traverse}, + {Py_tp_iter, PyObject_SelfIter}, + {Py_tp_iternext, (iternextfunc)record_iter_next}, + {Py_tp_methods, record_iter_methods}, + {0, NULL}, }; +static PyType_Spec ApgRecordIter_TypeSpec = { + .name = "asyncpg.protocol.record.RecordIterator", + .basicsize = sizeof(ApgRecordIterObject), + .flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC, + .slots = ApgRecordIter_TypeSlots, +}; static PyObject * -record_iter(PyObject *seq) +record_new_iter(ApgRecordObject *r, const record_module_state *state) { ApgRecordIterObject *it; - - if (!ApgRecord_Check(seq)) { - PyErr_BadInternalCall(); - return NULL; - } - it = PyObject_GC_New(ApgRecordIterObject, &ApgRecordIter_Type); + it = PyObject_GC_New(ApgRecordIterObject, state->ApgRecordIter_Type); if (it == NULL) return NULL; it->it_index = 0; - Py_INCREF(seq); - it->it_seq = (ApgRecordObject *)seq; + Py_INCREF(r); + it->it_seq = r; PyObject_GC_Track(it); return (PyObject *)it; } - /* Record Items Iterator */ - typedef struct { - PyObject_HEAD - Py_ssize_t it_index; + PyObject_HEAD Py_ssize_t it_index; 
PyObject *it_key_iter; ApgRecordObject *it_seq; /* Set to NULL when iterator is exhausted */ } ApgRecordItemsObject; - static void record_items_dealloc(ApgRecordItemsObject *it) { + PyTypeObject *tp = Py_TYPE(it); PyObject_GC_UnTrack(it); Py_CLEAR(it->it_key_iter); Py_CLEAR(it->it_seq); PyObject_GC_Del(it); + Py_DECREF(tp); } - static int record_items_traverse(ApgRecordItemsObject *it, visitproc visit, void *arg) { @@ -824,7 +890,6 @@ record_items_traverse(ApgRecordItemsObject *it, visitproc visit, void *arg) return 0; } - static PyObject * record_items_next(ApgRecordItemsObject *it) { @@ -838,7 +903,6 @@ record_items_next(ApgRecordItemsObject *it) if (seq == NULL) { return NULL; } - assert(ApgRecord_Check(seq)); assert(it->it_key_iter != NULL); key = PyIter_Next(it->it_key_iter); @@ -875,7 +939,6 @@ record_items_next(ApgRecordItemsObject *it) return NULL; } - static PyObject * record_items_len(ApgRecordItemsObject *it) { @@ -886,140 +949,341 @@ record_items_len(ApgRecordItemsObject *it) return PyLong_FromSsize_t(len); } - -PyDoc_STRVAR(record_items_len_doc, - "Private method returning an estimate of len(list(it()))."); - +PyDoc_STRVAR(record_items_len_doc, "Private method returning an estimate of len(list(it()))."); static PyMethodDef record_items_methods[] = { - {"__length_hint__", (PyCFunction)record_items_len, METH_NOARGS, - record_items_len_doc}, - {NULL, NULL} /* sentinel */ + {"__length_hint__", (PyCFunction)record_items_len, METH_NOARGS, record_items_len_doc}, + {NULL, NULL} /* sentinel */ }; - -PyTypeObject ApgRecordItems_Type = { - PyVarObject_HEAD_INIT(NULL, 0) - .tp_name = "RecordItemsIterator", - .tp_basicsize = sizeof(ApgRecordItemsObject), - .tp_dealloc = (destructor)record_items_dealloc, - .tp_getattro = PyObject_GenericGetAttr, - .tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC, - .tp_traverse = (traverseproc)record_items_traverse, - .tp_iter = PyObject_SelfIter, - .tp_iternext = (iternextfunc)record_items_next, - .tp_methods = 
record_items_methods, +static PyType_Slot ApgRecordItems_TypeSlots[] = { + {Py_tp_dealloc, (destructor)record_items_dealloc}, + {Py_tp_getattro, PyObject_GenericGetAttr}, + {Py_tp_traverse, (traverseproc)record_items_traverse}, + {Py_tp_iter, PyObject_SelfIter}, + {Py_tp_iternext, (iternextfunc)record_items_next}, + {Py_tp_methods, record_items_methods}, + {0, NULL}, }; +static PyType_Spec ApgRecordItems_TypeSpec = { + .name = "asyncpg.protocol.record.RecordItemsIterator", + .basicsize = sizeof(ApgRecordItemsObject), + .flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC, + .slots = ApgRecordItems_TypeSlots, +}; static PyObject * -record_new_items_iter(PyObject *seq) +record_new_items_iter(ApgRecordObject *r, const record_module_state *state) { ApgRecordItemsObject *it; PyObject *key_iter; - if (!ApgRecord_Check(seq)) { - PyErr_BadInternalCall(); + key_iter = PyObject_GetIter(r->desc->keys); + if (key_iter == NULL) return NULL; - } - key_iter = PyObject_GetIter(((ApgRecordObject*)seq)->desc->keys); - if (key_iter == NULL) { + it = PyObject_GC_New(ApgRecordItemsObject, state->ApgRecordItems_Type); + if (it == NULL) { + Py_DECREF(key_iter); return NULL; } - it = PyObject_GC_New(ApgRecordItemsObject, &ApgRecordItems_Type); - if (it == NULL) - return NULL; - it->it_key_iter = key_iter; it->it_index = 0; - Py_INCREF(seq); - it->it_seq = (ApgRecordObject *)seq; + Py_INCREF(r); + it->it_seq = r; PyObject_GC_Track(it); return (PyObject *)it; } +/* ----------------- */ -PyTypeObject * -ApgRecord_InitTypes(void) +static void +record_desc_dealloc(ApgRecordDescObject *o) { - if (PyType_Ready(&ApgRecord_Type) < 0) { + PyTypeObject *tp = Py_TYPE(o); + PyObject_GC_UnTrack(o); + Py_CLEAR(o->mapping); + Py_CLEAR(o->keys); + PyObject_GC_Del(o); + Py_DECREF(tp); +} + +static int +record_desc_traverse(ApgRecordDescObject *o, visitproc visit, void *arg) +{ + Py_VISIT(o->mapping); + Py_VISIT(o->keys); + return 0; +} + +static PyObject * +record_desc_vectorcall(PyObject *type, PyObject 
*const *args, size_t nargsf, + PyObject *kwnames) +{ + PyObject *mapping; + PyObject *keys; + ApgRecordDescObject *o; + Py_ssize_t nargs = PyVectorcall_NARGS(nargsf); + + if (kwnames != NULL && PyTuple_GET_SIZE(kwnames) != 0) { + PyErr_SetString(PyExc_TypeError, "RecordDescriptor() takes no keyword arguments"); return NULL; } - if (PyType_Ready(&ApgRecordDesc_Type) < 0) { + if (nargs != 2) { + PyErr_Format(PyExc_TypeError, + "RecordDescriptor() takes exactly 2 arguments (%zd given)", nargs); return NULL; } - if (PyType_Ready(&ApgRecordIter_Type) < 0) { + mapping = args[0]; + keys = args[1]; + + if (!PyTuple_CheckExact(keys)) { + PyErr_SetString(PyExc_TypeError, "keys must be a tuple"); return NULL; } - if (PyType_Ready(&ApgRecordItems_Type) < 0) { + o = PyObject_GC_New(ApgRecordDescObject, (PyTypeObject *)type); + if (o == NULL) { return NULL; } - return &ApgRecord_Type; + Py_INCREF(mapping); + o->mapping = mapping; + + Py_INCREF(keys); + o->keys = keys; + + PyObject_GC_Track(o); + return (PyObject *)o; } +/* Fallback wrapper for when there is no vectorcall support */ +static PyObject * +record_desc_new(PyTypeObject *type, PyObject *args, PyObject *kwargs) +{ + PyObject *const *args_array; + size_t nargsf; + PyObject *kwnames = NULL; -/* ----------------- */ + if (kwargs != NULL && PyDict_GET_SIZE(kwargs) != 0) { + PyErr_SetString(PyExc_TypeError, + "RecordDescriptor() takes no keyword arguments"); + return NULL; + } + if (!PyTuple_Check(args)) { + PyErr_SetString(PyExc_TypeError, + "args must be a tuple"); + return NULL; + } -static void -record_desc_dealloc(ApgRecordDescObject *o) + nargsf = (size_t)PyTuple_GET_SIZE(args); + args_array = &PyTuple_GET_ITEM(args, 0); + + return record_desc_vectorcall((PyObject *)type, args_array, nargsf, kwnames); +} + +static PyObject * +record_desc_make_record(PyObject *desc, PyTypeObject *desc_type, + PyObject *const *args, Py_ssize_t nargs, + PyObject *kwnames) { - PyObject_GC_UnTrack(o); - Py_CLEAR(o->mapping); - 
Py_CLEAR(o->keys); - PyObject_GC_Del(o); + PyObject *type_obj; + Py_ssize_t size; + record_module_state *state = get_module_state_from_type(desc_type); + + if (state == NULL) { + return NULL; + } + + if (nargs != 2) { + PyErr_Format(PyExc_TypeError, + "RecordDescriptor.make_record() takes exactly 2 arguments (%zd given)", + nargs); + return NULL; + } + + if (kwnames != NULL && PyTuple_GET_SIZE(kwnames) != 0) { + PyErr_SetString(PyExc_TypeError, + "RecordDescriptor.make_record() takes no keyword arguments"); + return NULL; + } + + type_obj = args[0]; + size = PyLong_AsSsize_t(args[1]); + if (size == -1 && PyErr_Occurred()) { + return NULL; + } + + if (!PyType_Check(type_obj)) { + PyErr_SetString(PyExc_TypeError, + "RecordDescriptor.make_record(): first argument must be a type"); + return NULL; + } + + return make_record((PyTypeObject *)type_obj, desc, size, state); } +static PyMethodDef record_desc_methods[] = { + {"make_record", _PyCFunction_CAST(record_desc_make_record), + METH_FASTCALL | METH_METHOD | METH_KEYWORDS}, + {NULL, NULL} /* sentinel */ +}; + +static PyType_Slot ApgRecordDesc_TypeSlots[] = { +#ifdef Py_tp_vectorcall + {Py_tp_vectorcall, (vectorcallfunc)record_desc_vectorcall}, +#endif + {Py_tp_new, (newfunc)record_desc_new}, + {Py_tp_dealloc, (destructor)record_desc_dealloc}, + {Py_tp_getattro, PyObject_GenericGetAttr}, + {Py_tp_traverse, (traverseproc)record_desc_traverse}, + {Py_tp_methods, record_desc_methods}, + {0, NULL}, +}; + +static PyType_Spec ApgRecordDesc_TypeSpec = { + .name = "asyncpg.protocol.record.RecordDescriptor", + .basicsize = sizeof(ApgRecordDescObject), + .flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | Py_TPFLAGS_IMMUTABLETYPE, + .slots = ApgRecordDesc_TypeSlots, +}; + +/* + * Module init + */ + +static PyMethodDef record_module_methods[] = {{NULL, NULL, 0, NULL}}; static int -record_desc_traverse(ApgRecordDescObject *o, visitproc visit, void *arg) +record_module_exec(PyObject *module) { - Py_VISIT(o->mapping); - 
Py_VISIT(o->keys); + record_module_state *state = get_module_state(module); + if (state == NULL) { + return -1; + } + + if (PyThread_tss_create(&state->freelist_key) != 0) { + PyErr_SetString( + PyExc_SystemError, + "failed to create TSS key for record freelist"); + return -1; + } + +#define CREATE_TYPE(m, tp, spec) \ + do { \ + tp = (PyTypeObject *)PyType_FromModuleAndSpec(m, spec, NULL); \ + if (tp == NULL) \ + goto error; \ + if (PyModule_AddType(m, tp) < 0) \ + goto error; \ + } while (0) + + CREATE_TYPE(module, state->ApgRecord_Type, &ApgRecord_TypeSpec); + CREATE_TYPE(module, state->ApgRecordDesc_Type, &ApgRecordDesc_TypeSpec); + CREATE_TYPE(module, state->ApgRecordIter_Type, &ApgRecordIter_TypeSpec); + CREATE_TYPE(module, state->ApgRecordItems_Type, &ApgRecordItems_TypeSpec); + +#undef CREATE_TYPE + return 0; + +error: + Py_CLEAR(state->ApgRecord_Type); + Py_CLEAR(state->ApgRecordDesc_Type); + Py_CLEAR(state->ApgRecordIter_Type); + Py_CLEAR(state->ApgRecordItems_Type); + return -1; } +static int +record_module_traverse(PyObject *module, visitproc visit, void *arg) +{ + record_module_state *state = get_module_state(module); + if (state == NULL) { + return 0; + } -PyTypeObject ApgRecordDesc_Type = { - PyVarObject_HEAD_INIT(NULL, 0) - .tp_name = "RecordDescriptor", - .tp_basicsize = sizeof(ApgRecordDescObject), - .tp_dealloc = (destructor)record_desc_dealloc, - .tp_getattro = PyObject_GenericGetAttr, - .tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC, - .tp_traverse = (traverseproc)record_desc_traverse, - .tp_iter = PyObject_SelfIter, -}; + Py_VISIT(state->ApgRecord_Type); + Py_VISIT(state->ApgRecordDesc_Type); + Py_VISIT(state->ApgRecordIter_Type); + Py_VISIT(state->ApgRecordItems_Type); + return 0; +} -PyObject * -ApgRecordDesc_New(PyObject *mapping, PyObject *keys) +static int +record_module_clear(PyObject *module) { - ApgRecordDescObject *o; - - if (!mapping || !keys || !PyTuple_CheckExact(keys)) { - PyErr_BadInternalCall(); - return NULL; + 
record_module_state *state = get_module_state(module); + if (state == NULL) { + return 0; } - o = PyObject_GC_New(ApgRecordDescObject, &ApgRecordDesc_Type); - if (o == NULL) { - return NULL; + if (PyThread_tss_is_created(&state->freelist_key)) { + record_freelist_state *freelist = + (record_freelist_state *)PyThread_tss_get(&state->freelist_key); + if (freelist != NULL) { + for (int i = 0; i < ApgRecord_MAXSAVESIZE; i++) { + ApgRecordObject *op = freelist->freelist[i]; + while (op != NULL) { + ApgRecordObject *next = (ApgRecordObject *)(op->ob_item[0]); + PyObject_GC_Del(op); + op = next; + } + freelist->freelist[i] = NULL; + freelist->numfree[i] = 0; + } + PyMem_Free(freelist); + PyThread_tss_set(&state->freelist_key, NULL); + } + + PyThread_tss_delete(&state->freelist_key); } - Py_INCREF(mapping); - o->mapping = mapping; + Py_CLEAR(state->ApgRecord_Type); + Py_CLEAR(state->ApgRecordDesc_Type); + Py_CLEAR(state->ApgRecordIter_Type); + Py_CLEAR(state->ApgRecordItems_Type); - Py_INCREF(keys); - o->keys = keys; + return 0; +} - PyObject_GC_Track(o); - return (PyObject *) o; +static void +record_module_free(void *module) +{ + record_module_clear((PyObject *)module); +} + +static PyModuleDef_Slot record_module_slots[] = { + {Py_mod_exec, record_module_exec}, +#ifdef Py_mod_multiple_interpreters + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, +#endif +#ifdef Py_mod_gil + {Py_mod_gil, Py_MOD_GIL_NOT_USED}, +#endif + {0, NULL}, +}; + +static struct PyModuleDef _recordmodule = { + PyModuleDef_HEAD_INIT, + .m_name = "asyncpg.protocol.record", + .m_size = sizeof(record_module_state), + .m_methods = record_module_methods, + .m_slots = record_module_slots, + .m_traverse = record_module_traverse, + .m_clear = record_module_clear, + .m_free = record_module_free, +}; + +PyMODINIT_FUNC +PyInit_record(void) +{ + return PyModuleDef_Init(&_recordmodule); } diff --git a/asyncpg/protocol/record/recordobj.h b/asyncpg/protocol/record/recordobj.h index 
373c8967..78caffcc 100644 --- a/asyncpg/protocol/record/recordobj.h +++ b/asyncpg/protocol/record/recordobj.h @@ -1,14 +1,7 @@ #ifndef APG_RECORDOBJ_H #define APG_RECORDOBJ_H -#include "Python.h" - - -/* Largest record to save on free list */ -#define ApgRecord_MAXSAVESIZE 20 - -/* Maximum number of records of each size to save */ -#define ApgRecord_MAXFREELIST 2000 +#include typedef struct { @@ -31,23 +24,10 @@ typedef struct { } ApgRecordObject; -extern PyTypeObject ApgRecord_Type; -extern PyTypeObject ApgRecordIter_Type; -extern PyTypeObject ApgRecordItems_Type; - -extern PyTypeObject ApgRecordDesc_Type; - -#define ApgRecord_Check(self) PyObject_TypeCheck(self, &ApgRecord_Type) -#define ApgRecord_CheckExact(o) (Py_TYPE(o) == &ApgRecord_Type) -#define ApgRecordDesc_CheckExact(o) (Py_TYPE(o) == &ApgRecordDesc_Type) - #define ApgRecord_SET_ITEM(op, i, v) \ (((ApgRecordObject *)(op))->ob_item[i] = v) + #define ApgRecord_GET_ITEM(op, i) \ (((ApgRecordObject *)(op))->ob_item[i]) -PyTypeObject *ApgRecord_InitTypes(void); -PyObject *ApgRecord_New(PyTypeObject *, PyObject *, Py_ssize_t); -PyObject *ApgRecordDesc_New(PyObject *, PyObject *); - #endif diff --git a/asyncpg/protocol/record/__init__.pxd b/asyncpg/protocol/recordcapi.pxd similarity index 64% rename from asyncpg/protocol/record/__init__.pxd rename to asyncpg/protocol/recordcapi.pxd index 43ac5e33..e52798fb 100644 --- a/asyncpg/protocol/record/__init__.pxd +++ b/asyncpg/protocol/recordcapi.pxd @@ -10,10 +10,5 @@ cimport cpython cdef extern from "record/recordobj.h": - cpython.PyTypeObject *ApgRecord_InitTypes() except NULL - - int ApgRecord_CheckExact(object) - object ApgRecord_New(type, object, int) void ApgRecord_SET_ITEM(object, int, object) - - object ApgRecordDesc_New(object, object) + object RecordDescriptor(object, object) diff --git a/pyproject.toml b/pyproject.toml index 131a7372..7ee648cd 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -24,7 +24,9 @@ classifiers = [ "Programming Language :: Python 
:: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python :: 3.13", + "Programming Language :: Python :: 3.14", "Programming Language :: Python :: Implementation :: CPython", + "Programming Language :: Python :: Free Threading :: 2 - Beta", "Topic :: Database :: Front-Ends", ] dependencies = [ @@ -45,21 +47,22 @@ test = [ 'flake8~=6.1', 'flake8-pyi~=24.1.0', 'distro~=1.9.0', - 'uvloop>=0.15.3; platform_system != "Windows" and python_version < "3.14.0"', + 'uvloop>=0.22.1; platform_system != "Windows" and python_version < "3.15.0"', 'gssapi; platform_system == "Linux"', 'k5test; platform_system == "Linux"', 'sspilib; platform_system == "Windows"', 'mypy~=1.8.0', + 'pytest', ] docs = [ - 'Sphinx~=8.1.3', + 'Sphinx~=7.4', 'sphinx_rtd_theme>=1.2.2', ] [build-system] requires = [ "setuptools>=77.0.3", - "Cython(>=0.29.24,<4.0.0)" + "Cython(>=3.1.0,<4.0.0)" ] build-backend = "setuptools.build_meta" diff --git a/setup.py b/setup.py index 29e9d612..b6f66b16 100644 --- a/setup.py +++ b/setup.py @@ -25,7 +25,7 @@ from setuptools.command import build_ext as setuptools_build_ext -CYTHON_DEPENDENCY = 'Cython(>=0.29.24,<4.0.0)' +CYTHON_DEPENDENCY = 'Cython(>=3.1.0,<4.0.0)' CFLAGS = ['-O2'] LDFLAGS = [] @@ -128,17 +128,26 @@ def initialize_options(self): super(build_ext, self).initialize_options() + defines = [ + "CYTHON_USE_MODULE_STATE", + "CYTHON_PEP489_MULTI_PHASE_INIT", + "CYTHON_USE_TYPE_SPECS", + ] + if os.environ.get('ASYNCPG_DEBUG'): self.cython_always = True self.cython_annotate = True self.cython_directives = "linetrace=True" - self.define = 'PG_DEBUG,CYTHON_TRACE,CYTHON_TRACE_NOGIL' self.debug = True + + defines += ["PG_DEBUG", "CYTHON_TRACE", "CYTHON_TRACE_NOGIL"] else: self.cython_always = False self.cython_annotate = None self.cython_directives = None + self.define = ",".join(defines) + def finalize_options(self): # finalize_options() may be called multiple times on the # same command object, so make sure not to override previously @@ 
-201,6 +210,8 @@ def finalize_options(self): directives = { 'language_level': '3', + 'freethreading_compatible': 'True', + 'subinterpreters_compatible': 'own_gil', } if self.cython_directives: @@ -231,7 +242,7 @@ def finalize_options(self): setup_requires.append(CYTHON_DEPENDENCY) -setuptools.setup( +_ = setuptools.setup( version=VERSION, ext_modules=[ setuptools.extension.Extension( @@ -240,10 +251,16 @@ def finalize_options(self): extra_compile_args=CFLAGS, extra_link_args=LDFLAGS), + setuptools.extension.Extension( + "asyncpg.protocol.record", + ["asyncpg/protocol/record/recordobj.c"], + include_dirs=['asyncpg/protocol/record/'], + extra_compile_args=CFLAGS, + extra_link_args=LDFLAGS), + setuptools.extension.Extension( "asyncpg.protocol.protocol", - ["asyncpg/protocol/record/recordobj.c", - "asyncpg/protocol/protocol.pyx"], + ["asyncpg/protocol/protocol.pyx"], include_dirs=['asyncpg/pgproto/'], extra_compile_args=CFLAGS, extra_link_args=LDFLAGS), diff --git a/tests/test_record.py b/tests/test_record.py index ef9388bb..d463c41f 100644 --- a/tests/test_record.py +++ b/tests/test_record.py @@ -151,7 +151,8 @@ def test_record_values(self): r = Record(R_AB, (42, 43)) vv = r.values() self.assertEqual(tuple(vv), (42, 43)) - self.assertTrue(repr(vv).startswith(' +# +# This module is part of asyncpg and is released under +# the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 + + +import textwrap +import threading +import unittest + +try: + from concurrent import interpreters +except ImportError: + pass +else: + class TestSubinterpreters(unittest.TestCase): + def test_record_module_loads_in_subinterpreter(self) -> None: + def run_in_subinterpreter() -> None: + interp = interpreters.create() + + try: + code = textwrap.dedent("""\ + import asyncpg.protocol.record as record + assert record.Record is not None + """) + interp.exec(code) + finally: + interp.close() + + thread = threading.Thread(target=run_in_subinterpreter) + thread.start() + thread.join() + + 
def test_record_module_state_isolation(self) -> None: + import asyncpg.protocol.record + + main_record_id = id(asyncpg.protocol.record.Record) + + def run_in_subinterpreter() -> None: + interp = interpreters.create() + + try: + code = textwrap.dedent(f"""\ + import asyncpg.protocol.record as record + + sub_record_id = id(record.Record) + main_id = {main_record_id} + + assert sub_record_id != main_id, ( + f"Record type objects are the same: " + f"{{sub_record_id}} == {{main_id}}. " + f"This indicates shared global state." + ) + """) + interp.exec(code) + finally: + interp.close() + + thread = threading.Thread(target=run_in_subinterpreter) + thread.start() + thread.join() + + +if __name__ == "__main__": + _ = unittest.main() From 648b35f18199ebf2a30009376a6e9060cf7ad789 Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Mon, 24 Nov 2025 11:07:59 -0800 Subject: [PATCH 187/193] Bump Cython to 3.2.1 (#1288) Should fix refleak issues --- pyproject.toml | 2 +- setup.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 7ee648cd..e2b18388 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -62,7 +62,7 @@ docs = [ [build-system] requires = [ "setuptools>=77.0.3", - "Cython(>=3.1.0,<4.0.0)" + "Cython(>=3.2.1,<4.0.0)" ] build-backend = "setuptools.build_meta" diff --git a/setup.py b/setup.py index b6f66b16..c2332822 100644 --- a/setup.py +++ b/setup.py @@ -25,7 +25,7 @@ from setuptools.command import build_ext as setuptools_build_ext -CYTHON_DEPENDENCY = 'Cython(>=3.1.0,<4.0.0)' +CYTHON_DEPENDENCY = 'Cython(>=3.2.1,<4.0.0)' CFLAGS = ['-O2'] LDFLAGS = [] From 07fe5122a680f5768e39fc28d68c94b185037a52 Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Mon, 24 Nov 2025 11:11:34 -0800 Subject: [PATCH 188/193] Bump pgproto --- asyncpg/pgproto | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/asyncpg/pgproto b/asyncpg/pgproto index 1c3cad14..a29a6f6a 160000 --- a/asyncpg/pgproto +++ b/asyncpg/pgproto 
@@ -1 +1 @@ -Subproject commit 1c3cad14d53c8f3088106f4eab8f612b7293569b +Subproject commit a29a6f6aaa09013cb33ffadb8dd57e21d671ab55 From e534e5f15c73525a6509143b3828335517360f1b Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Mon, 24 Nov 2025 11:13:14 -0800 Subject: [PATCH 189/193] Bump cibuildwheel --- .github/workflows/release.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 263406cb..353ed824 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -83,7 +83,7 @@ jobs: - uses: actions/setup-python@v6 with: python-version: "3.x" - - run: pip install cibuildwheel==3.2.1 + - run: pip install cibuildwheel==3.3.0 - id: set-matrix run: | MATRIX_INCLUDE=$( @@ -123,7 +123,7 @@ jobs: if: runner.os == 'Linux' uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 # v3.6.0 - - uses: pypa/cibuildwheel@9c00cb4f6b517705a3794b22395aedc36257242c # v3.2.1 + - uses: pypa/cibuildwheel@63fd63b352a9a8bdcc24791c9dbee952ee9a8abc # v3.3.0 with: only: ${{ matrix.only }} env: From 508cae6441968ef7613a623fece7083cce66c2b3 Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Mon, 24 Nov 2025 14:00:06 -0800 Subject: [PATCH 190/193] Test on PostgreSQL 18 (#1290) --- .github/workflows/tests.yml | 6 ++++-- README.rst | 2 +- docs/index.rst | 2 +- 3 files changed, 6 insertions(+), 4 deletions(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 451aca9f..77e63738 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -56,7 +56,9 @@ jobs: - name: Setup PostgreSQL if: "!steps.release.outputs.is_release && matrix.os == 'macos-latest'" run: | - brew install postgresql + POSTGRES_FORMULA="postgresql@18" + brew install "$POSTGRES_FORMULA" + echo "$(brew --prefix "$POSTGRES_FORMULA")/bin" >> $GITHUB_PATH - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v6 @@ -86,7 +88,7 @@ jobs: 
test-postgres: strategy: matrix: - postgres-version: ["9.5", "9.6", "10", "11", "12", "13", "14", "15", "16", "17"] + postgres-version: ["9.5", "9.6", "10", "11", "12", "13", "14", "15", "16", "17", "18"] runs-on: ubuntu-latest diff --git a/README.rst b/README.rst index 70751b50..1a37296d 100644 --- a/README.rst +++ b/README.rst @@ -14,7 +14,7 @@ framework. You can read more about asyncpg in an introductory `blog post `_. asyncpg requires Python 3.9 or later and is supported for PostgreSQL -versions 9.5 to 17. Other PostgreSQL versions or other databases +versions 9.5 to 18. Other PostgreSQL versions or other databases implementing the PostgreSQL protocol *may* work, but are not being actively tested. diff --git a/docs/index.rst b/docs/index.rst index bbad4397..5ca0bef0 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -16,7 +16,7 @@ of PostgreSQL server binary protocol for use with Python's ``asyncio`` framework. **asyncpg** requires Python 3.9 or later and is supported for PostgreSQL -versions 9.5 to 17. Other PostgreSQL versions or other databases implementing +versions 9.5 to 18. Other PostgreSQL versions or other databases implementing the PostgreSQL protocol *may* work, but are not being actively tested. Contents From 71775a67277fc0aa5bd2b9f15e848826d7078c4d Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Mon, 24 Nov 2025 14:05:20 -0800 Subject: [PATCH 191/193] asyncpg v0.31.0 Enable Python 3.14 with experimental subinterpreter/freethreading support. 
Improvements ============ * Add Python 3.14 support, experimental subinterpreter/freethreading support (#1279) (by @elprans in 9e42642b) * Avoid performing type introspection on known types (#1243) (by @elprans in 5c9986c4) * Make `prepare()` not use named statements by default when cache is disabled (#1245) (by @elprans in 5b14653e) * Implement connection service file functionality (#1223) (by @AndrewJackson2020 in 1d63bb15) Fixes ===== * Fix multi port connection string issue (#1222) (by @AndrewJackson2020 in 01c0db7b) * Avoid leaking connections if _can_use_connection fails (#1269) (by @yuliy-openai in e94302d2) Other ===== * Drop support for EOL Python 3.8 (#1281) (by @elprans in 6c2c4904) --- asyncpg/_version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/asyncpg/_version.py b/asyncpg/_version.py index 0254079b..55a4fbf1 100644 --- a/asyncpg/_version.py +++ b/asyncpg/_version.py @@ -14,4 +14,4 @@ import typing -__version__: typing.Final = '0.31.0.dev0' +__version__: typing.Final = '0.31.0' From 9b2b027224058e4590fdb9e41b5750a64dab1fce Mon Sep 17 00:00:00 2001 From: Elvis Pranskevichus Date: Mon, 24 Nov 2025 15:37:15 -0800 Subject: [PATCH 192/193] Bump version to 0.32.0.dev0 --- asyncpg/_version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/asyncpg/_version.py b/asyncpg/_version.py index 55a4fbf1..738da168 100644 --- a/asyncpg/_version.py +++ b/asyncpg/_version.py @@ -14,4 +14,4 @@ import typing -__version__: typing.Final = '0.31.0' +__version__: typing.Final = '0.32.0.dev0' From db8ecc2a38e16fb0c090aef6f5506547c2831c24 Mon Sep 17 00:00:00 2001 From: Varun Chawla <34209028+veeceey@users.noreply.github.com> Date: Fri, 27 Feb 2026 12:48:41 -0800 Subject: [PATCH 193/193] Add explicit libm linkage to fix undefined symbol errors (#1305) When building asyncpg on certain platforms (e.g., Amazon Linux 2023 with Clang 20), the C extensions fail to load at runtime with errors like "undefined symbol: log10". 
This happens because the extensions use math functions from libm but don't explicitly link against it. On some toolchains, libm is not implicitly linked, causing runtime symbol resolution failures even though compilation succeeds. This fix adds `-lm` to LDFLAGS on non-Windows systems to ensure proper linkage with the math library. Fixes #1297 --- setup.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/setup.py b/setup.py index c2332822..f9fafadf 100644 --- a/setup.py +++ b/setup.py @@ -32,6 +32,8 @@ if platform.uname().system != 'Windows': CFLAGS.extend(['-fsigned-char', '-Wall', '-Wsign-compare', '-Wconversion']) + # Link against libm (math library) for functions like log10() + LDFLAGS.extend(['-lm']) _ROOT = pathlib.Path(__file__).parent