From 6e9888581c86d7ca78d265e5720c365f7b62fc92 Mon Sep 17 00:00:00 2001 From: tarsil Date: Wed, 22 Mar 2023 16:15:03 +0000 Subject: [PATCH 1/8] =?UTF-8?q?=F0=9F=AA=9B=20Added=20support=20for=20SQLA?= =?UTF-8?q?lchemy=202.0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Removed support for python 3.7 * Added common and dialects packages to handle the new SQLAlchemy 2.0+ --- .github/workflows/publish.yml | 4 +- .github/workflows/test-suite.yml | 112 +++++++++--------- CHANGELOG.md | 12 ++ README.md | 6 +- databases/backends/aiopg.py | 66 +++++++---- databases/backends/asyncmy.py | 60 +++++++--- databases/backends/common/__init__.py | 0 databases/backends/common/records.py | 142 +++++++++++++++++++++++ databases/backends/compilers/__init__.py | 0 databases/backends/compilers/psycopg.py | 17 +++ databases/backends/dialects/__init__.py | 0 databases/backends/dialects/psycopg.py | 46 ++++++++ databases/backends/mysql.py | 57 ++++++--- databases/backends/postgres.py | 122 ++----------------- databases/backends/sqlite.py | 71 ++++++------ databases/core.py | 2 +- docs/index.md | 10 +- mkdocs.yml | 20 ++-- requirements.txt | 6 +- scripts/clean | 6 + setup.cfg | 5 + setup.py | 6 +- tests/test_connection_options.py | 1 + tests/test_databases.py | 59 ++-------- 24 files changed, 489 insertions(+), 341 deletions(-) create mode 100644 databases/backends/common/__init__.py create mode 100644 databases/backends/common/records.py create mode 100644 databases/backends/compilers/__init__.py create mode 100644 databases/backends/compilers/psycopg.py create mode 100644 databases/backends/dialects/__init__.py create mode 100644 databases/backends/dialects/psycopg.py diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 170e9558..6ea3c65f 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -4,7 +4,7 @@ name: Publish on: push: tags: - - '*' + - "*" jobs: publish: @@ -15,7 +15,7 @@ jobs: 
- uses: "actions/checkout@v3" - uses: "actions/setup-python@v4" with: - python-version: 3.7 + python-version: 3.8 - name: "Install dependencies" run: "scripts/install" - name: "Build package & docs" diff --git a/.github/workflows/test-suite.yml b/.github/workflows/test-suite.yml index bc271a65..e00989b5 100644 --- a/.github/workflows/test-suite.yml +++ b/.github/workflows/test-suite.yml @@ -1,63 +1,63 @@ --- -name: Test Suite + name: Test Suite -on: - push: - branches: ["master"] - pull_request: - branches: ["master"] + on: + push: + branches: ["master"] + pull_request: + branches: ["master"] -jobs: - tests: - name: "Python ${{ matrix.python-version }}" - runs-on: "ubuntu-latest" + jobs: + tests: + name: "Python ${{ matrix.python-version }}" + runs-on: "ubuntu-latest" - strategy: - matrix: - python-version: ["3.7", "3.8", "3.9", "3.10"] + strategy: + matrix: + python-version: ["3.8", "3.9", "3.10", "3.11"] - services: - mysql: - image: mysql:5.7 - env: - MYSQL_USER: username - MYSQL_PASSWORD: password - MYSQL_ROOT_PASSWORD: password - MYSQL_DATABASE: testsuite - ports: - - 3306:3306 - options: --health-cmd="mysqladmin ping" --health-interval=10s --health-timeout=5s --health-retries=3 + services: + mysql: + image: mysql:5.7 + env: + MYSQL_USER: username + MYSQL_PASSWORD: password + MYSQL_ROOT_PASSWORD: password + MYSQL_DATABASE: testsuite + ports: + - 3306:3306 + options: --health-cmd="mysqladmin ping" --health-interval=10s --health-timeout=5s --health-retries=3 - postgres: - image: postgres:14 - env: - POSTGRES_USER: username - POSTGRES_PASSWORD: password - POSTGRES_DB: testsuite - ports: - - 5432:5432 - options: --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5 + postgres: + image: postgres:14 + env: + POSTGRES_USER: username + POSTGRES_PASSWORD: password + POSTGRES_DB: testsuite + ports: + - 5432:5432 + options: --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5 - steps: - - uses: 
"actions/checkout@v3" - - uses: "actions/setup-python@v4" - with: - python-version: "${{ matrix.python-version }}" - - name: "Install dependencies" - run: "scripts/install" - - name: "Run linting checks" - run: "scripts/check" - - name: "Build package & docs" - run: "scripts/build" - - name: "Run tests" - env: - TEST_DATABASE_URLS: | - sqlite:///testsuite, - sqlite+aiosqlite:///testsuite, - mysql://username:password@localhost:3306/testsuite, - mysql+aiomysql://username:password@localhost:3306/testsuite, - mysql+asyncmy://username:password@localhost:3306/testsuite, - postgresql://username:password@localhost:5432/testsuite, - postgresql+aiopg://username:password@127.0.0.1:5432/testsuite, - postgresql+asyncpg://username:password@localhost:5432/testsuite - run: "scripts/test" + steps: + - uses: "actions/checkout@v3" + - uses: "actions/setup-python@v4" + with: + python-version: "${{ matrix.python-version }}" + - name: "Install dependencies" + run: "scripts/install" + - name: "Run linting checks" + run: "scripts/check" + - name: "Build package & docs" + run: "scripts/build" + - name: "Run tests" + env: + TEST_DATABASE_URLS: | + sqlite:///testsuite, + sqlite+aiosqlite:///testsuite, + mysql://username:password@localhost:3306/testsuite, + mysql+aiomysql://username:password@localhost:3306/testsuite, + mysql+asyncmy://username:password@localhost:3306/testsuite, + postgresql://username:password@localhost:5432/testsuite, + postgresql+aiopg://username:password@127.0.0.1:5432/testsuite, + postgresql+asyncpg://username:password@localhost:5432/testsuite + run: "scripts/test" \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md index 4816bc16..6e05e81a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,18 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). 
+## 0.8.0 (Mar 21, 2023) + +### Added + +- Support for SQLAlchemy 2.0+ +- Added internal support for the new psycopg dialect. + +### Changed + +- Removed support for python 3.7 in favour of 3.8. + - Python 3.7 support will end in June 2023 + ## 0.7.0 (Dec 18th, 2022) ### Fixed diff --git a/README.md b/README.md index ba16a104..e326df00 100644 --- a/README.md +++ b/README.md @@ -15,11 +15,12 @@ It allows you to make queries using the powerful [SQLAlchemy Core][sqlalchemy-co expression language, and provides support for PostgreSQL, MySQL, and SQLite. Databases is suitable for integrating against any async Web framework, such as [Starlette][starlette], -[Sanic][sanic], [Responder][responder], [Quart][quart], [aiohttp][aiohttp], [Tornado][tornado], or [FastAPI][fastapi]. +[Sanic][sanic], [Responder][responder], [Quart][quart], [aiohttp][aiohttp], [Tornado][tornado], +[FastAPI][fastapi] or [Esmerald][esmerald]. **Documentation**: [https://www.encode.io/databases/](https://www.encode.io/databases/) -**Requirements**: Python 3.7+ +**Requirements**: Python 3.8+ --- @@ -116,3 +117,4 @@ for examples of how to start using databases together with SQLAlchemy core expre [aiohttp]: https://github.com/aio-libs/aiohttp [tornado]: https://github.com/tornadoweb/tornado [fastapi]: https://github.com/tiangolo/fastapi +[esmerald]: https://github.com/dymmond/esmerald diff --git a/databases/backends/aiopg.py b/databases/backends/aiopg.py index 8668b2b9..0b2a6e89 100644 --- a/databases/backends/aiopg.py +++ b/databases/backends/aiopg.py @@ -5,19 +5,20 @@ import uuid import aiopg -from aiopg.sa.engine import APGCompiler_psycopg2 -from sqlalchemy.dialects.postgresql.psycopg2 import PGDialect_psycopg2 from sqlalchemy.engine.cursor import CursorResultMetaData from sqlalchemy.engine.interfaces import Dialect, ExecutionContext from sqlalchemy.engine.row import Row from sqlalchemy.sql import ClauseElement from sqlalchemy.sql.ddl import DDLElement -from databases.core import DatabaseURL +from 
databases.backends.common.records import Record, Row, create_column_maps +from databases.backends.compilers.psycopg import PGCompiler_psycopg +from databases.backends.dialects.psycopg import PGDialect_psycopg +from databases.core import LOG_EXTRA, DatabaseURL from databases.interfaces import ( ConnectionBackend, DatabaseBackend, - Record, + Record as RecordInterface, TransactionBackend, ) @@ -34,10 +35,10 @@ def __init__( self._pool: typing.Union[aiopg.Pool, None] = None def _get_dialect(self) -> Dialect: - dialect = PGDialect_psycopg2( + dialect = PGDialect_psycopg( json_serializer=json.dumps, json_deserializer=lambda x: x ) - dialect.statement_compiler = APGCompiler_psycopg2 + dialect.statement_compiler = PGCompiler_psycopg dialect.implicit_returning = True dialect.supports_native_enum = True dialect.supports_smallserial = True # 9.2+ @@ -117,15 +118,18 @@ async def release(self) -> None: await self._database._pool.release(self._connection) self._connection = None - async def fetch_all(self, query: ClauseElement) -> typing.List[Record]: + async def fetch_all(self, query: ClauseElement) -> typing.List[RecordInterface]: assert self._connection is not None, "Connection is not acquired" - query_str, args, context = self._compile(query) + query_str, args, result_columns, context = self._compile(query) + column_maps = create_column_maps(result_columns) + dialect = self._dialect + cursor = await self._connection.cursor() try: await cursor.execute(query_str, args) rows = await cursor.fetchall() metadata = CursorResultMetaData(context, cursor.description) - return [ + rows = [ Row( metadata, metadata._processors, @@ -135,12 +139,15 @@ async def fetch_all(self, query: ClauseElement) -> typing.List[Record]: ) for row in rows ] + return [Record(row, result_columns, dialect, column_maps) for row in rows] finally: cursor.close() - async def fetch_one(self, query: ClauseElement) -> typing.Optional[Record]: + async def fetch_one(self, query: ClauseElement) -> 
typing.Optional[RecordInterface]: assert self._connection is not None, "Connection is not acquired" - query_str, args, context = self._compile(query) + query_str, args, result_columns, context = self._compile(query) + column_maps = create_column_maps(result_columns) + dialect = self._dialect cursor = await self._connection.cursor() try: await cursor.execute(query_str, args) @@ -148,19 +155,20 @@ async def fetch_one(self, query: ClauseElement) -> typing.Optional[Record]: if row is None: return None metadata = CursorResultMetaData(context, cursor.description) - return Row( + row = Row( metadata, metadata._processors, metadata._keymap, Row._default_key_style, row, ) + return Record(row, result_columns, dialect, column_maps) finally: cursor.close() async def execute(self, query: ClauseElement) -> typing.Any: assert self._connection is not None, "Connection is not acquired" - query_str, args, context = self._compile(query) + query_str, args, _, _ = self._compile(query) cursor = await self._connection.cursor() try: await cursor.execute(query_str, args) @@ -173,7 +181,7 @@ async def execute_many(self, queries: typing.List[ClauseElement]) -> None: cursor = await self._connection.cursor() try: for single_query in queries: - single_query, args, context = self._compile(single_query) + single_query, args, _, _ = self._compile(single_query) await cursor.execute(single_query, args) finally: cursor.close() @@ -182,36 +190,38 @@ async def iterate( self, query: ClauseElement ) -> typing.AsyncGenerator[typing.Any, None]: assert self._connection is not None, "Connection is not acquired" - query_str, args, context = self._compile(query) + query_str, args, result_columns, context = self._compile(query) + column_maps = create_column_maps(result_columns) + dialect = self._dialect cursor = await self._connection.cursor() try: await cursor.execute(query_str, args) metadata = CursorResultMetaData(context, cursor.description) async for row in cursor: - yield Row( + record = Row( metadata, 
metadata._processors, metadata._keymap, Row._default_key_style, row, ) + yield Record(record, result_columns, dialect, column_maps) finally: cursor.close() def transaction(self) -> TransactionBackend: return AiopgTransaction(self) - def _compile( - self, query: ClauseElement - ) -> typing.Tuple[str, dict, CompilationContext]: + def _compile(self, query: ClauseElement) -> typing.Tuple[str, list, tuple]: compiled = query.compile( dialect=self._dialect, compile_kwargs={"render_postcompile": True} ) - execution_context = self._dialect.execution_ctx_cls() execution_context.dialect = self._dialect if not isinstance(query, DDLElement): + compiled_params = sorted(compiled.params.items()) + args = compiled.construct_params() for key, val in args.items(): if key in compiled._bind_processors: @@ -224,11 +234,23 @@ def _compile( compiled._ad_hoc_textual, compiled._loose_column_name_matching, ) + + mapping = { + key: "$" + str(i) for i, (key, _) in enumerate(compiled_params, start=1) + } + compiled_query = compiled.string % mapping + result_map = compiled._result_columns + else: args = {} + result_map = None + compiled_query = compiled.string - logger.debug("Query: %s\nArgs: %s", compiled.string, args) - return compiled.string, args, CompilationContext(execution_context) + query_message = compiled_query.replace(" \n", " ").replace("\n", " ") + logger.debug( + "Query: %s Args: %s", query_message, repr(tuple(args)), extra=LOG_EXTRA + ) + return compiled.string, args, result_map, CompilationContext(execution_context) @property def raw_connection(self) -> aiopg.connection.Connection: diff --git a/databases/backends/asyncmy.py b/databases/backends/asyncmy.py index 749e5afe..f224c7cc 100644 --- a/databases/backends/asyncmy.py +++ b/databases/backends/asyncmy.py @@ -7,15 +7,15 @@ from sqlalchemy.dialects.mysql import pymysql from sqlalchemy.engine.cursor import CursorResultMetaData from sqlalchemy.engine.interfaces import Dialect, ExecutionContext -from sqlalchemy.engine.row import 
Row from sqlalchemy.sql import ClauseElement from sqlalchemy.sql.ddl import DDLElement +from databases.backends.common.records import Record, Row, create_column_maps from databases.core import LOG_EXTRA, DatabaseURL from databases.interfaces import ( ConnectionBackend, DatabaseBackend, - Record, + Record as RecordInterface, TransactionBackend, ) @@ -105,15 +105,18 @@ async def release(self) -> None: await self._database._pool.release(self._connection) self._connection = None - async def fetch_all(self, query: ClauseElement) -> typing.List[Record]: + async def fetch_all(self, query: ClauseElement) -> typing.List[RecordInterface]: assert self._connection is not None, "Connection is not acquired" - query_str, args, context = self._compile(query) + query_str, args, result_columns, context = self._compile(query) + column_maps = create_column_maps(result_columns) + dialect = self._dialect + async with self._connection.cursor() as cursor: try: await cursor.execute(query_str, args) rows = await cursor.fetchall() metadata = CursorResultMetaData(context, cursor.description) - return [ + rows = [ Row( metadata, metadata._processors, @@ -123,12 +126,17 @@ async def fetch_all(self, query: ClauseElement) -> typing.List[Record]: ) for row in rows ] + return [ + Record(row, result_columns, dialect, column_maps) for row in rows + ] finally: await cursor.close() - async def fetch_one(self, query: ClauseElement) -> typing.Optional[Record]: + async def fetch_one(self, query: ClauseElement) -> typing.Optional[RecordInterface]: assert self._connection is not None, "Connection is not acquired" - query_str, args, context = self._compile(query) + query_str, args, result_columns, context = self._compile(query) + column_maps = create_column_maps(result_columns) + dialect = self._dialect async with self._connection.cursor() as cursor: try: await cursor.execute(query_str, args) @@ -136,19 +144,20 @@ async def fetch_one(self, query: ClauseElement) -> typing.Optional[Record]: if row is None: 
return None metadata = CursorResultMetaData(context, cursor.description) - return Row( + row = Row( metadata, metadata._processors, metadata._keymap, Row._default_key_style, row, ) + return Record(row, result_columns, dialect, column_maps) finally: await cursor.close() async def execute(self, query: ClauseElement) -> typing.Any: assert self._connection is not None, "Connection is not acquired" - query_str, args, context = self._compile(query) + query_str, args, _, _ = self._compile(query) async with self._connection.cursor() as cursor: try: await cursor.execute(query_str, args) @@ -163,7 +172,7 @@ async def execute_many(self, queries: typing.List[ClauseElement]) -> None: async with self._connection.cursor() as cursor: try: for single_query in queries: - single_query, args, context = self._compile(single_query) + single_query, args, _, _ = self._compile(single_query) await cursor.execute(single_query, args) finally: await cursor.close() @@ -172,36 +181,38 @@ async def iterate( self, query: ClauseElement ) -> typing.AsyncGenerator[typing.Any, None]: assert self._connection is not None, "Connection is not acquired" - query_str, args, context = self._compile(query) + query_str, args, result_columns, context = self._compile(query) + column_maps = create_column_maps(result_columns) + dialect = self._dialect async with self._connection.cursor() as cursor: try: await cursor.execute(query_str, args) metadata = CursorResultMetaData(context, cursor.description) async for row in cursor: - yield Row( + record = Row( metadata, metadata._processors, metadata._keymap, Row._default_key_style, row, ) + yield Record(record, result_columns, dialect, column_maps) finally: await cursor.close() def transaction(self) -> TransactionBackend: return AsyncMyTransaction(self) - def _compile( - self, query: ClauseElement - ) -> typing.Tuple[str, dict, CompilationContext]: + def _compile(self, query: ClauseElement) -> typing.Tuple[str, list, tuple]: compiled = query.compile( 
dialect=self._dialect, compile_kwargs={"render_postcompile": True} ) - execution_context = self._dialect.execution_ctx_cls() execution_context.dialect = self._dialect if not isinstance(query, DDLElement): + compiled_params = sorted(compiled.params.items()) + args = compiled.construct_params() for key, val in args.items(): if key in compiled._bind_processors: @@ -214,12 +225,23 @@ def _compile( compiled._ad_hoc_textual, compiled._loose_column_name_matching, ) + + mapping = { + key: "$" + str(i) for i, (key, _) in enumerate(compiled_params, start=1) + } + compiled_query = compiled.string % mapping + result_map = compiled._result_columns + else: args = {} + result_map = None + compiled_query = compiled.string - query_message = compiled.string.replace(" \n", " ").replace("\n", " ") - logger.debug("Query: %s Args: %s", query_message, repr(args), extra=LOG_EXTRA) - return compiled.string, args, CompilationContext(execution_context) + query_message = compiled_query.replace(" \n", " ").replace("\n", " ") + logger.debug( + "Query: %s Args: %s", query_message, repr(tuple(args)), extra=LOG_EXTRA + ) + return compiled.string, args, result_map, CompilationContext(execution_context) @property def raw_connection(self) -> asyncmy.connection.Connection: diff --git a/databases/backends/common/__init__.py b/databases/backends/common/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/databases/backends/common/records.py b/databases/backends/common/records.py new file mode 100644 index 00000000..77a4d8fa --- /dev/null +++ b/databases/backends/common/records.py @@ -0,0 +1,142 @@ +import json +import typing +from datetime import date, datetime + +from sqlalchemy.engine.interfaces import Dialect +from sqlalchemy.engine.row import Row as SQLRow +from sqlalchemy.sql.compiler import _CompileLabel +from sqlalchemy.sql.schema import Column +from sqlalchemy.types import TypeEngine + +from databases.interfaces import Record as RecordInterface + +DIALECT_EXCLUDE = 
{"postgresql"} + + +class Record(RecordInterface): + __slots__ = ( + "_row", + "_result_columns", + "_dialect", + "_column_map", + "_column_map_int", + "_column_map_full", + ) + + def __init__( + self, + row: typing.Any, + result_columns: tuple, + dialect: Dialect, + column_maps: typing.Tuple[ + typing.Mapping[typing.Any, typing.Tuple[int, TypeEngine]], + typing.Mapping[int, typing.Tuple[int, TypeEngine]], + typing.Mapping[str, typing.Tuple[int, TypeEngine]], + ], + ) -> None: + self._row = row + self._result_columns = result_columns + self._dialect = dialect + self._column_map, self._column_map_int, self._column_map_full = column_maps + + @property + def _mapping(self) -> typing.Mapping: + return self._row + + def keys(self) -> typing.KeysView: + return self._mapping.keys() + + def values(self) -> typing.ValuesView: + return self._mapping.values() + + def __getitem__(self, key: typing.Any) -> typing.Any: + if len(self._column_map) == 0: + return self._row[key] + elif isinstance(key, Column): + idx, datatype = self._column_map_full[str(key)] + elif isinstance(key, int): + idx, datatype = self._column_map_int[key] + else: + idx, datatype = self._column_map[key] + + raw = self._row[idx] + processor = datatype._cached_result_processor(self._dialect, None) + + if self._dialect.name not in DIALECT_EXCLUDE: + if isinstance(raw, dict): + raw = json.dumps(raw) + + if processor is not None and (not isinstance(raw, (datetime, date))): + return processor(raw) + return raw + + def __iter__(self) -> typing.Iterator: + return iter(self._row.keys()) + + def __len__(self) -> int: + return len(self._row) + + def __getattr__(self, name: str) -> typing.Any: + try: + return self.__getitem__(name) + except KeyError as e: + raise AttributeError(e.args[0]) from e + + +class Row(SQLRow): + def __getitem__(self, key: typing.Any) -> typing.Any: + """ + An instance of a Row in SQLAlchemy allows the access + to the Row._fields as tuple and the Row._mapping for + the values. 
+ """ + if isinstance(key, int): + field = self._fields[key] + return self._mapping[field] + return self._mapping[key] + + def keys(self): + return self._mapping.keys() + + def values(self): + return self._mapping.values() + + def __getattr__(self, name: str) -> typing.Any: + try: + return self.__getitem__(name) + except KeyError as e: + raise AttributeError(e.args[0]) from e + + +def create_column_maps( + result_columns: typing.Any, +) -> typing.Tuple[ + typing.Mapping[typing.Any, typing.Tuple[int, TypeEngine]], + typing.Mapping[int, typing.Tuple[int, TypeEngine]], + typing.Mapping[str, typing.Tuple[int, TypeEngine]], +]: + """ + Generate column -> datatype mappings from the column definitions. + + These mappings are used throughout PostgresConnection methods + to initialize Record-s. The underlying DB driver does not do type + conversion for us so we have to wrap the returned asyncpg.Record-s. + + :return: Three mappings from different ways to address a column to \ + corresponding column indexes and datatypes: \ + 1. by column identifier; \ + 2. by column index; \ + 3. by column name in Column sqlalchemy objects. 
+ """ + column_map, column_map_int, column_map_full = {}, {}, {} + for idx, (column_name, _, column, datatype) in enumerate(result_columns): + column_map[column_name] = (idx, datatype) + column_map_int[idx] = (idx, datatype) + + # Added in SQLA 2.0 and _CompileLabels do not have _annotations + # When this happens, the mapping is on the second position + if isinstance(column[0], _CompileLabel): + column_map_full[str(column[2])] = (idx, datatype) + else: + column_map_full[str(column[0])] = (idx, datatype) + return column_map, column_map_int, column_map_full diff --git a/databases/backends/compilers/__init__.py b/databases/backends/compilers/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/databases/backends/compilers/psycopg.py b/databases/backends/compilers/psycopg.py new file mode 100644 index 00000000..654c22a1 --- /dev/null +++ b/databases/backends/compilers/psycopg.py @@ -0,0 +1,17 @@ +from sqlalchemy.dialects.postgresql.psycopg import PGCompiler_psycopg + + +class APGCompiler_psycopg2(PGCompiler_psycopg): + def construct_params(self, *args, **kwargs): + pd = super().construct_params(*args, **kwargs) + + for column in self.prefetch: + pd[column.key] = self._exec_default(column.default) + + return pd + + def _exec_default(self, default): + if default.is_callable: + return default.arg(self.dialect) + else: + return default.arg diff --git a/databases/backends/dialects/__init__.py b/databases/backends/dialects/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/databases/backends/dialects/psycopg.py b/databases/backends/dialects/psycopg.py new file mode 100644 index 00000000..07bd1880 --- /dev/null +++ b/databases/backends/dialects/psycopg.py @@ -0,0 +1,46 @@ +""" +All the unique changes for the databases package +with the custom Numeric as the deprecated pypostgresql +for backwards compatibility and to make sure the +package can go to SQLAlchemy 2.0+. 
+""" + +import typing + +from sqlalchemy import types, util +from sqlalchemy.dialects.postgresql.base import PGDialect, PGExecutionContext +from sqlalchemy.engine import processors +from sqlalchemy.types import Float, Numeric + + +class PGExecutionContext_psycopg(PGExecutionContext): + ... + + +class PGNumeric(Numeric): + def bind_processor( + self, dialect: typing.Any + ) -> typing.Union[str, None]: # pragma: no cover + return processors.to_str + + def result_processor( + self, dialect: typing.Any, coltype: typing.Any + ) -> typing.Union[float, None]: # pragma: no cover + if self.asdecimal: + return None + else: + return processors.to_float + + +class PGDialect_psycopg(PGDialect): + colspecs = util.update_copy( + PGDialect.colspecs, + { + types.Numeric: PGNumeric, + types.Float: Float, + }, + ) + execution_ctx_cls = PGExecutionContext_psycopg + + +dialect = PGDialect_psycopg diff --git a/databases/backends/mysql.py b/databases/backends/mysql.py index 6b86042f..d93b4a7f 100644 --- a/databases/backends/mysql.py +++ b/databases/backends/mysql.py @@ -7,15 +7,15 @@ from sqlalchemy.dialects.mysql import pymysql from sqlalchemy.engine.cursor import CursorResultMetaData from sqlalchemy.engine.interfaces import Dialect, ExecutionContext -from sqlalchemy.engine.row import Row from sqlalchemy.sql import ClauseElement from sqlalchemy.sql.ddl import DDLElement +from databases.backends.common.records import Record, Row, create_column_maps from databases.core import LOG_EXTRA, DatabaseURL from databases.interfaces import ( ConnectionBackend, DatabaseBackend, - Record, + Record as RecordInterface, TransactionBackend, ) @@ -105,15 +105,17 @@ async def release(self) -> None: await self._database._pool.release(self._connection) self._connection = None - async def fetch_all(self, query: ClauseElement) -> typing.List[Record]: + async def fetch_all(self, query: ClauseElement) -> typing.List[RecordInterface]: assert self._connection is not None, "Connection is not acquired" - query_str, 
args, context = self._compile(query) + query_str, args, result_columns, context = self._compile(query) + column_maps = create_column_maps(result_columns) + dialect = self._dialect cursor = await self._connection.cursor() try: await cursor.execute(query_str, args) rows = await cursor.fetchall() metadata = CursorResultMetaData(context, cursor.description) - return [ + rows = [ Row( metadata, metadata._processors, @@ -123,12 +125,15 @@ async def fetch_all(self, query: ClauseElement) -> typing.List[Record]: ) for row in rows ] + return [Record(row, result_columns, dialect, column_maps) for row in rows] finally: await cursor.close() - async def fetch_one(self, query: ClauseElement) -> typing.Optional[Record]: + async def fetch_one(self, query: ClauseElement) -> typing.Optional[RecordInterface]: assert self._connection is not None, "Connection is not acquired" - query_str, args, context = self._compile(query) + query_str, args, result_columns, context = self._compile(query) + column_maps = create_column_maps(result_columns) + dialect = self._dialect cursor = await self._connection.cursor() try: await cursor.execute(query_str, args) @@ -136,19 +141,20 @@ async def fetch_one(self, query: ClauseElement) -> typing.Optional[Record]: if row is None: return None metadata = CursorResultMetaData(context, cursor.description) - return Row( + row = Row( metadata, metadata._processors, metadata._keymap, Row._default_key_style, row, ) + return Record(row, result_columns, dialect, column_maps) finally: await cursor.close() async def execute(self, query: ClauseElement) -> typing.Any: assert self._connection is not None, "Connection is not acquired" - query_str, args, context = self._compile(query) + query_str, args, _, _ = self._compile(query) cursor = await self._connection.cursor() try: await cursor.execute(query_str, args) @@ -163,7 +169,7 @@ async def execute_many(self, queries: typing.List[ClauseElement]) -> None: cursor = await self._connection.cursor() try: for single_query in 
queries: - single_query, args, context = self._compile(single_query) + single_query, args, _, _ = self._compile(single_query) await cursor.execute(single_query, args) finally: await cursor.close() @@ -172,36 +178,38 @@ async def iterate( self, query: ClauseElement ) -> typing.AsyncGenerator[typing.Any, None]: assert self._connection is not None, "Connection is not acquired" - query_str, args, context = self._compile(query) + query_str, args, result_columns, context = self._compile(query) + column_maps = create_column_maps(result_columns) + dialect = self._dialect cursor = await self._connection.cursor() try: await cursor.execute(query_str, args) metadata = CursorResultMetaData(context, cursor.description) async for row in cursor: - yield Row( + record = Row( metadata, metadata._processors, metadata._keymap, Row._default_key_style, row, ) + yield Record(record, result_columns, dialect, column_maps) finally: await cursor.close() def transaction(self) -> TransactionBackend: return MySQLTransaction(self) - def _compile( - self, query: ClauseElement - ) -> typing.Tuple[str, dict, CompilationContext]: + def _compile(self, query: ClauseElement) -> typing.Tuple[str, list, tuple]: compiled = query.compile( dialect=self._dialect, compile_kwargs={"render_postcompile": True} ) - execution_context = self._dialect.execution_ctx_cls() execution_context.dialect = self._dialect if not isinstance(query, DDLElement): + compiled_params = sorted(compiled.params.items()) + args = compiled.construct_params() for key, val in args.items(): if key in compiled._bind_processors: @@ -214,12 +222,23 @@ def _compile( compiled._ad_hoc_textual, compiled._loose_column_name_matching, ) + + mapping = { + key: "$" + str(i) for i, (key, _) in enumerate(compiled_params, start=1) + } + compiled_query = compiled.string % mapping + result_map = compiled._result_columns + else: args = {} + result_map = None + compiled_query = compiled.string - query_message = compiled.string.replace(" \n", " 
").replace("\n", " ") - logger.debug("Query: %s Args: %s", query_message, repr(args), extra=LOG_EXTRA) - return compiled.string, args, CompilationContext(execution_context) + query_message = compiled_query.replace(" \n", " ").replace("\n", " ") + logger.debug( + "Query: %s Args: %s", query_message, repr(tuple(args)), extra=LOG_EXTRA + ) + return compiled.string, args, result_map, CompilationContext(execution_context) @property def raw_connection(self) -> aiomysql.connection.Connection: diff --git a/databases/backends/postgres.py b/databases/backends/postgres.py index e30c12d7..917d4e1f 100644 --- a/databases/backends/postgres.py +++ b/databases/backends/postgres.py @@ -2,13 +2,12 @@ import typing import asyncpg -from sqlalchemy.dialects.postgresql import pypostgresql from sqlalchemy.engine.interfaces import Dialect from sqlalchemy.sql import ClauseElement from sqlalchemy.sql.ddl import DDLElement -from sqlalchemy.sql.schema import Column -from sqlalchemy.types import TypeEngine +from databases.backends.common.records import Record, create_column_maps +from databases.backends.dialects.psycopg import dialect as psycopg_dialect from databases.core import LOG_EXTRA, DatabaseURL from databases.interfaces import ( ConnectionBackend, @@ -30,7 +29,7 @@ def __init__( self._pool = None def _get_dialect(self) -> Dialect: - dialect = pypostgresql.dialect(paramstyle="pyformat") + dialect = psycopg_dialect(paramstyle="pyformat") dialect.implicit_returning = True dialect.supports_native_enum = True @@ -82,82 +81,6 @@ def connection(self) -> "PostgresConnection": return PostgresConnection(self, self._dialect) -class Record(RecordInterface): - __slots__ = ( - "_row", - "_result_columns", - "_dialect", - "_column_map", - "_column_map_int", - "_column_map_full", - ) - - def __init__( - self, - row: asyncpg.Record, - result_columns: tuple, - dialect: Dialect, - column_maps: typing.Tuple[ - typing.Mapping[typing.Any, typing.Tuple[int, TypeEngine]], - typing.Mapping[int, 
typing.Tuple[int, TypeEngine]], - typing.Mapping[str, typing.Tuple[int, TypeEngine]], - ], - ) -> None: - self._row = row - self._result_columns = result_columns - self._dialect = dialect - self._column_map, self._column_map_int, self._column_map_full = column_maps - - @property - def _mapping(self) -> typing.Mapping: - return self._row - - def keys(self) -> typing.KeysView: - import warnings - - warnings.warn( - "The `Row.keys()` method is deprecated to mimic SQLAlchemy behaviour, " - "use `Row._mapping.keys()` instead.", - DeprecationWarning, - ) - return self._mapping.keys() - - def values(self) -> typing.ValuesView: - import warnings - - warnings.warn( - "The `Row.values()` method is deprecated to mimic SQLAlchemy behaviour, " - "use `Row._mapping.values()` instead.", - DeprecationWarning, - ) - return self._mapping.values() - - def __getitem__(self, key: typing.Any) -> typing.Any: - if len(self._column_map) == 0: # raw query - return self._row[key] - elif isinstance(key, Column): - idx, datatype = self._column_map_full[str(key)] - elif isinstance(key, int): - idx, datatype = self._column_map_int[key] - else: - idx, datatype = self._column_map[key] - raw = self._row[idx] - processor = datatype._cached_result_processor(self._dialect, None) - - if processor is not None: - return processor(raw) - return raw - - def __iter__(self) -> typing.Iterator: - return iter(self._row.keys()) - - def __len__(self) -> int: - return len(self._row) - - def __getattr__(self, name: str) -> typing.Any: - return self._mapping.get(name) - - class PostgresConnection(ConnectionBackend): def __init__(self, database: PostgresBackend, dialect: Dialect): self._database = database @@ -180,7 +103,7 @@ async def fetch_all(self, query: ClauseElement) -> typing.List[RecordInterface]: query_str, args, result_columns = self._compile(query) rows = await self._connection.fetch(query_str, *args) dialect = self._dialect - column_maps = self._create_column_maps(result_columns) + column_maps = 
create_column_maps(result_columns) return [Record(row, result_columns, dialect, column_maps) for row in rows] async def fetch_one(self, query: ClauseElement) -> typing.Optional[RecordInterface]: @@ -193,7 +116,7 @@ async def fetch_one(self, query: ClauseElement) -> typing.Optional[RecordInterfa row, result_columns, self._dialect, - self._create_column_maps(result_columns), + create_column_maps(result_columns), ) async def fetch_val( @@ -213,7 +136,7 @@ async def fetch_val( async def execute(self, query: ClauseElement) -> typing.Any: assert self._connection is not None, "Connection is not acquired" - query_str, args, result_columns = self._compile(query) + query_str, args, _ = self._compile(query) return await self._connection.fetchval(query_str, *args) async def execute_many(self, queries: typing.List[ClauseElement]) -> None: @@ -222,7 +145,7 @@ async def execute_many(self, queries: typing.List[ClauseElement]) -> None: # loop through multiple executes here, which should all end up # using the same prepared statement. 
for single_query in queries: - single_query, args, result_columns = self._compile(single_query) + single_query, args, _ = self._compile(single_query) await self._connection.execute(single_query, *args) async def iterate( @@ -230,7 +153,7 @@ async def iterate( ) -> typing.AsyncGenerator[typing.Any, None]: assert self._connection is not None, "Connection is not acquired" query_str, args, result_columns = self._compile(query) - column_maps = self._create_column_maps(result_columns) + column_maps = create_column_maps(result_columns) async for row in self._connection.cursor(query_str, *args): yield Record(row, result_columns, self._dialect, column_maps) @@ -255,7 +178,6 @@ def _compile(self, query: ClauseElement) -> typing.Tuple[str, list, tuple]: processors[key](val) if key in processors else val for key, val in compiled_params ] - result_map = compiled._result_columns else: compiled_query = compiled.string @@ -268,34 +190,6 @@ def _compile(self, query: ClauseElement) -> typing.Tuple[str, list, tuple]: ) return compiled_query, args, result_map - @staticmethod - def _create_column_maps( - result_columns: tuple, - ) -> typing.Tuple[ - typing.Mapping[typing.Any, typing.Tuple[int, TypeEngine]], - typing.Mapping[int, typing.Tuple[int, TypeEngine]], - typing.Mapping[str, typing.Tuple[int, TypeEngine]], - ]: - """ - Generate column -> datatype mappings from the column definitions. - - These mappings are used throughout PostgresConnection methods - to initialize Record-s. The underlying DB driver does not do type - conversion for us so we have wrap the returned asyncpg.Record-s. - - :return: Three mappings from different ways to address a column to \ - corresponding column indexes and datatypes: \ - 1. by column identifier; \ - 2. by column index; \ - 3. by column name in Column sqlalchemy objects. 
- """ - column_map, column_map_int, column_map_full = {}, {}, {} - for idx, (column_name, _, column, datatype) in enumerate(result_columns): - column_map[column_name] = (idx, datatype) - column_map_int[idx] = (idx, datatype) - column_map_full[str(column[0])] = (idx, datatype) - return column_map, column_map_int, column_map_full - @property def raw_connection(self) -> asyncpg.connection.Connection: assert self._connection is not None, "Connection is not acquired" diff --git a/databases/backends/sqlite.py b/databases/backends/sqlite.py index 19464627..f0732d6f 100644 --- a/databases/backends/sqlite.py +++ b/databases/backends/sqlite.py @@ -6,17 +6,12 @@ from sqlalchemy.dialects.sqlite import pysqlite from sqlalchemy.engine.cursor import CursorResultMetaData from sqlalchemy.engine.interfaces import Dialect, ExecutionContext -from sqlalchemy.engine.row import Row from sqlalchemy.sql import ClauseElement from sqlalchemy.sql.ddl import DDLElement +from databases.backends.common.records import Record, Row, create_column_maps from databases.core import LOG_EXTRA, DatabaseURL -from databases.interfaces import ( - ConnectionBackend, - DatabaseBackend, - Record, - TransactionBackend, -) +from databases.interfaces import ConnectionBackend, DatabaseBackend, TransactionBackend logger = logging.getLogger("databases") @@ -33,23 +28,10 @@ def __init__( self._pool = SQLitePool(self._database_url, **self._options) async def connect(self) -> None: - pass - # assert self._pool is None, "DatabaseBackend is already running" - # self._pool = await aiomysql.create_pool( - # host=self._database_url.hostname, - # port=self._database_url.port or 3306, - # user=self._database_url.username or getpass.getuser(), - # password=self._database_url.password, - # db=self._database_url.database, - # autocommit=True, - # ) + ... 
async def disconnect(self) -> None: - pass - # assert self._pool is not None, "DatabaseBackend is not running" - # self._pool.close() - # await self._pool.wait_closed() - # self._pool = None + ... def connection(self) -> "SQLiteConnection": return SQLiteConnection(self._pool, self._dialect) @@ -93,12 +75,14 @@ async def release(self) -> None: async def fetch_all(self, query: ClauseElement) -> typing.List[Record]: assert self._connection is not None, "Connection is not acquired" - query_str, args, context = self._compile(query) + query_str, args, result_columns, context = self._compile(query) + column_maps = create_column_maps(result_columns) + dialect = self._dialect async with self._connection.execute(query_str, args) as cursor: rows = await cursor.fetchall() metadata = CursorResultMetaData(context, cursor.description) - return [ + rows = [ Row( metadata, metadata._processors, @@ -108,27 +92,31 @@ async def fetch_all(self, query: ClauseElement) -> typing.List[Record]: ) for row in rows ] + return [Record(row, result_columns, dialect, column_maps) for row in rows] async def fetch_one(self, query: ClauseElement) -> typing.Optional[Record]: assert self._connection is not None, "Connection is not acquired" - query_str, args, context = self._compile(query) + query_str, args, result_columns, context = self._compile(query) + column_maps = create_column_maps(result_columns) + dialect = self._dialect async with self._connection.execute(query_str, args) as cursor: row = await cursor.fetchone() if row is None: return None metadata = CursorResultMetaData(context, cursor.description) - return Row( + row = Row( metadata, metadata._processors, metadata._keymap, Row._default_key_style, row, ) + return Record(row, result_columns, dialect, column_maps) async def execute(self, query: ClauseElement) -> typing.Any: assert self._connection is not None, "Connection is not acquired" - query_str, args, context = self._compile(query) + query_str, args, result_columns, context = 
self._compile(query) async with self._connection.cursor() as cursor: await cursor.execute(query_str, args) if cursor.lastrowid == 0: @@ -144,34 +132,38 @@ async def iterate( self, query: ClauseElement ) -> typing.AsyncGenerator[typing.Any, None]: assert self._connection is not None, "Connection is not acquired" - query_str, args, context = self._compile(query) + query_str, args, result_columns, context = self._compile(query) + column_maps = create_column_maps(result_columns) + dialect = self._dialect + async with self._connection.execute(query_str, args) as cursor: metadata = CursorResultMetaData(context, cursor.description) async for row in cursor: - yield Row( + record = Row( metadata, metadata._processors, metadata._keymap, Row._default_key_style, row, ) + yield Record(record, result_columns, dialect, column_maps) def transaction(self) -> TransactionBackend: return SQLiteTransaction(self) - def _compile( - self, query: ClauseElement - ) -> typing.Tuple[str, list, CompilationContext]: + def _compile(self, query: ClauseElement) -> typing.Tuple[str, list, tuple]: compiled = query.compile( dialect=self._dialect, compile_kwargs={"render_postcompile": True} ) - execution_context = self._dialect.execution_ctx_cls() execution_context.dialect = self._dialect args = [] + result_map = None if not isinstance(query, DDLElement): + compiled_params = sorted(compiled.params.items()) + params = compiled.construct_params() for key in compiled.positiontup: raw_val = params[key] @@ -189,11 +181,20 @@ def _compile( compiled._loose_column_name_matching, ) - query_message = compiled.string.replace(" \n", " ").replace("\n", " ") + mapping = { + key: "$" + str(i) for i, (key, _) in enumerate(compiled_params, start=1) + } + compiled_query = compiled.string % mapping + result_map = compiled._result_columns + + else: + compiled_query = compiled.string + + query_message = compiled_query.replace(" \n", " ").replace("\n", " ") logger.debug( "Query: %s Args: %s", query_message, 
repr(tuple(args)), extra=LOG_EXTRA ) - return compiled.string, args, CompilationContext(execution_context) + return compiled.string, args, result_map, CompilationContext(execution_context) @property def raw_connection(self) -> aiosqlite.core.Connection: diff --git a/databases/core.py b/databases/core.py index 8394ab5c..4382e625 100644 --- a/databases/core.py +++ b/databases/core.py @@ -326,7 +326,7 @@ def _build_query( return query.bindparams(**values) if values is not None else query elif values: - return query.values(**values) + return query.values(**values) # type: ignore return query diff --git a/docs/index.md b/docs/index.md index b18de817..e326df00 100644 --- a/docs/index.md +++ b/docs/index.md @@ -15,9 +15,12 @@ It allows you to make queries using the powerful [SQLAlchemy Core][sqlalchemy-co expression language, and provides support for PostgreSQL, MySQL, and SQLite. Databases is suitable for integrating against any async Web framework, such as [Starlette][starlette], -[Sanic][sanic], [Responder][responder], [Quart][quart], [aiohttp][aiohttp], [Tornado][tornado], or [FastAPI][fastapi]. +[Sanic][sanic], [Responder][responder], [Quart][quart], [aiohttp][aiohttp], [Tornado][tornado], +[FastAPI][fastapi] or [Esmerald][esmerald]. -**Requirements**: Python 3.7+ +**Documentation**: [https://www.encode.io/databases/](https://www.encode.io/databases/) + +**Requirements**: Python 3.8+ --- @@ -89,7 +92,7 @@ rows = await database.fetch_all(query=query) print('High Scores:', rows) ``` -Check out the documentation on [making database queries](database_queries.md) +Check out the documentation on [making database queries](https://www.encode.io/databases/database_queries/) for examples of how to start using databases together with SQLAlchemy core expressions. 
@@ -114,3 +117,4 @@ for examples of how to start using databases together with SQLAlchemy core expre [aiohttp]: https://github.com/aio-libs/aiohttp [tornado]: https://github.com/tornadoweb/tornado [fastapi]: https://github.com/tiangolo/fastapi +[esmerald]: https://github.com/dymmond/esmerald diff --git a/mkdocs.yml b/mkdocs.yml index 2dbabde8..83aced4d 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -2,21 +2,21 @@ site_name: Databases site_description: Async database support for Python. theme: - name: 'material' + name: "material" repo_name: encode/databases repo_url: https://github.com/encode/databases # edit_uri: "" nav: - - Introduction: 'index.md' - - Database Queries: 'database_queries.md' - - Connections & Transactions: 'connections_and_transactions.md' - - Tests & Migrations: 'tests_and_migrations.md' - - Contributing: 'contributing.md' + - Introduction: "index.md" + - Database Queries: "database_queries.md" + - Connections & Transactions: "connections_and_transactions.md" + - Tests & Migrations: "tests_and_migrations.md" + - Contributing: "contributing.md" markdown_extensions: - - mkautodoc - - admonition - - pymdownx.highlight - - pymdownx.superfences + - mkautodoc + - admonition + - pymdownx.highlight + - pymdownx.superfences diff --git a/requirements.txt b/requirements.txt index 0699d3cc..53ac168e 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,14 +1,14 @@ -e . # Async database drivers -asyncmy==0.2.5 +asyncmy==0.2.7 +aiopg==1.4.0 aiomysql==0.1.1 -aiopg==1.3.4 aiosqlite==0.17.0 asyncpg==0.26.0 # Sync database drivers for standard tooling around setup/teardown/migrations. 
-psycopg2-binary==2.9.3 +psycopg2-binary==2.9.5 pymysql==1.0.2 # Testing diff --git a/scripts/clean b/scripts/clean index f01cc831..d7388629 100755 --- a/scripts/clean +++ b/scripts/clean @@ -9,6 +9,12 @@ fi if [ -d 'databases.egg-info' ] ; then rm -r databases.egg-info fi +if [ -d '.mypy_cache' ] ; then + rm -r .mypy_cache +fi +if [ -d '.pytest_cache' ] ; then + rm -r .pytest_cache +fi find databases -type f -name "*.py[co]" -delete find databases -type d -name __pycache__ -delete diff --git a/setup.cfg b/setup.cfg index da1831fd..b4182c83 100644 --- a/setup.cfg +++ b/setup.cfg @@ -2,6 +2,11 @@ disallow_untyped_defs = True ignore_missing_imports = True no_implicit_optional = True +disallow_any_generics = false +disallow_untyped_decorators = true +implicit_reexport = true +disallow_incomplete_defs = true +exclude = databases/backends [tool:isort] profile = black diff --git a/setup.py b/setup.py index 3725cab9..3031a8b9 100644 --- a/setup.py +++ b/setup.py @@ -37,7 +37,7 @@ def get_packages(package): setup( name="databases", version=get_version("databases"), - python_requires=">=3.7", + python_requires=">=3.8", url="https://github.com/encode/databases", license="BSD", description="Async database support for Python.", @@ -47,7 +47,7 @@ def get_packages(package): author_email="tom@tomchristie.com", packages=get_packages("databases"), package_data={"databases": ["py.typed"]}, - install_requires=["sqlalchemy>=1.4.42,<1.5"], + install_requires=["sqlalchemy>=2.0.7"], extras_require={ "postgresql": ["asyncpg"], "asyncpg": ["asyncpg"], @@ -66,10 +66,10 @@ def get_packages(package): "Operating System :: OS Independent", "Topic :: Internet :: WWW/HTTP", "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3 :: Only", ], zip_safe=False, diff --git 
a/tests/test_connection_options.py b/tests/test_connection_options.py index e6fe6849..584f6a90 100644 --- a/tests/test_connection_options.py +++ b/tests/test_connection_options.py @@ -6,6 +6,7 @@ import pytest from databases.backends.aiopg import AiopgBackend +from databases.backends.mysql import MySQLBackend from databases.backends.postgres import PostgresBackend from databases.core import DatabaseURL from tests.test_databases import DATABASE_URLS, async_adapter diff --git a/tests/test_databases.py b/tests/test_databases.py index a7545e31..d24466b8 100644 --- a/tests/test_databases.py +++ b/tests/test_databases.py @@ -3,7 +3,6 @@ import decimal import functools import os -import re from unittest.mock import MagicMock, patch import pytest @@ -167,24 +166,24 @@ async def test_queries(database_url): assert result["completed"] == True # fetch_val() - query = sqlalchemy.sql.select([notes.c.text]) + query = sqlalchemy.sql.select(*[notes.c.text]) result = await database.fetch_val(query=query) assert result == "example1" # fetch_val() with no rows - query = sqlalchemy.sql.select([notes.c.text]).where( + query = sqlalchemy.sql.select(*[notes.c.text]).where( notes.c.text == "impossible" ) result = await database.fetch_val(query=query) assert result is None # fetch_val() with a different column - query = sqlalchemy.sql.select([notes.c.id, notes.c.text]) + query = sqlalchemy.sql.select(*[notes.c.id, notes.c.text]) result = await database.fetch_val(query=query, column=1) assert result == "example1" # row access (needed to maintain test coverage for Record.__getitem__ in postgres backend) - query = sqlalchemy.sql.select([notes.c.text]) + query = sqlalchemy.sql.select(*[notes.c.text]) result = await database.fetch_one(query=query) assert result["text"] == "example1" assert result[0] == "example1" @@ -244,6 +243,7 @@ async def test_queries_raw(database_url): query = "SELECT completed FROM notes WHERE text = :text" result = await database.fetch_val(query=query, values={"text": 
"example1"}) assert result == True + query = "SELECT * FROM notes WHERE text = :text" result = await database.fetch_val( query=query, values={"text": "example1"}, column="completed" @@ -354,7 +354,7 @@ async def test_results_support_column_reference(database_url): await database.execute(query, values) # fetch_all() - query = sqlalchemy.select([articles, custom_date]) + query = sqlalchemy.select(*[articles, custom_date]) results = await database.fetch_all(query=query) assert len(results) == 1 assert results[0][articles.c.title] == "Hello, world Article" @@ -689,6 +689,7 @@ async def test_json_field(database_url): # fetch_all() query = session.select() results = await database.fetch_all(query=query) + assert len(results) == 1 assert results[0]["data"] == {"text": "hello", "boolean": True, "int": 1} @@ -1075,52 +1076,6 @@ async def test_column_names(database_url, select_query): assert results[0]["completed"] == True -@pytest.mark.parametrize("database_url", DATABASE_URLS) -@async_adapter -async def test_posgres_interface(database_url): - """ - Since SQLAlchemy 1.4, `Row.values()` is removed and `Row.keys()` is deprecated. - Custom postgres interface mimics more or less this behaviour by deprecating those - two methods - """ - database_url = DatabaseURL(database_url) - - if database_url.scheme not in ["postgresql", "postgresql+asyncpg"]: - pytest.skip("Test is only for asyncpg") - - async with Database(database_url) as database: - async with database.transaction(force_rollback=True): - query = notes.insert() - values = {"text": "example1", "completed": True} - await database.execute(query, values) - - query = notes.select() - result = await database.fetch_one(query=query) - - with pytest.warns( - DeprecationWarning, - match=re.escape( - "The `Row.keys()` method is deprecated to mimic SQLAlchemy behaviour, " - "use `Row._mapping.keys()` instead." 
- ), - ): - assert ( - list(result.keys()) - == [k for k in result] - == ["id", "text", "completed"] - ) - - with pytest.warns( - DeprecationWarning, - match=re.escape( - "The `Row.values()` method is deprecated to mimic SQLAlchemy behaviour, " - "use `Row._mapping.values()` instead." - ), - ): - # avoid checking `id` at index 0 since it may change depending on the launched tests - assert list(result.values())[1:] == ["example1", True] - - @pytest.mark.parametrize("database_url", DATABASE_URLS) @async_adapter async def test_postcompile_queries(database_url): From 4cc05b98c63cdbe5d79de6b5bdd5d36a2b180ebd Mon Sep 17 00:00:00 2001 From: tarsil Date: Wed, 22 Mar 2023 16:44:15 +0000 Subject: [PATCH 2/8] =?UTF-8?q?=F0=9F=AA=B2=20Fix=20specific=20asynpg=20or?= =?UTF-8?q?iented=20test?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * The connections were hanging sometimes during the tests. --- tests/test_databases.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tests/test_databases.py b/tests/test_databases.py index d24466b8..d576b2f0 100644 --- a/tests/test_databases.py +++ b/tests/test_databases.py @@ -498,6 +498,7 @@ def insert_independently(): query = notes.insert().values(text="example1", completed=True) conn.execute(query) + conn.close() def delete_independently(): engine = sqlalchemy.create_engine(str(database_url)) @@ -505,6 +506,7 @@ def delete_independently(): query = notes.delete() conn.execute(query) + conn.close() async with Database(database_url) as database: async with database.transaction(force_rollback=True, isolation="serializable"): From 64dd1b38cecdb7778a1a092f26026840b62dbd66 Mon Sep 17 00:00:00 2001 From: tarsil Date: Wed, 22 Mar 2023 22:07:54 +0000 Subject: [PATCH 3/8] Add python 3.7 back to support * Revert changes automatically applied by IDE * Fix formatting of CI caused by IDE changes * Codebase cleanup for SQLAlchemy 2.0 * Fix EOL in mkdocs --- .github/workflows/publish.yml | 4 +- 
.github/workflows/test-suite.yml | 112 +++++++++++++++---------------- README.md | 10 ++- docs/index.md | 10 ++- mkdocs.yml | 20 +++--- setup.py | 3 +- 6 files changed, 78 insertions(+), 81 deletions(-) diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 6ea3c65f..170e9558 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -4,7 +4,7 @@ name: Publish on: push: tags: - - "*" + - '*' jobs: publish: @@ -15,7 +15,7 @@ jobs: - uses: "actions/checkout@v3" - uses: "actions/setup-python@v4" with: - python-version: 3.8 + python-version: 3.7 - name: "Install dependencies" run: "scripts/install" - name: "Build package & docs" diff --git a/.github/workflows/test-suite.yml b/.github/workflows/test-suite.yml index e00989b5..3c01b801 100644 --- a/.github/workflows/test-suite.yml +++ b/.github/workflows/test-suite.yml @@ -1,63 +1,63 @@ --- - name: Test Suite +name: Test Suite - on: - push: - branches: ["master"] - pull_request: - branches: ["master"] +on: + push: + branches: ["master"] + pull_request: + branches: ["master"] - jobs: - tests: - name: "Python ${{ matrix.python-version }}" - runs-on: "ubuntu-latest" +jobs: + tests: + name: "Python ${{ matrix.python-version }}" + runs-on: "ubuntu-latest" - strategy: - matrix: - python-version: ["3.8", "3.9", "3.10", "3.11"] + strategy: + matrix: + python-version: ["3.7", "3.8", "3.9", "3.10", "3.11"] - services: - mysql: - image: mysql:5.7 - env: - MYSQL_USER: username - MYSQL_PASSWORD: password - MYSQL_ROOT_PASSWORD: password - MYSQL_DATABASE: testsuite - ports: - - 3306:3306 - options: --health-cmd="mysqladmin ping" --health-interval=10s --health-timeout=5s --health-retries=3 + services: + mysql: + image: mysql:5.7 + env: + MYSQL_USER: username + MYSQL_PASSWORD: password + MYSQL_ROOT_PASSWORD: password + MYSQL_DATABASE: testsuite + ports: + - 3306:3306 + options: --health-cmd="mysqladmin ping" --health-interval=10s --health-timeout=5s --health-retries=3 - postgres: - image: 
postgres:14 - env: - POSTGRES_USER: username - POSTGRES_PASSWORD: password - POSTGRES_DB: testsuite - ports: - - 5432:5432 - options: --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5 + postgres: + image: postgres:14 + env: + POSTGRES_USER: username + POSTGRES_PASSWORD: password + POSTGRES_DB: testsuite + ports: + - 5432:5432 + options: --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5 - steps: - - uses: "actions/checkout@v3" - - uses: "actions/setup-python@v4" - with: - python-version: "${{ matrix.python-version }}" - - name: "Install dependencies" - run: "scripts/install" - - name: "Run linting checks" - run: "scripts/check" - - name: "Build package & docs" - run: "scripts/build" - - name: "Run tests" - env: - TEST_DATABASE_URLS: | - sqlite:///testsuite, - sqlite+aiosqlite:///testsuite, - mysql://username:password@localhost:3306/testsuite, - mysql+aiomysql://username:password@localhost:3306/testsuite, - mysql+asyncmy://username:password@localhost:3306/testsuite, - postgresql://username:password@localhost:5432/testsuite, - postgresql+aiopg://username:password@127.0.0.1:5432/testsuite, - postgresql+asyncpg://username:password@localhost:5432/testsuite - run: "scripts/test" \ No newline at end of file + steps: + - uses: "actions/checkout@v3" + - uses: "actions/setup-python@v4" + with: + python-version: "${{ matrix.python-version }}" + - name: "Install dependencies" + run: "scripts/install" + - name: "Run linting checks" + run: "scripts/check" + - name: "Build package & docs" + run: "scripts/build" + - name: "Run tests" + env: + TEST_DATABASE_URLS: | + sqlite:///testsuite, + sqlite+aiosqlite:///testsuite, + mysql://username:password@localhost:3306/testsuite, + mysql+aiomysql://username:password@localhost:3306/testsuite, + mysql+asyncmy://username:password@localhost:3306/testsuite, + postgresql://username:password@localhost:5432/testsuite, + 
postgresql+aiopg://username:password@127.0.0.1:5432/testsuite, + postgresql+asyncpg://username:password@localhost:5432/testsuite + run: "scripts/test" diff --git a/README.md b/README.md index e326df00..89107f2c 100644 --- a/README.md +++ b/README.md @@ -15,12 +15,11 @@ It allows you to make queries using the powerful [SQLAlchemy Core][sqlalchemy-co expression language, and provides support for PostgreSQL, MySQL, and SQLite. Databases is suitable for integrating against any async Web framework, such as [Starlette][starlette], -[Sanic][sanic], [Responder][responder], [Quart][quart], [aiohttp][aiohttp], [Tornado][tornado], -[FastAPI][fastapi] or [Esmerald][esmerald]. +[Sanic][sanic], [Responder][responder], [Quart][quart], [aiohttp][aiohttp], [Tornado][tornado], or [FastAPI][fastapi]. **Documentation**: [https://www.encode.io/databases/](https://www.encode.io/databases/) -**Requirements**: Python 3.8+ +**Requirements**: Python 3.7+ --- @@ -86,7 +85,7 @@ values = [ ] await database.execute_many(query=query, values=values) -# Run a database query. +# Run a database query. query = "SELECT * FROM HighScores" rows = await database.fetch_all(query=query) print('High Scores:', rows) @@ -116,5 +115,4 @@ for examples of how to start using databases together with SQLAlchemy core expre [quart]: https://gitlab.com/pgjones/quart [aiohttp]: https://github.com/aio-libs/aiohttp [tornado]: https://github.com/tornadoweb/tornado -[fastapi]: https://github.com/tiangolo/fastapi -[esmerald]: https://github.com/dymmond/esmerald +[fastapi]: https://github.com/tiangolo/fastapi \ No newline at end of file diff --git a/docs/index.md b/docs/index.md index e326df00..89107f2c 100644 --- a/docs/index.md +++ b/docs/index.md @@ -15,12 +15,11 @@ It allows you to make queries using the powerful [SQLAlchemy Core][sqlalchemy-co expression language, and provides support for PostgreSQL, MySQL, and SQLite. 
Databases is suitable for integrating against any async Web framework, such as [Starlette][starlette], -[Sanic][sanic], [Responder][responder], [Quart][quart], [aiohttp][aiohttp], [Tornado][tornado], -[FastAPI][fastapi] or [Esmerald][esmerald]. +[Sanic][sanic], [Responder][responder], [Quart][quart], [aiohttp][aiohttp], [Tornado][tornado], or [FastAPI][fastapi]. **Documentation**: [https://www.encode.io/databases/](https://www.encode.io/databases/) -**Requirements**: Python 3.8+ +**Requirements**: Python 3.7+ --- @@ -86,7 +85,7 @@ values = [ ] await database.execute_many(query=query, values=values) -# Run a database query. +# Run a database query. query = "SELECT * FROM HighScores" rows = await database.fetch_all(query=query) print('High Scores:', rows) @@ -116,5 +115,4 @@ for examples of how to start using databases together with SQLAlchemy core expre [quart]: https://gitlab.com/pgjones/quart [aiohttp]: https://github.com/aio-libs/aiohttp [tornado]: https://github.com/tornadoweb/tornado -[fastapi]: https://github.com/tiangolo/fastapi -[esmerald]: https://github.com/dymmond/esmerald +[fastapi]: https://github.com/tiangolo/fastapi \ No newline at end of file diff --git a/mkdocs.yml b/mkdocs.yml index 83aced4d..2dbabde8 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -2,21 +2,21 @@ site_name: Databases site_description: Async database support for Python. 
theme: - name: "material" + name: 'material' repo_name: encode/databases repo_url: https://github.com/encode/databases # edit_uri: "" nav: - - Introduction: "index.md" - - Database Queries: "database_queries.md" - - Connections & Transactions: "connections_and_transactions.md" - - Tests & Migrations: "tests_and_migrations.md" - - Contributing: "contributing.md" + - Introduction: 'index.md' + - Database Queries: 'database_queries.md' + - Connections & Transactions: 'connections_and_transactions.md' + - Tests & Migrations: 'tests_and_migrations.md' + - Contributing: 'contributing.md' markdown_extensions: - - mkautodoc - - admonition - - pymdownx.highlight - - pymdownx.superfences + - mkautodoc + - admonition + - pymdownx.highlight + - pymdownx.superfences diff --git a/setup.py b/setup.py index 3031a8b9..a6bb8965 100644 --- a/setup.py +++ b/setup.py @@ -37,7 +37,7 @@ def get_packages(package): setup( name="databases", version=get_version("databases"), - python_requires=">=3.8", + python_requires=">=3.7", url="https://github.com/encode/databases", license="BSD", description="Async database support for Python.", @@ -66,6 +66,7 @@ def get_packages(package): "Operating System :: OS Independent", "Topic :: Internet :: WWW/HTTP", "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", From a703d5460fb45e1b909ff8a5ab09770a392eabdf Mon Sep 17 00:00:00 2001 From: tarsil Date: Fri, 24 Mar 2023 10:23:02 +0000 Subject: [PATCH 4/8] Revert the previous changes to index.md --- docs/index.md | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/docs/index.md b/docs/index.md index 89107f2c..fba3f147 100644 --- a/docs/index.md +++ b/docs/index.md @@ -17,8 +17,6 @@ expression language, and provides support for PostgreSQL, MySQL, and SQLite. 
Databases is suitable for integrating against any async Web framework, such as [Starlette][starlette], [Sanic][sanic], [Responder][responder], [Quart][quart], [aiohttp][aiohttp], [Tornado][tornado], or [FastAPI][fastapi]. -**Documentation**: [https://www.encode.io/databases/](https://www.encode.io/databases/) - **Requirements**: Python 3.7+ --- @@ -91,7 +89,7 @@ rows = await database.fetch_all(query=query) print('High Scores:', rows) ``` -Check out the documentation on [making database queries](https://www.encode.io/databases/database_queries/) +Check out the documentation on [making database queries](database_queries.md) for examples of how to start using databases together with SQLAlchemy core expressions. From deedd134a9fce3c45ce7e79ebc3d420a034020bd Mon Sep 17 00:00:00 2001 From: tarsil Date: Fri, 24 Mar 2023 10:30:33 +0000 Subject: [PATCH 5/8] Update changelog and revert version --- CHANGELOG.md | 7 ------- 1 file changed, 7 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 6e05e81a..36d59393 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,18 +4,11 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). -## 0.8.0 (Mar 21, 2022) - ### Added - Support for SQLAlchemy 2.0+ - Added internal support for the new psycopg dialect. -### Changed - -- Removed support for python 3.7 in favour to 3.8. - - Python 3.7 support will end in June 2023 - ## 0.7.0 (Dec 18th, 2022) ### Fixed From ec987913b8ef815b68629d65ccbd3dc45143efcd Mon Sep 17 00:00:00 2001 From: tarsil Date: Wed, 5 Apr 2023 10:24:26 +0100 Subject: [PATCH 6/8] Rollback CHANGELOG to place in another PR. 
--- CHANGELOG.md | 7 +------ tests/test_connection_options.py | 1 - 2 files changed, 1 insertion(+), 7 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 36d59393..fcddb061 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,11 +4,6 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). -### Added - -- Support for SQLAlchemy 2.0+ -- Added internal support for the new psycopg dialect. - ## 0.7.0 (Dec 18th, 2022) ### Fixed @@ -128,4 +123,4 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). * Fix type hinting for sqlite backend (#227) * Fix SQLAlchemy DDL statements (#226) * Make fetch_val call fetch_one for type conversion (#246) -* Unquote username and password in DatabaseURL (#248) +* Unquote username and password in DatabaseURL (#248) \ No newline at end of file diff --git a/tests/test_connection_options.py b/tests/test_connection_options.py index 584f6a90..e6fe6849 100644 --- a/tests/test_connection_options.py +++ b/tests/test_connection_options.py @@ -6,7 +6,6 @@ import pytest from databases.backends.aiopg import AiopgBackend -from databases.backends.mysql import MySQLBackend from databases.backends.postgres import PostgresBackend from databases.core import DatabaseURL from tests.test_databases import DATABASE_URLS, async_adapter From b0ec5b3484f702f19ec24784c68fa3922d06ba0d Mon Sep 17 00:00:00 2001 From: ansipunk Date: Sun, 24 Dec 2023 15:45:13 +0100 Subject: [PATCH 7/8] Fix compatibility with SQLAlchemy>=2.0.11 --- databases/backends/aiopg.py | 3 --- databases/backends/asyncmy.py | 3 --- databases/backends/common/records.py | 13 ++++--------- databases/backends/mysql.py | 3 --- databases/backends/sqlite.py | 3 --- 5 files changed, 4 insertions(+), 21 deletions(-) diff --git a/databases/backends/aiopg.py b/databases/backends/aiopg.py index 0b2a6e89..0b4d95a3 100644 --- a/databases/backends/aiopg.py +++ 
b/databases/backends/aiopg.py @@ -134,7 +134,6 @@ async def fetch_all(self, query: ClauseElement) -> typing.List[RecordInterface]: metadata, metadata._processors, metadata._keymap, - Row._default_key_style, row, ) for row in rows @@ -159,7 +158,6 @@ async def fetch_one(self, query: ClauseElement) -> typing.Optional[RecordInterfa metadata, metadata._processors, metadata._keymap, - Row._default_key_style, row, ) return Record(row, result_columns, dialect, column_maps) @@ -202,7 +200,6 @@ async def iterate( metadata, metadata._processors, metadata._keymap, - Row._default_key_style, row, ) yield Record(record, result_columns, dialect, column_maps) diff --git a/databases/backends/asyncmy.py b/databases/backends/asyncmy.py index f224c7cc..37831313 100644 --- a/databases/backends/asyncmy.py +++ b/databases/backends/asyncmy.py @@ -121,7 +121,6 @@ async def fetch_all(self, query: ClauseElement) -> typing.List[RecordInterface]: metadata, metadata._processors, metadata._keymap, - Row._default_key_style, row, ) for row in rows @@ -148,7 +147,6 @@ async def fetch_one(self, query: ClauseElement) -> typing.Optional[RecordInterfa metadata, metadata._processors, metadata._keymap, - Row._default_key_style, row, ) return Record(row, result_columns, dialect, column_maps) @@ -193,7 +191,6 @@ async def iterate( metadata, metadata._processors, metadata._keymap, - Row._default_key_style, row, ) yield Record(record, result_columns, dialect, column_maps) diff --git a/databases/backends/common/records.py b/databases/backends/common/records.py index 77a4d8fa..1d8a2fd4 100644 --- a/databases/backends/common/records.py +++ b/databases/backends/common/records.py @@ -91,9 +91,10 @@ def __getitem__(self, key: typing.Any) -> typing.Any: the values. 
""" if isinstance(key, int): - field = self._fields[key] - return self._mapping[field] - return self._mapping[key] + return super().__getitem__(key) + + idx = self._key_to_index[key][0] + return super().__getitem__(idx) def keys(self): return self._mapping.keys() @@ -101,12 +102,6 @@ def keys(self): def values(self): return self._mapping.values() - def __getattr__(self, name: str) -> typing.Any: - try: - return self.__getitem__(name) - except KeyError as e: - raise AttributeError(e.args[0]) from e - def create_column_maps( result_columns: typing.Any, diff --git a/databases/backends/mysql.py b/databases/backends/mysql.py index d93b4a7f..7d65912c 100644 --- a/databases/backends/mysql.py +++ b/databases/backends/mysql.py @@ -120,7 +120,6 @@ async def fetch_all(self, query: ClauseElement) -> typing.List[RecordInterface]: metadata, metadata._processors, metadata._keymap, - Row._default_key_style, row, ) for row in rows @@ -145,7 +144,6 @@ async def fetch_one(self, query: ClauseElement) -> typing.Optional[RecordInterfa metadata, metadata._processors, metadata._keymap, - Row._default_key_style, row, ) return Record(row, result_columns, dialect, column_maps) @@ -190,7 +188,6 @@ async def iterate( metadata, metadata._processors, metadata._keymap, - Row._default_key_style, row, ) yield Record(record, result_columns, dialect, column_maps) diff --git a/databases/backends/sqlite.py b/databases/backends/sqlite.py index f0732d6f..762dfb95 100644 --- a/databases/backends/sqlite.py +++ b/databases/backends/sqlite.py @@ -87,7 +87,6 @@ async def fetch_all(self, query: ClauseElement) -> typing.List[Record]: metadata, metadata._processors, metadata._keymap, - Row._default_key_style, row, ) for row in rows @@ -109,7 +108,6 @@ async def fetch_one(self, query: ClauseElement) -> typing.Optional[Record]: metadata, metadata._processors, metadata._keymap, - Row._default_key_style, row, ) return Record(row, result_columns, dialect, column_maps) @@ -143,7 +141,6 @@ async def iterate( metadata, 
metadata._processors, metadata._keymap, - Row._default_key_style, row, ) yield Record(record, result_columns, dialect, column_maps) From c5a5b19a7259ec146896fae0dd0983c540767c53 Mon Sep 17 00:00:00 2001 From: tarsil Date: Wed, 21 Feb 2024 12:17:04 +0000 Subject: [PATCH 8/8] Fix linting and formatting --- databases/backends/sqlite.py | 15 +++++++++++---- tests/test_databases.py | 29 +++++++++++++++++++++-------- 2 files changed, 32 insertions(+), 12 deletions(-) diff --git a/databases/backends/sqlite.py b/databases/backends/sqlite.py index 8b649fc9..16e17e9e 100644 --- a/databases/backends/sqlite.py +++ b/databases/backends/sqlite.py @@ -29,7 +29,8 @@ def __init__( self._dialect.supports_native_decimal = False self._pool = SQLitePool(self._database_url, **self._options) - async def connect(self) -> None: ... + async def connect(self) -> None: + ... async def disconnect(self) -> None: # if it extsis, remove reference to connection to cached in-memory database on disconnect @@ -141,7 +142,9 @@ async def execute_many(self, queries: typing.List[ClauseElement]) -> None: for single_query in queries: await self.execute(single_query) - async def iterate(self, query: ClauseElement) -> typing.AsyncGenerator[typing.Any, None]: + async def iterate( + self, query: ClauseElement + ) -> typing.AsyncGenerator[typing.Any, None]: assert self._connection is not None, "Connection is not acquired" query_str, args, result_columns, context = self._compile(query) column_maps = create_column_maps(result_columns) @@ -191,7 +194,9 @@ def _compile(self, query: ClauseElement) -> typing.Tuple[str, list, tuple]: compiled._loose_column_name_matching, ) - mapping = {key: "$" + str(i) for i, (key, _) in enumerate(compiled_params, start=1)} + mapping = { + key: "$" + str(i) for i, (key, _) in enumerate(compiled_params, start=1) + } compiled_query = compiled.string % mapping result_map = compiled._result_columns @@ -199,7 +204,9 @@ def _compile(self, query: ClauseElement) -> typing.Tuple[str, list, 
tuple]: compiled_query = compiled.string query_message = compiled_query.replace(" \n", " ").replace("\n", " ") - logger.debug("Query: %s Args: %s", query_message, repr(tuple(args)), extra=LOG_EXTRA) + logger.debug( + "Query: %s Args: %s", query_message, repr(tuple(args)), extra=LOG_EXTRA + ) return compiled.string, args, result_map, CompilationContext(execution_context) @property diff --git a/tests/test_databases.py b/tests/test_databases.py index 86cdd6d9..cd907fd1 100644 --- a/tests/test_databases.py +++ b/tests/test_databases.py @@ -5,7 +5,6 @@ import gc import itertools import os -import re import sqlite3 from typing import MutableMapping from unittest.mock import MagicMock, patch @@ -179,7 +178,9 @@ async def test_queries(database_url): assert result == "example1" # fetch_val() with no rows - query = sqlalchemy.sql.select(*[notes.c.text]).where(notes.c.text == "impossible") + query = sqlalchemy.sql.select(*[notes.c.text]).where( + notes.c.text == "impossible" + ) result = await database.fetch_val(query=query) assert result is None @@ -496,7 +497,9 @@ async def check_transaction(transaction, active_transaction): assert transaction._transaction is active_transaction async with database.transaction() as transaction: - await asyncio.create_task(check_transaction(transaction, transaction._transaction)) + await asyncio.create_task( + check_transaction(transaction, transaction._transaction) + ) @pytest.mark.parametrize("database_url", DATABASE_URLS) @@ -509,10 +512,14 @@ async def test_transaction_context_child_task_inheritance_example(database_url): async with Database(database_url) as database: async with database.transaction(): # Create a note - await database.execute(notes.insert().values(id=1, text="setup", completed=True)) + await database.execute( + notes.insert().values(id=1, text="setup", completed=True) + ) # Change the note from the same task - await database.execute(notes.update().where(notes.c.id == 1).values(text="prior")) + await database.execute( + 
notes.update().where(notes.c.id == 1).values(text="prior") + ) # Confirm the change result = await database.fetch_one(notes.select().where(notes.c.id == 1)) @@ -520,7 +527,9 @@ async def test_transaction_context_child_task_inheritance_example(database_url): async def run_update_from_child_task(connection): # Change the note from a child task - await connection.execute(notes.update().where(notes.c.id == 1).values(text="test")) + await connection.execute( + notes.update().where(notes.c.id == 1).values(text="test") + ) await asyncio.create_task(run_update_from_child_task(database.connection())) @@ -573,7 +582,9 @@ async def test_transaction_context_sibling_task_isolation_example(database_url): async def tx1(connection): async with connection.transaction(): - await db.execute(notes.insert(), values={"id": 1, "text": "tx1", "completed": False}) + await db.execute( + notes.insert(), values={"id": 1, "text": "tx1", "completed": False} + ) setup.set() await done.wait() @@ -875,7 +886,9 @@ async def test_transaction_decorator_concurrent(database_url): @database.transaction() async def insert_data(): - await database.execute(query=notes.insert().values(text="example", completed=True)) + await database.execute( + query=notes.insert().values(text="example", completed=True) + ) async with database: await asyncio.gather(