diff --git a/.env.example b/.env.example index 01eae25..78afbbd 100644 --- a/.env.example +++ b/.env.example @@ -1,10 +1,10 @@ # MSSQL -MSSQL_HOST="localhost" -MSSQL_PORT="1433" -MSSQL_USER="sa" -MSSQL_PASSWORD="L0caldev" -MSSQL_DB_SCHEMA="dbo" -MSSQL_DATABASE="master" +MSSQL_HOST=localhost +MSSQL_PORT=1433 +MSSQL_USER=sa +MSSQL_PASSWORD=L0caldev +MSSQL_DB_SCHEMA=dbo +MSSQL_DATABASE=master MSSQL_POOL_SIZE=20 MSSQL_OVERFLOW=10 MSSQL_ECHO=False @@ -12,14 +12,14 @@ MSSQL_ODBCDRIVER_VERSION=18 # PostgreSQL -POSTGRESQL_HOST="localhost" -POSTGRESQL_PORT="5432" -POSTGRESQL_USER="postgres" -POSTGRESQL_PASSWORD="postgres" -POSTGRESQL_DATABASE="master" +POSTGRESQL_HOST=localhost +POSTGRESQL_PORT=5432 +POSTGRESQL_USER=postgres +POSTGRESQL_PASSWORD=postgres +POSTGRESQL_DATABASE=postgres POSTGRESQL_ECHO=False # SQLite -SQLITE_FILE_PATH="sqlite.db" +SQLITE_FILE_PATH=sqlite.db SQLITE_ECHO=False diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 0b201f6..57c1fac 100755 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -1,4 +1,4 @@ -name: Run Unit Tests on Aditional Branches +name: Run Unit Tests on: push: diff --git a/.github/workflows/workflow.yml b/.github/workflows/workflow.yml index dce9f36..444a085 100755 --- a/.github/workflows/workflow.yml +++ b/.github/workflows/workflow.yml @@ -7,7 +7,7 @@ on: - main jobs: - test_build: + build: runs-on: ubuntu-latest strategy: matrix: @@ -38,7 +38,7 @@ jobs: release: runs-on: ubuntu-latest needs: - - test_build + - build env: GITHUB_TOKEN: ${{ github.token }} steps: diff --git a/README.md b/README.md index 108f73a..97b8649 100755 --- a/README.md +++ b/README.md @@ -23,20 +23,41 @@ pip install ddcDatabases[pgsql] ``` + # Databases ++ Parameters for all classes are declared as OPTIONAL falling back to [.env](.env.example) file\ ++ All examples are using [db_utils.py](ddcDatabases/db_utils.py)\ ++ By default a session is always open\ ++ But the engine can be available, examples below + + ## 
SQLITE ``` -class Sqlite(db_file_path: str, echo=False) +class Sqlite( + file_path: Optional[str] = None, + echo: Optional[bool] = None, +) ``` +#### Session ```python import sqlalchemy as sa -from ddcDatabases.sqlite import Sqlite +from ddcDatabases import DBUtils, Sqlite with Sqlite() as session: + utils = DBUtils(session) stmt = sa.select(Table).where(Table.id == 1) - results = session.fetchall(stmt) + results = utils.fetchall(stmt) + for row in results: + print(row) +``` + +#### Sync Engine +```python +from ddcDatabases import Sqlite +with Sqlite().engine() as engine: + ... ``` @@ -45,96 +66,133 @@ with Sqlite() as session: ## MSSQL ``` -class MSSQL(db_file_path: str, echo=False) +class MSSQL( + host: Optional[str] = None, + port: Optional[int] = None, + username: Optional[str] = None, + password: Optional[str] = None, + database: Optional[str] = None, + schema: Optional[str] = None, + echo: Optional[bool] = None, + pool_size: Optional[int] = None, + max_overflow: Optional[int] = None +) ``` +#### Sync Example ```python import sqlalchemy as sa -from ddcDatabases.mssql import MSSQL +from ddcDatabases import DBUtils, MSSQL with MSSQL() as session: stmt = sa.select(Table).where(Table.id == 1) - results = session.fetchall(stmt) + db_utils = DBUtils(session) + results = db_utils.fetchall(stmt) + for row in results: + print(row) ``` +#### Async Example +```python +import sqlalchemy as sa +from ddcDatabases import DBUtilsAsync, MSSQL +async with MSSQL() as session: + stmt = sa.select(Table).where(Table.id == 1) + db_utils = DBUtilsAsync(session) + results = await db_utils.fetchall(stmt) + for row in results: + print(row) +``` +#### Sync Engine +```python +from ddcDatabases import MSSQL +with MSSQL().engine() as engine: + ... +``` + +#### Async Engine +```python +from ddcDatabases import MSSQL +async with MSSQL().async_engine() as engine: + ... 
+``` ## PostgreSQL - + Using driver "psycopg2" as default ++ Using driver [psycopg2](https://pypi.org/project/psycopg2/) as default ``` -class DBPostgres(future=True, echo=False, drivername="psycopg2", **kwargs) +class DBPostgres( + host: Optional[str] = None, + port: Optional[int] = None, + username: Optional[str] = None, + password: Optional[str] = None, + database: Optional[str] = None, + echo: Optional[bool] = None, +) ``` +#### Sync Example ```python import sqlalchemy as sa -from ddcDatabases import DBPostgres, DBUtils -db_configs = { - "username": username, - "password": password, - "host": host, - "port": port, - "database": database -} -dbpostgres = DBPostgres(**db_configs) -with dbpostgres.session() as session: +from ddcDatabases import DBUtils, PostgreSQL +with PostgreSQL() as session: stmt = sa.select(Table).where(Table.id == 1) db_utils = DBUtils(session) results = db_utils.fetchall(stmt) + for row in results: + print(row) ``` -+ DBUTILS - + Uses SQLAlchemy statements +#### Async Example ```python -from ddcDatabases import DBUtils -db_utils = DBUtils(session) -db_utils.add(stmt) -db_utils.execute(stmt) -db_utils.fetchall(stmt) -db_utils.fetchone(stmt) -db_utils.fetch_value(stmt) +import sqlalchemy as sa +from ddcDatabases import DBUtilsAsync, PostgreSQL +async with PostgreSQL() as session: + stmt = sa.select(Table).where(Table.id == 1) + db_utils = DBUtilsAsync(session) + results = await db_utils.fetchall(stmt) + for row in results: + print(row) ``` - -## DBPOSTGRES ASYNC - + Using driver "asyncpg" -``` -class DBPostgresAsync(future=True, echo=False, drivername="asyncpg", **kwargs) +#### Sync Engine +```python +from ddcDatabases import PostgreSQL +with PostgreSQL().engine() as engine: + ... 
``` +#### Async Engine ```python -import sqlalchemy as sa -from ddcDatabases import DBPostgresAsync, DBUtilsAsync -db_configs = { - "username": username, - "password": password, - "host": host, - "port": port, - "database": database -} -dbpostgres = DBPostgresAsync(**db_configs) -async with dbpostgres.session() as session: - stmt = sa.select(Table).where(Table.id == 1) - db_utils = DBUtilsAsync(session) - results = await db_utils.fetchall(stmt) +from ddcDatabases import PostgreSQL +async with PostgreSQL().async_engine() as engine: + ... ``` -+ DBUTILS ASYNC - + Uses SQLAlchemy statements + + + +## DBUtils and DBUtilsAsync ++ Take an open session as parameter ++ Can use SQLAlchemy statements ++ Execute function can be used to update, insert or any SQLAlchemy.text ```python -from ddcDatabases import DBUtilsAsync -db_utils = DBUtilsAsync(session) -await db_utils.add(stmt) -await db_utils.execute(stmt) -await db_utils.fetchall(stmt) -await db_utils.fetchone(stmt) -await db_utils.fetch_value(stmt) +from ddcDatabases import DBUtils +db_utils = DBUtils(session) +db_utils.fetchall(stmt) # returns a list of RowMapping +db_utils.fetchvalue(stmt) # fetch a single value, returning as string +db_utils.insert(stmt) # insert into model table +db_utils.deleteall(model) # delete all records from model +db_utils.insertbulk(model, list[dict]) # insert records into model from a list of dicts +db_utils.execute(stmt) # this is the actual execute from session ``` + # Source Code ### Build ```shell diff --git a/ddcDatabases/__init__.py b/ddcDatabases/__init__.py index fe28046..d902ae1 100755 --- a/ddcDatabases/__init__.py +++ b/ddcDatabases/__init__.py @@ -1,15 +1,18 @@ import logging from importlib.metadata import version from typing import Literal, NamedTuple +from .db_utils import DBUtils, DBUtilsAsync from .mssql import MSSQL from .postgresql import PostgreSQL from .sqlite import Sqlite __all__ = ( - "Sqlite", - "PostgreSQL", + "DBUtils", + "DBUtilsAsync", "MSSQL", + "PostgreSQL", + 
"Sqlite", ) diff --git a/ddcDatabases/db_utils.py b/ddcDatabases/db_utils.py index 5e702b3..1c0a534 100755 --- a/ddcDatabases/db_utils.py +++ b/ddcDatabases/db_utils.py @@ -2,9 +2,9 @@ import sys from datetime import datetime import sqlalchemy as sa +from sqlalchemy import RowMapping from sqlalchemy.engine import URL -from sqlalchemy.engine.result import MappingResult -from sqlalchemy.ext.asyncio import AsyncMappingResult, AsyncSession +from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.orm import Session from .exceptions import ( DBDeleteAllDataException, @@ -65,7 +65,7 @@ class DBUtils: def __init__(self, session): self.session = session - def fetchall(self, stmt) -> MappingResult: + def fetchall(self, stmt) -> list[RowMapping]: cursor = None try: cursor = self.session.execute(stmt) @@ -129,7 +129,7 @@ class DBUtilsAsync: def __init__(self, session): self.session = session - async def fetchall(self, stmt) -> AsyncMappingResult: + async def fetchall(self, stmt) -> list[RowMapping]: cursor = None try: cursor = await self.session.execute(stmt) diff --git a/ddcDatabases/mssql.py b/ddcDatabases/mssql.py index aa52911..37662c6 100755 --- a/ddcDatabases/mssql.py +++ b/ddcDatabases/mssql.py @@ -1,13 +1,14 @@ # -*- coding: utf-8 -*- -import sqlalchemy as sa -from sqlalchemy.engine import base, URL +from contextlib import asynccontextmanager, contextmanager +from typing import Optional +from sqlalchemy.engine import create_engine, Engine, URL from sqlalchemy.ext.asyncio import ( AsyncEngine, AsyncSession, - create_async_engine + create_async_engine, ) from sqlalchemy.orm import Session, sessionmaker -from .db_utils import DBUtils, DBUtilsAsync, TestConnections +from .db_utils import TestConnections from .settings import MSSQLSettings @@ -18,96 +19,110 @@ class MSSQL: def __init__( self, - host: str = None, - port: int = None, - username: str = None, - password: str = None, - database: str = None, - schema: str = None, - echo: bool = None, - pool_size: int = 
None, - max_overflow: int = None, + host: Optional[str] = None, + port: Optional[int] = None, + username: Optional[str] = None, + password: Optional[str] = None, + database: Optional[str] = None, + schema: Optional[str] = None, + echo: Optional[bool] = None, + pool_size: Optional[int] = None, + max_overflow: Optional[int] = None, ): _settings = MSSQLSettings() - self.session = None - self.host = _settings.host if not host else host - self.username = _settings.username if not username else username - self.password = _settings.password if not password else password - self.port = int(_settings.port if not port else int(port)) - self.database = _settings.database if not database else database - self.schema = _settings.db_schema if not schema else schema - self.echo = bool(_settings.echo if not echo else bool(echo)) - self.pool_size = int(_settings.pool_size - if not pool_size else int(pool_size)) - self.max_overflow = int(_settings.max_overflow - if not max_overflow else int(max_overflow)) + self.host = host or _settings.host + self.username = username or _settings.username + self.password = password or _settings.password + self.port = port or int(_settings.port) + self.database = database or _settings.database + self.schema = schema or _settings.db_schema + self.echo = echo or _settings.echo + self.pool_size = pool_size or int(_settings.pool_size) + self.max_overflow = max_overflow or int(_settings.max_overflow) - self.odbcdriver_version = int(_settings.odbcdriver_version) + self.temp_engine: Optional[Engine | AsyncEngine] = None + self.session: Optional[Session | AsyncSession] = None self.async_driver = _settings.async_driver self.sync_driver = _settings.sync_driver - self.query = { - "driver": f"ODBC Driver {self.odbcdriver_version} for SQL Server", - "TrustServerCertificate": "yes", + self.odbcdriver_version = int(_settings.odbcdriver_version) + self.connection_url = { + "username": self.username, + "password": self.password, + "host": self.host, + "port": 
self.port, + "database": self.database, + "query": { + "driver": f"ODBC Driver {self.odbcdriver_version} for SQL Server", + "TrustServerCertificate": "yes", + }, + } + self.engine_args = { + "pool_size": self.pool_size, + "max_overflow": self.max_overflow, + "echo": self.echo, } + if not self.username or not self.password: + raise RuntimeError("Missing username or password") + def __enter__(self): - engine = self._get_engine(sync=True) - session_maker = sessionmaker(bind=engine, - class_=Session, - autoflush=True, - expire_on_commit=True) - engine.dispose() - with session_maker.begin() as session: - self.session = session + with self.engine() as self.temp_engine: + session_maker = sessionmaker(bind=self.temp_engine, + class_=Session, + autoflush=True, + expire_on_commit=True) + with session_maker.begin() as self.session: self._test_connection_sync(self.session) - db_utils = DBUtils(self.session) - return db_utils + return self.session def __exit__(self, exc_type, exc_val, exc_tb): - self.session.close() + if self.session: + self.session.close() + if self.temp_engine: + self.temp_engine.dispose() async def __aenter__(self): - engine = self._get_engine(sync=False) - session_maker = sessionmaker(bind=engine, - class_=AsyncSession, - autoflush=True, - expire_on_commit=False) - await engine.dispose() - async with session_maker.begin() as session: - self.session = session + async with self.async_engine() as self.temp_engine: + session_maker = sessionmaker(bind=self.temp_engine, + class_=AsyncSession, + autoflush=True, + expire_on_commit=False) + async with session_maker.begin() as self.session: await self._test_connection_async(self.session) - db_utils = DBUtilsAsync(self.session) - return db_utils + return self.session async def __aexit__(self, exc_type, exc_val, exc_tb): - await self.session.close() + if self.session: + await self.session.close() + if self.temp_engine: + await self.temp_engine.dispose() - def _get_engine(self, sync: bool = True) -> base.Engine | 
AsyncEngine: + @contextmanager + def engine(self) -> Engine: _connection_url = URL.create( - drivername=self.sync_driver if sync else self.async_driver, - username=self.username, - password=self.password, - host=self.host, - port=self.port, - database=self.database, - query=self.query, + **self.connection_url, + drivername=self.sync_driver ) - _engine_args = { "url": _connection_url, - "pool_size": self.pool_size, - "max_overflow": self.max_overflow, - "echo": self.echo, } + engine = create_engine(**_engine_args) + engine.update_execution_options(schema_translate_map={None: self.schema}) + yield engine - if sync: - engine = sa.create_engine(**_engine_args) - else: - engine = create_async_engine(**_engine_args) - + @asynccontextmanager + async def async_engine(self) -> AsyncEngine: + _connection_url = URL.create( + **self.connection_url, + drivername=self.async_driver + ) + _engine_args = { + "url": _connection_url, + } + engine = create_async_engine(**_engine_args) engine.update_execution_options(schema_translate_map={None: self.schema}) - return engine + yield engine def _test_connection_sync(self, session: Session) -> None: host_url = URL.create( diff --git a/ddcDatabases/postgresql.py b/ddcDatabases/postgresql.py index 4f5915a..4d3dbb8 100755 --- a/ddcDatabases/postgresql.py +++ b/ddcDatabases/postgresql.py @@ -1,13 +1,14 @@ # -*- encoding: utf-8 -*- -import sqlalchemy as sa -from sqlalchemy.engine import base, URL +from contextlib import asynccontextmanager, contextmanager +from typing import Optional +from sqlalchemy.engine import create_engine, Engine, URL from sqlalchemy.ext.asyncio import ( AsyncEngine, AsyncSession, create_async_engine, ) from sqlalchemy.orm import Session, sessionmaker -from .db_utils import DBUtils, DBUtilsAsync, TestConnections +from .db_utils import TestConnections from .settings import PostgreSQLSettings @@ -18,84 +19,101 @@ class PostgreSQL: def __init__( self, - host: str = None, - port: int = None, - username: str = None, - 
password: str = None, - database: str = None, - echo: bool = None, + host: Optional[str] = None, + port: Optional[int] = None, + username: Optional[str] = None, + password: Optional[str] = None, + database: Optional[str] = None, + echo: Optional[bool] = None, ): _settings = PostgreSQLSettings() - self.session = None - self.host = _settings.host if not host else host - self.username = _settings.username if not username else username - self.password = _settings.password if not password else password - self.port = int(_settings.port if not port else int(port)) - self.database = _settings.database if not database else database - self.echo = _settings.echo if not echo else echo + self.host = host or _settings.host + self.username = username or _settings.username + self.password = password or _settings.password + self.port = port or int(_settings.port) + self.database = database or _settings.database + self.echo = echo or _settings.echo + + self.temp_engine: Optional[Engine | AsyncEngine] = None + self.session: Optional[Session | AsyncSession] = None self.async_driver = _settings.async_driver self.sync_driver = _settings.sync_driver + self.connection_url = { + "username": self.username, + "password": self.password, + "host": self.host, + "port": self.port, + "database": self.database, + } + self.engine_args = { + "echo": self.echo, + } + + if not self.username or not self.password: + raise RuntimeError("Missing username or password") def __enter__(self): - engine = self._get_engine(sync=True) - session_maker = sessionmaker(bind=engine, - class_=Session, - autoflush=True, - expire_on_commit=True) - engine.dispose() - with session_maker.begin() as session: - self.session = session + with self.engine() as self.temp_engine: + session_maker = sessionmaker(bind=self.temp_engine, + class_=Session, + autoflush=True, + expire_on_commit=True) + with session_maker.begin() as self.session: self._test_connection_sync(self.session) - db_utils = DBUtils(self.session) - return db_utils 
+ return self.session def __exit__(self, exc_type, exc_val, exc_tb): - self.session.close() + if self.session: + self.session.close() + if self.temp_engine: + self.temp_engine.dispose() async def __aenter__(self): - engine = self._get_engine(sync=False) - session_maker = sessionmaker(bind=engine, - class_=AsyncSession, - autoflush=True, - expire_on_commit=False) - await engine.dispose() - async with session_maker.begin() as session: - self.session = session + async with self.async_engine() as self.temp_engine: + session_maker = sessionmaker(bind=self.temp_engine, + class_=AsyncSession, + autoflush=True, + expire_on_commit=False) + async with session_maker.begin() as self.session: await self._test_connection_async(self.session) - db_utils = DBUtilsAsync(self.session) - return db_utils + return self.session async def __aexit__(self, exc_type, exc_val, exc_tb): - await self.session.close() + if self.session: + await self.session.close() + if self.temp_engine: + await self.temp_engine.dispose() - def _get_engine(self, sync: bool = True) -> base.Engine | AsyncEngine: + @contextmanager + def engine(self) -> Engine: _connection_url = URL.create( - drivername=self.sync_driver if sync else self.async_driver, - host=self.host, - port=self.port, - username=self.username, - password=self.password, - database=self.database, + **self.connection_url, + drivername=self.sync_driver ) - _engine_args = { "url": _connection_url, - "echo": self.echo, } + engine = create_engine(**_engine_args) + yield engine - if sync: - engine = sa.create_engine(**_engine_args) - else: - engine = create_async_engine(**_engine_args) - - return engine + @asynccontextmanager + async def async_engine(self) -> AsyncEngine: + _connection_url = URL.create( + **self.connection_url, + drivername=self.async_driver + ) + _engine_args = { + "url": _connection_url, + } + engine = create_async_engine(**_engine_args) + yield engine def _test_connection_sync(self, session: Session) -> None: host_url = URL.create( 
drivername=self.sync_driver, + username=self.username, host=self.host, port=self.port, - username=self.username, database=self.database, ) test_connection = TestConnections(sync_session=session, host_url=host_url) @@ -104,9 +122,9 @@ def _test_connection_sync(self, session: Session) -> None: async def _test_connection_async(self, session: AsyncSession) -> None: host_url = URL.create( drivername=self.async_driver, + username=self.username, host=self.host, port=self.port, - username=self.username, database=self.database, ) test_connection = TestConnections(async_session=session, host_url=host_url) diff --git a/ddcDatabases/settings.py b/ddcDatabases/settings.py index fcb6f01..e0535ee 100755 --- a/ddcDatabases/settings.py +++ b/ddcDatabases/settings.py @@ -1,4 +1,5 @@ # -*- coding: utf-8 -*- +from typing import Optional from dotenv import load_dotenv from pydantic import Field from pydantic_settings import BaseSettings, SettingsConfigDict @@ -10,8 +11,8 @@ class SQLiteSettings(BaseSettings): """settings defined here with fallback to reading ENV variables""" - file_path: str = Field(default="sqlite.db") - echo: bool = Field(default=False) + file_path: Optional[str] = Field(default="sqlite.db") + echo: Optional[bool] = Field(default=False) model_config = SettingsConfigDict(env_prefix="SQLITE_", env_file=".env", extra="allow") @@ -19,15 +20,15 @@ class SQLiteSettings(BaseSettings): class PostgreSQLSettings(BaseSettings): """settings defined here with fallback to reading ENV variables""" - host: str = Field(default="localhost") - port: int = Field(default=1433) - username: str = Field(default="sa") - password: str = Field(default=None) - database: str = Field(default="master") + host: Optional[str] = Field(default="localhost") + port: Optional[int] = Field(default=5432) + username: Optional[str] = Field(default="postgres") + password: Optional[str] = Field(default="postgres") + database: Optional[str] = Field(default="postgres") - echo: bool = Field(default=False) - 
async_driver: str = Field(default="postgresql+asyncpg") - sync_driver: str = Field(default="postgresql+psycopg2") + echo: Optional[bool] = Field(default=False) + async_driver: Optional[str] = Field(default="postgresql+asyncpg") + sync_driver: Optional[str] = Field(default="postgresql+psycopg2") model_config = SettingsConfigDict(env_prefix="POSTGRESQL_", env_file=".env", extra="allow") @@ -35,18 +36,18 @@ class PostgreSQLSettings(BaseSettings): class MSSQLSettings(BaseSettings): """settings defined here with fallback to reading ENV variables""" - host: str = Field(default="localhost") - port: int = Field(default=1433) - username: str = Field(default="sa") - password: str = Field(default=None) - db_schema: str = Field(default="dbo") - database: str = Field(default="master") - - echo: bool = Field(default=False) - pool_size: int = Field(default=20) - max_overflow: int = Field(default=10) - odbcdriver_version: int = Field(default=18) - async_driver: str = Field(default="mssql+aioodbc") - sync_driver: str = Field(default="mssql+pyodbc") + host: Optional[str] = Field(default="localhost") + port: Optional[int] = Field(default=1433) + username: Optional[str] = Field(default="sa") + password: Optional[str] = Field(default=None) + db_schema: Optional[str] = Field(default="dbo") + database: Optional[str] = Field(default="master") + + echo: Optional[bool] = Field(default=False) + pool_size: Optional[int] = Field(default=20) + max_overflow: Optional[int] = Field(default=10) + odbcdriver_version: Optional[int] = Field(default=18) + async_driver: Optional[str] = Field(default="mssql+aioodbc") + sync_driver: Optional[str] = Field(default="mssql+pyodbc") model_config = SettingsConfigDict(env_prefix="MSSQL_", env_file=".env", extra="allow") diff --git a/ddcDatabases/sqlite.py b/ddcDatabases/sqlite.py index 48c6251..5d4de1b 100755 --- a/ddcDatabases/sqlite.py +++ b/ddcDatabases/sqlite.py @@ -1,9 +1,10 @@ # -*- encoding: utf-8 -*- import sys +from contextlib import contextmanager from 
datetime import datetime +from typing import Optional from sqlalchemy.engine import create_engine, Engine from sqlalchemy.orm import Session, sessionmaker -from .db_utils import DBUtils from .settings import SQLiteSettings @@ -14,41 +15,45 @@ class Sqlite: def __init__( self, - file_path: str = None, - echo: bool = None, + file_path: Optional[str] = None, + echo: Optional[bool] = None, ): _settings = SQLiteSettings() + self.temp_engine = None self.session = None - self.file_path = _settings.file_path if not file_path else file_path - self.echo = _settings.echo if not echo else echo + self.file_path = file_path or _settings.file_path + self.echo = echo or _settings.echo def __enter__(self): - engine = self._get_engine() - session_maker = sessionmaker(bind=engine, - class_=Session, - autoflush=True, - expire_on_commit=True) - engine.dispose() - with session_maker.begin() as session: - self.session = session - db_utils = DBUtils(self.session) - return db_utils + with self.engine() as self.temp_engine: + session_maker = sessionmaker(bind=self.temp_engine, + class_=Session, + autoflush=True, + expire_on_commit=True) + + with session_maker.begin() as self.session: + return self.session def __exit__(self, exc_type, exc_val, exc_tb): - self.session.close() + if self.session: + self.session.close() + if self.temp_engine: + self.temp_engine.dispose() - def _get_engine(self) -> Engine | None: + @contextmanager + def engine(self) -> Engine | None: try: _engine_args = { "url": f"sqlite:///{self.file_path}", "echo": self.echo, } - engine = create_engine(**_engine_args) - return engine + yield engine except Exception as e: dt = datetime.now().strftime("%Y-%m-%dT%H:%M:%S.%f")[:-3] sys.stderr.write( - f"[{dt}]:[ERROR]:Unable to Create Database Engine | {repr(e)}" + f"[{dt}]:" + "[ERROR]:Unable to Create Database Engine | " + f"{repr(e)}" ) raise diff --git a/poetry.lock b/poetry.lock index 8f7ee8e..f32eb4b 100644 --- a/poetry.lock +++ b/poetry.lock @@ -203,13 +203,13 @@ test = 
["pytest (>=6)"] [[package]] name = "faker" -version = "33.0.0" +version = "33.1.0" description = "Faker is a Python package that generates fake data for you." optional = false python-versions = ">=3.8" files = [ - {file = "Faker-33.0.0-py3-none-any.whl", hash = "sha256:68e5580cb6b4226710886e595eabc13127149d6e71e9d1db65506a7fbe2c7fce"}, - {file = "faker-33.0.0.tar.gz", hash = "sha256:9b01019c1ddaf2253ca2308c0472116e993f4ad8fc9905f82fa965e0c6f932e9"}, + {file = "Faker-33.1.0-py3-none-any.whl", hash = "sha256:d30c5f0e2796b8970de68978365247657486eb0311c5abe88d0b895b68dff05d"}, + {file = "faker-33.1.0.tar.gz", hash = "sha256:1c925fc0e86a51fc46648b504078c88d0cd48da1da2595c4e712841cab43a1e4"}, ] [package.dependencies] @@ -359,13 +359,13 @@ files = [ [[package]] name = "pydantic" -version = "2.10.1" +version = "2.10.2" description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic-2.10.1-py3-none-any.whl", hash = "sha256:a8d20db84de64cf4a7d59e899c2caf0fe9d660c7cfc482528e7020d7dd189a7e"}, - {file = "pydantic-2.10.1.tar.gz", hash = "sha256:a4daca2dc0aa429555e0656d6bf94873a7dc5f54ee42b1f5873d666fb3f35560"}, + {file = "pydantic-2.10.2-py3-none-any.whl", hash = "sha256:cfb96e45951117c3024e6b67b25cdc33a3cb7b2fa62e239f7af1378358a1d99e"}, + {file = "pydantic-2.10.2.tar.gz", hash = "sha256:2bc2d7f17232e0841cbba4641e65ba1eb6fafb3a08de3a091ff3ce14a197c4fa"}, ] [package.dependencies] @@ -713,13 +713,43 @@ sqlcipher = ["sqlcipher3_binary"] [[package]] name = "tomli" -version = "2.1.0" +version = "2.2.1" description = "A lil' TOML parser" optional = false python-versions = ">=3.8" files = [ - {file = "tomli-2.1.0-py3-none-any.whl", hash = "sha256:a5c57c3d1c56f5ccdf89f6523458f60ef716e210fc47c4cfb188c5ba473e0391"}, - {file = "tomli-2.1.0.tar.gz", hash = "sha256:3f646cae2aec94e17d04973e4249548320197cfabdf130015d023de4b74d8ab8"}, + {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"}, + {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8"}, + {file = "tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff"}, + {file = "tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b"}, + {file = "tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea"}, + {file = "tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e"}, + {file = "tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98"}, + {file = "tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4"}, + {file = "tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7"}, + {file = "tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", 
hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744"}, + {file = "tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec"}, + {file = "tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69"}, + {file = "tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc"}, + {file = "tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"}, ] [[package]] @@ -741,4 +771,4 @@ pgsql = ["asyncpg", "psycopg2"] [metadata] lock-version = "2.0" python-versions = ">=3.10,<4.0" -content-hash = "91868ce45dc728a22563453c0ad8cbb9deee1db12c0d66f5aeadd839ce307607" +content-hash = "8bb7b3d44405247702cfe12833bf92a0bd1a275cf996e44f94cdb8884a7f7156" diff --git a/pyproject.toml b/pyproject.toml index f29064f..c04b740 100755 --- a/pyproject.toml +++ b/pyproject.toml @@ -37,6 +37,7 @@ pyodbc = ">=5.2.0" aioodbc = ">=0.5.0" psycopg2 = "^2.9.10" asyncpg = "^0.30.0" +pytest = "^8.3.3" [tool.poetry.extras] mssql = [ "pyodbc", "aioodbc" ] diff --git a/tests/conftest.py b/tests/conftest.py index baf336c..e8f88ee 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,21 +1,17 @@ # -*- coding: utf-8 -*- import pytest -import sqlalchemy as sa -from sqlalchemy.orm import Session -from tests.data.base_data import database_engine, get_fake_test_data -from tests.models.sqlite_model import ModelTest +from ddcDatabases import Sqlite +from tests.data.base_data import db_filename, get_fake_test_data -@pytest.fixture(name="db_session") +@pytest.fixture(name="db_session", scope="session") def db_session(): - with Session(database_engine) as session: + with Sqlite(db_filename) as session: yield session -@pytest.fixture 
+@pytest.fixture(name="fake_test_data", scope="session") def fake_test_data(db_session): # init fdata = get_fake_test_data() yield fdata - # teardown - db_session.execute(sa.delete(ModelTest)) diff --git a/tests/dal/sqlite_dal.py b/tests/dal/sqlite_dal.py index 958c0b6..c30ff4c 100644 --- a/tests/dal/sqlite_dal.py +++ b/tests/dal/sqlite_dal.py @@ -23,5 +23,5 @@ def get(self, test_id: int): stmt = sa.select(*self.columns).where(ModelTest.id == test_id) results = self.db_utils.fetchall(stmt) return results - except DBFetchAllException: + except DBFetchAllException as e: return None diff --git a/tests/data/base_data.py b/tests/data/base_data.py index f5bd65a..006a5da 100644 --- a/tests/data/base_data.py +++ b/tests/data/base_data.py @@ -1,9 +1,8 @@ # -*- coding: utf-8 -*- from faker import Faker -from ddcDatabases import DBSqlite -database_engine = DBSqlite(":memory:").engine() +db_filename = "test.db" def _set_randoms(): diff --git a/tests/unit/test_sqlite.py b/tests/unit/test_sqlite.py index 7c2d703..a5fb213 100644 --- a/tests/unit/test_sqlite.py +++ b/tests/unit/test_sqlite.py @@ -1,24 +1,32 @@ # -*- coding: utf-8 -*- +import os import pytest +from ddcDatabases import Sqlite from tests.dal.sqlite_dal import SqliteDal -from tests.data.base_data import database_engine +from tests.data.base_data import db_filename from tests.models.sqlite_model import ModelTest -class TestConfigDal: +def db_engine(): + with Sqlite(db_filename).engine() as engine: + return engine + + +class TestSQLite: @classmethod def setup_class(cls): - ModelTest.__table__.create(database_engine) + ModelTest.__table__.create(db_engine()) @classmethod def teardown_class(cls): - ModelTest.__table__.drop(database_engine) + ModelTest.__table__.drop(db_engine()) + os.remove(db_filename) - @pytest.fixture(autouse=True) + @pytest.fixture(autouse=True, scope="class") def test_insert(self, db_session, fake_test_data): + db_session.add(ModelTest(**fake_test_data)) config_dal = SqliteDal(db_session) config_id = 
fake_test_data["id"] - db_session.add(ModelTest(**fake_test_data)) results = config_dal.get(config_id) assert len(results) == 1