From d764e947519eb17a00d5d9e5a80735a62ca5e8eb Mon Sep 17 00:00:00 2001 From: D0rs4n <41237606+D0rs4n@users.noreply.github.com> Date: Sat, 13 Nov 2021 10:18:52 +0100 Subject: [PATCH 01/10] Migrate Reminder endpoints from the site API --- alembic/env.py | 4 +- api/core/database/models/api/bot/reminder.py | 2 +- api/endpoints/__init__.py | 8 + api/endpoints/dependencies/__init__.py | 0 api/endpoints/dependencies/database.py | 16 ++ api/endpoints/reminder/__init__.py | 1 + .../reminder/reminder_dependencies.py | 8 + api/endpoints/reminder/reminder_endpoints.py | 223 ++++++++++++++++++ api/endpoints/reminder/reminder_schemas.py | 59 +++++ api/main.py | 8 +- tox.ini | 7 +- 11 files changed, 324 insertions(+), 12 deletions(-) create mode 100644 api/endpoints/dependencies/__init__.py create mode 100644 api/endpoints/dependencies/database.py create mode 100644 api/endpoints/reminder/__init__.py create mode 100644 api/endpoints/reminder/reminder_dependencies.py create mode 100644 api/endpoints/reminder/reminder_endpoints.py create mode 100644 api/endpoints/reminder/reminder_schemas.py diff --git a/alembic/env.py b/alembic/env.py index 897f135..990bb11 100644 --- a/alembic/env.py +++ b/alembic/env.py @@ -48,7 +48,7 @@ def run_migrations_offline() -> None: context.run_migrations() -def do_run_migrations(connection: AsyncConnection): +def do_run_migrations(connection: AsyncConnection) -> None: """Run all migrations on the given connection.""" context.configure( connection=connection, @@ -61,7 +61,7 @@ def do_run_migrations(connection: AsyncConnection): context.run_migrations() -async def run_migrations_online(): +async def run_migrations_online() -> None: """ Run migrations in 'online' mode. 
diff --git a/api/core/database/models/api/bot/reminder.py b/api/core/database/models/api/bot/reminder.py index 877dd95..06035b4 100644 --- a/api/core/database/models/api/bot/reminder.py +++ b/api/core/database/models/api/bot/reminder.py @@ -25,7 +25,7 @@ class Reminder(Base): # Whether this reminder is still active. # If not, it has been sent out to the user. - active = Column(Boolean, nullable=False) + active = Column(Boolean, nullable=False, default=True) # The channel ID that this message was # sent in, taken from Discord. channel_id = Column(BigInteger, nullable=False) diff --git a/api/endpoints/__init__.py b/api/endpoints/__init__.py index dd2965e..bc37a7f 100644 --- a/api/endpoints/__init__.py +++ b/api/endpoints/__init__.py @@ -5,3 +5,11 @@ There are currently no plan to use a strictly versioned API design, as this API is currently tightly coupled with a single client application. """ + +from fastapi import APIRouter + +from .reminder.reminder_endpoints import reminder + +bot_router = APIRouter(prefix="/bot") + +bot_router.include_router(reminder) diff --git a/api/endpoints/dependencies/__init__.py b/api/endpoints/dependencies/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/api/endpoints/dependencies/database.py b/api/endpoints/dependencies/database.py new file mode 100644 index 0000000..49dc010 --- /dev/null +++ b/api/endpoints/dependencies/database.py @@ -0,0 +1,16 @@ +from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine +from sqlalchemy.orm import sessionmaker + +from api.core import settings + +engine = create_async_engine(settings.database_url, future=True) +session_factory = sessionmaker(engine, class_=AsyncSession) + + +async def create_database_session() -> None: + """A FastAPI dependency that creates an SQLAlchemy session.""" + try: + async with session_factory() as session: + yield session + finally: + await session.close() diff --git a/api/endpoints/reminder/__init__.py b/api/endpoints/reminder/__init__.py new 
file mode 100644 index 0000000..1f699ff --- /dev/null +++ b/api/endpoints/reminder/__init__.py @@ -0,0 +1 @@ +from .reminder_endpoints import reminder diff --git a/api/endpoints/reminder/reminder_dependencies.py b/api/endpoints/reminder/reminder_dependencies.py new file mode 100644 index 0000000..3df1753 --- /dev/null +++ b/api/endpoints/reminder/reminder_dependencies.py @@ -0,0 +1,8 @@ +from fastapi import Depends + +from .reminder_schemas import ReminderFilter + + +async def filter_values(reminder_filter: ReminderFilter = Depends()) -> dict: + """Returns a dictionary exported from a ReminderFilter model from the Path, with None values excluded.""" + return reminder_filter.dict(exclude_none=True) diff --git a/api/endpoints/reminder/reminder_endpoints.py b/api/endpoints/reminder/reminder_endpoints.py new file mode 100644 index 0000000..b62f378 --- /dev/null +++ b/api/endpoints/reminder/reminder_endpoints.py @@ -0,0 +1,223 @@ +from typing import Optional, Union + +from fastapi import APIRouter, Depends +from fastapi.responses import JSONResponse +from sqlalchemy import select, update +from sqlalchemy.ext.asyncio import AsyncSession + +from api.core.database.models.api.bot import Reminder, User +from api.core.schemas import ErrorMessage +from api.endpoints.dependencies.database import create_database_session +from .reminder_dependencies import filter_values +from .reminder_schemas import ReminderCreateIn, ReminderPatchIn, ReminderResponse + +reminder = APIRouter(prefix="/reminders") + + +@reminder.get( + "/", + status_code=200, + response_model=list[ReminderResponse], + response_model_by_alias=False, + responses={404: {"model": ErrorMessage}}, +) +async def get_reminders( + db_session: AsyncSession = Depends(create_database_session), + db_filter_values: dict = Depends(filter_values), +) -> Union[JSONResponse, list[ReminderResponse], None]: + """ + ### GET /bot/reminders. + + Returns all reminders in the database. + #### Response format + >>> [ + ... { + ... 
'active': True, + ... 'author': 1020103901030, + ... 'mentions': [ + ... 336843820513755157, + ... 165023948638126080, + ... 267628507062992896 + ... ], + ... 'content': "Make dinner", + ... 'expiration': '5018-11-20T15:52:00Z', + ... 'id': 11, + ... 'channel_id': 634547009956872193, + ... 'jump_url': "https://discord.com/channels///", + ... 'failures': 3 + ... }, + ... ... + ... ] + #### Status codes + - 200: returned on success + ## Authentication + Requires an API token. + """ + if not db_filter_values: + if not (results := (await db_session.execute(select(Reminder))).scalars().all()): + return [] + return results + elif not (filtered_results := (await db_session.execute( + select(Reminder). + filter_by(**db_filter_values)) + ).scalars().all()): + return JSONResponse( + status_code=404, + content={ + "error": "There are no reminders with the specified filter values." + }, + ) + else: + return filtered_results + + +@reminder.get( + "/{reminder_id}", + status_code=200, + response_model=ReminderResponse, + response_model_by_alias=False, + responses={404: {"model": ErrorMessage}}, +) +async def get_reminder_by_id( + reminder_id: int, db_session: AsyncSession = Depends(create_database_session) +) -> Union[JSONResponse, ReminderResponse]: + """ + ### GET /bot/reminders/. + + Fetches the reminder with the given id. + #### Response format + >>> + ... { + ... 'active': True, + ... 'author': 1020103901030, + ... 'mentions': [ + ... 336843820513755157, + ... 165023948638126080, + ... 267628507062992896 + ... ], + ... 'content': "Make dinner", + ... 'expiration': '5018-11-20T15:52:00Z', + ... 'id': 11, + ... 'channel_id': 634547009956872193, + ... 'jump_url': "https://discord.com/channels///", + ... 'failures': 3 + ... } + #### Status codes + - 200: returned on success + - 404: returned when the reminder doesn't exist + ## Authentication + Requires an API token. 
+ """ + if not (result := (await db_session.execute(select(Reminder).filter_by(id=reminder_id))).scalars().first()): + return JSONResponse( + status_code=404, + content={"error": "There is no reminder in the database with that id!"}, + ) + return result + + +@reminder.patch( + "/{reminder_id}", + status_code=200, + responses={404: {"model": ErrorMessage}, 400: {"model": ErrorMessage}}, +) +async def edit_reminders( + reminder_id: int, + reminder_patch_in: ReminderPatchIn, + db_session: AsyncSession = Depends(create_database_session), +) -> Optional[JSONResponse]: + """ + ### PATCH /bot/reminders/. + + Update the user with the given `id`. + All fields in the request body are optional. + #### Request body + >>> { + ... 'mentions': list[int], + ... 'content': str, + ... 'expiration': str, # ISO-formatted datetime + ... 'failures': int + ... } + #### Status codes + - 200: returned on success + - 400: if the body format is invalid + - 404: if no user with the given ID could be found + ## Authentication + Requires an API token. + """ + if not (await db_session.execute(select(Reminder).filter_by(id=reminder_id))).scalars().first(): + return JSONResponse( + status_code=404, + content={"error": "There is no reminder with that id in the database!"}, + ) + await db_session.execute( + update(Reminder).where(Reminder.id == reminder_id).values(**reminder_patch_in.dict(exclude_none=True)) + ) + await db_session.commit() + + +@reminder.post( + "/", + status_code=201, + responses={404: {"model": ErrorMessage}, 400: {"model": ErrorMessage}}, +) +async def create_reminders( + reminder_in: ReminderCreateIn, + db_session: AsyncSession = Depends(create_database_session), +) -> Optional[JSONResponse]: + """ + ### POST /bot/reminders. + + Create a new reminder. + #### Request body + >>> { + ... 'author': int, + ... 'mentions': list[int], + ... 'content': str, + ... 'expiration': str, # ISO-formatted datetime + ... 'channel_id': int, + ... 'jump_url': str + ... 
} + #### Status codes + - 201: returned on success + - 400: if the body format is invalid + - 404: if no user with the given ID could be found + ## Authentication + Requires an API token. + """ + if not (await db_session.execute(select(User).filter_by(id=reminder_in.author_id))).scalars().first(): + return JSONResponse( + status_code=404, + content={"error": "There is no user with that id in the database!"}, + ) + new_reminder = Reminder(**reminder_in.dict()) + db_session.add(new_reminder) + await db_session.commit() + + +@reminder.delete( + "/{reminder_id}", status_code=204, responses={404: {"model": ErrorMessage}} +) +async def delete_reminders( + reminder_id: int, db_session: AsyncSession = Depends(create_database_session) +) -> Optional[JSONResponse]: + """ + ### DELETE /bot/reminders/. + + Delete the reminder with the given `id`. + #### Status codes + - 204: returned on success + - 404: if a reminder with the given `id` does not exist + ## Authentication + Requires an API token. + """ + if not (reminder_to_delete := (await db_session.execute( + select(Reminder). 
+ filter_by(id=reminder_id)) + ).scalars().first()): + return JSONResponse( + status_code=404, + content={"error": "There is no reminder with that id in the database"}, + ) + await db_session.delete(reminder_to_delete) + await db_session.commit() diff --git a/api/endpoints/reminder/reminder_schemas.py b/api/endpoints/reminder/reminder_schemas.py new file mode 100644 index 0000000..695e4be --- /dev/null +++ b/api/endpoints/reminder/reminder_schemas.py @@ -0,0 +1,59 @@ +from datetime import datetime +from typing import Optional + +from pydantic import BaseModel, Field, validator + + +class ReminderResponse(BaseModel): + """Scheme representing a response for a Reminder.""" + + active: bool + author: int = Field(alias="author_id") + mentions: list[int] + content: str + expiration: datetime # ISO-formatted datetime + id: int + channel_id: int + jump_url: str + failures: int + + @validator("expiration") + def parse_expiration(cls, value: datetime) -> str: # noqa N805 + """A parser that transforms datetimes into isoformat.""" + try: + return value.isoformat() + except (ValueError, TypeError): + raise + + class Config: + """Configuration class to enable ORM mode.""" + + allow_population_by_field_name = False + orm_mode = True + + +class ReminderCreateIn(BaseModel): + """A model representing an incoming Reminder on creation.""" + + author_id: int = Field(alias="author") + mentions: list[int] + content: str + expiration: datetime # ISO-formatted datetime + channel_id: int + jump_url: str + + +class ReminderPatchIn(BaseModel): + """A model representing a batch of data what has to be updated on a Reminder.""" + + mentions: Optional[list[int]] = Field(None) + content: Optional[str] = Field(None) + expiration: Optional[str] = Field(None) # ISO-formatted datetime + failures: Optional[int] = Field(None) + + +class ReminderFilter(BaseModel): + """A schema representing possible choices for filtering Reminder queries.""" + + author_id: Optional[int] = Field(alias="author__id") + 
active: Optional[bool] diff --git a/api/main.py b/api/main.py index 35255d2..8d35869 100644 --- a/api/main.py +++ b/api/main.py @@ -15,16 +15,15 @@ import typing from fastapi import FastAPI -from sqlalchemy import create_engine -from sqlalchemy.orm import sessionmaker from starlette.middleware.authentication import AuthenticationMiddleware - from api.core.middleware import TokenAuthentication, on_auth_error from api.core.schemas import ErrorMessage, HealthCheck from api.core.settings import settings +from api.endpoints import bot_router app = FastAPI() +app.include_router(bot_router) # Add our middleware that will try to authenticate # all requests, excluding /docs and /openapi.json @@ -35,9 +34,6 @@ on_error=on_auth_error, ) -engine = create_engine(settings.database_url) -SessionLocal = sessionmaker(bind=engine) - @app.get("/", response_model=HealthCheck, responses={403: {"model": ErrorMessage}}) async def health_check() -> dict[str, typing.Union[str, int, list[str]]]: diff --git a/tox.ini b/tox.ini index 0fee5e2..74b24fa 100644 --- a/tox.ini +++ b/tox.ini @@ -1,6 +1,6 @@ [flake8] max-line-length=120 -per-file-ignores=__init__.py:F401,tests/*:S,D100,D104 +per-file-ignores=__init__.py:F401,tests/*:S, D,ANN docstring-convention=all import-order-style=pycharm application_import_names=api,tests @@ -8,7 +8,8 @@ exclude=gunicorn.conf.py ignore= # black compatibility: E203 - + # FastAPI dependencies + B008 B311,W503,E226,S311,T000 # Missing Docstrings D100,D104,D105,D107, @@ -24,4 +25,4 @@ ignore= [coverage:run] branch=True -source=api,tests +source=api,tests \ No newline at end of file From 6f6c60ae2f451c5dba105b8b490ad1f8d6816793 Mon Sep 17 00:00:00 2001 From: D0rs4n <41237606+D0rs4n@users.noreply.github.com> Date: Sun, 14 Nov 2021 22:10:32 +0100 Subject: [PATCH 02/10] Implement Asynchronous Reminder tests. - Add an Init sql script to create a test database that can be used for testing purposes. 
- Create a pytest 'conftest' with global fixtures for future testing purposes - Implement Reminder tests in an async manner. - Add an additional fixture to properly access instance attributes in async pytest fixtures --- api/core/database/models/api/bot/user.py | 2 +- api/main.py | 10 +- docker-compose.yaml | 2 + postgres/init.sql | 1 + tests/conftest.py | 74 ++++++ tests/endpoints/__init__.py | 0 tests/endpoints/test_reminders.py | 274 +++++++++++++++++++++++ 7 files changed, 361 insertions(+), 2 deletions(-) create mode 100644 postgres/init.sql create mode 100644 tests/conftest.py create mode 100644 tests/endpoints/__init__.py create mode 100644 tests/endpoints/test_reminders.py diff --git a/api/core/database/models/api/bot/user.py b/api/core/database/models/api/bot/user.py index 46db955..bc9f9b0 100644 --- a/api/core/database/models/api/bot/user.py +++ b/api/core/database/models/api/bot/user.py @@ -38,7 +38,7 @@ class User(Base): in_guild = Column(Boolean, nullable=False, default=True) # IDs of roles the user has on the server - roles = Column(ARRAY(BigInteger()), nullable=False) + roles = Column(ARRAY(BigInteger()), nullable=False, default=[]) @validates("id") def validate_user_id(self, _key: str, user_id: int) -> Union[int, NoReturn]: diff --git a/api/main.py b/api/main.py index 8d35869..ac4b269 100644 --- a/api/main.py +++ b/api/main.py @@ -14,7 +14,9 @@ import datetime import typing -from fastapi import FastAPI +from fastapi import FastAPI, Request +from fastapi.exceptions import RequestValidationError +from fastapi.responses import JSONResponse from starlette.middleware.authentication import AuthenticationMiddleware from api.core.middleware import TokenAuthentication, on_auth_error @@ -35,6 +37,12 @@ ) +@app.exception_handler(RequestValidationError) +async def handle_req_validation_error(req: Request, exc: RequestValidationError) -> JSONResponse: + """A default handler to handle malformed request bodies.""" + return JSONResponse(status_code=400, 
content={"error": exc.errors()}) + + @app.get("/", response_model=HealthCheck, responses={403: {"model": ErrorMessage}}) async def health_check() -> dict[str, typing.Union[str, int, list[str]]]: """Perform an API health check, including timestamp and commit sha.""" diff --git a/docker-compose.yaml b/docker-compose.yaml index 34a2bc9..c15ff63 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -13,6 +13,8 @@ services: interval: 2s timeout: 1s retries: 5 + volumes: + - ./postgres/init.sql:/docker-entrypoint-initdb.d/init.sql web: build: context: . diff --git a/postgres/init.sql b/postgres/init.sql new file mode 100644 index 0000000..eee2d3a --- /dev/null +++ b/postgres/init.sql @@ -0,0 +1 @@ +CREATE DATABASE test; \ No newline at end of file diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 0000000..20e4854 --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,74 @@ +import asyncio +from typing import Generator, Callable +from urllib.parse import urlsplit, urlunsplit + +import pytest +from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine, AsyncEngine +from fastapi import FastAPI +from httpx import AsyncClient + +from api.core.settings import settings +from api.endpoints.dependencies.database import session_factory, engine, create_database_session +from api.core.database import Base +from api.main import app as main_app + +AUTH_HEADER = {"Authorization": f"Bearer {settings.auth_token}"} +test_engine = create_async_engine(settings.database_url, future=True, isolation_level="AUTOCOMMIT") + + +@pytest.fixture(scope="session") +def event_loop(request) -> Generator: + loop = asyncio.get_event_loop() + yield loop + loop.close() + + +@pytest.fixture(scope="session") +async def create_test_database_engine() -> Generator: + test_db_url = urlsplit(settings.database_url)._replace(path="/test") + engine = create_async_engine(urlunsplit(test_db_url), future=True) + yield engine + await engine.dispose() + + +@pytest.fixture() +async 
def async_db_session(create_test_database_engine: AsyncEngine) -> AsyncSession: + async with create_test_database_engine.begin() as conn: + await conn.run_sync(Base.metadata.drop_all) + await conn.run_sync(Base.metadata.create_all) + async with AsyncSession(bind=conn, expire_on_commit=False) as session: + yield session + await session.close() + + +@pytest.fixture(scope="session", autouse=True) +async def global_teardown(create_test_database_engine: AsyncEngine): + yield + async with create_test_database_engine.begin() as conn: + await conn.run_sync(Base.metadata.drop_all) + + +@pytest.fixture() +def override_db_session(async_db_session: AsyncSession): + async def _override_db_session(): + yield async_db_session + + return _override_db_session + + +@pytest.fixture() +def app(override_db_session: Callable): + main_app.dependency_overrides[create_database_session] = override_db_session + return main_app + + +@pytest.fixture() +async def unauthed_client(app: FastAPI): + async with AsyncClient(app=app, base_url="http://testserver") as httpx_client: + yield httpx_client + + +@pytest.fixture() +async def client(app: FastAPI): + async with AsyncClient(app=app, base_url="http://testserver", headers=AUTH_HEADER) as httpx_client: + yield httpx_client diff --git a/tests/endpoints/__init__.py b/tests/endpoints/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/endpoints/test_reminders.py b/tests/endpoints/test_reminders.py new file mode 100644 index 0000000..30fe43d --- /dev/null +++ b/tests/endpoints/test_reminders.py @@ -0,0 +1,274 @@ +from datetime import datetime +from operator import itemgetter + +import pytest +from sqlalchemy import select +from sqlalchemy.ext.asyncio import AsyncSession + +from api.core.database.models.api.bot import Reminder, User +from api.endpoints.reminder.reminder_schemas import ReminderResponse + + +pytestmark = pytest.mark.asyncio + + +class TestUnauthedReminderAPI: + + @pytest.fixture() + def yield_self(self): + yield 
self + + @pytest.fixture(autouse=True) + async def inject_config_unauthed_reminder(self, unauthed_client, app, yield_self): + yield_self.client = unauthed_client + yield_self.app = app + + async def test_reminders_returns_403(self): + url = self.app.url_path_for("get_reminders") + response = await self.client.get(url) + assert response.status_code == 403 + + async def test_reminder_by_id_returns_403(self): + url = self.app.url_path_for("get_reminder_by_id", reminder_id=12) + response = await self.client.get(url) + assert response.status_code == 403 + + async def test_create_returns_403(self): + url = self.app.url_path_for("create_reminders") + response = await self.client.post(url, json={"not": "important"}) + assert response.status_code == 403 + + async def test_patch_returns_403(self): + url = self.app.url_path_for("edit_reminders", reminder_id=12) + response = await self.client.patch(url, json={"not": "important"}) + assert response.status_code == 403 + + async def test_delete_returns_403(self): + url = self.app.url_path_for("delete_reminders", reminder_id=12) + response = await self.client.delete(url) + assert response.status_code == 403 + + +class TestEmptyDatabaseReminderAPI: + @pytest.fixture() + def yield_self(self): + yield self + + @pytest.fixture(autouse=True) + async def inject_config_empty_db(self, client, app, yield_self): + yield_self.client = client + yield_self.app = app + + async def test_list_all_returns_empty_list(self): + url = self.app.url_path_for("get_reminders") + response = await self.client.get(url) + assert response.status_code == 200 + assert response.json() == [] + + async def test_delete_returns_404(self): + url = self.app.url_path_for("delete_reminders", reminder_id=1234) + response = await self.client.delete(url) + assert response.status_code == 404 + + +class TestReminderCreation: + @pytest.fixture() + def yield_self(self): + yield self + + @pytest.fixture(autouse=True) + async def inject_config_reminder_creation_eizo(self, 
async_db_session, client, app, yield_self): + yield_self.client = client + yield_self.app = app + test_user = User(name="test_user", discriminator=1212) + async_db_session.add(test_user) + await async_db_session.commit() + await async_db_session.refresh(test_user) + yield_self.test_user = test_user + + async def test_accepts_valid_data(self, async_db_session): + data = { + "author": self.test_user.id, + "mentions": [8888], + "content": "Test", + "expiration": datetime.utcnow().isoformat(), + "channel_id": 1, + "jump_url": "https://github.com", + } + url = self.app.url_path_for("create_reminders") + response = await self.client.post(url, json=data) + await async_db_session.commit() + assert (await async_db_session.execute(select(Reminder))).scalars().first() + assert response.status_code == 201 + + async def test_rejects_invalid_data(self, async_db_session): + data = { + "author_id": 1, + } + url = self.app.url_path_for("create_reminders") + response = await self.client.post(url, json=data) + assert response.status_code == 400 + assert not (await async_db_session.execute(select(Reminder))).scalars().first() + + class TestReminderDeletion: + @pytest.fixture() + def yield_self(self): + yield self + + @pytest.fixture(scope="function", autouse=True) + async def inject_config_reminder_deletion(self, async_db_session, yield_self: "TestReminderDeletion", client, + app): + yield_self.app = app + yield_self.client = client + test_user = User(name="test_user", discriminator=1212) + test_reminder = Reminder( + channel_id=1, + content="test", + expiration=datetime.now(), + author=test_user, + jump_url="https://github.com", + mentions=[1] + ) + async_db_session.add(test_user) + async_db_session.add(test_reminder) + await async_db_session.commit() + await async_db_session.refresh(test_reminder) + + yield_self.test_reminder = test_reminder + + async def test_delete_unknown_reminder_returns_404(self): + url = self.app.url_path_for("delete_reminders", reminder_id=1234) + response = 
await self.client.delete(url) + assert response.status_code == 404 + + async def test_delete_known_reminder_returns_200(self, async_db_session): + url = self.app.url_path_for("delete_reminders", reminder_id=self.test_reminder.id) + response = await self.client.delete(url) + await async_db_session.commit() + assert response.status_code == 204 + assert not (await async_db_session.execute( + select(Reminder).where(Reminder.id == self.test_reminder.id))).scalars().first() + + class TestReminderList: + @pytest.fixture() + def yield_self(self): + yield self + + @pytest.fixture(scope="function", autouse=True) + async def inject_config_reminder_list(self, async_db_session, yield_self, client, app): + yield_self.client = client + yield_self.app = app + test_user_first = User(name="test_user", discriminator=1212) + test_user_second = User(name="test_user2", discriminator=1212) + test_reminder_one = Reminder( + active=False, + channel_id=1, + content="test", + expiration=datetime.now(), + author=test_user_first, + jump_url="https://github.com" + ) + test_reminder_two = Reminder( + channel_id=1, + content="test2", + expiration=datetime.now(), + author=test_user_second, + jump_url="https://github.com" + ) + + async_db_session.add_all([test_user_first, test_user_second, test_reminder_one, test_reminder_two]) + await async_db_session.commit() + await async_db_session.refresh(test_reminder_one) + await async_db_session.refresh(test_reminder_two) + yield_self.test_reminder_one = ReminderResponse.from_orm(test_reminder_one).dict() + yield_self.test_reminder_two = ReminderResponse.from_orm(test_reminder_two).dict() + + async def test_reminders_in_full_list(self): + url = self.app.url_path_for("get_reminders") + response = await self.client.get(url, ) + assert response.status_code == 200 + assert sorted(response.json(), key=itemgetter("id")) == sorted( + [self.test_reminder_one, self.test_reminder_two], + key=itemgetter("id"), + ) + + async def test_filter_by_active_field(self): + url 
= self.app.url_path_for("get_reminders") + response = await self.client.get(url, params={"active": True}) + assert response.status_code == 200 + assert response.json() == [self.test_reminder_two] + + async def test_filter_by_author_field(self): + url = self.app.url_path_for("get_reminders") + response = await self.client.get(url, params={"author__id": self.test_reminder_one["author"]}) + assert response.status_code == 200 + assert response.json() == [self.test_reminder_one] + + class TestReminderRetrieve: + + @pytest.fixture() + def yield_self(self): + yield self + + @pytest.fixture(scope="function", autouse=True) + async def inject_config_reminder_retrieve(self, async_db_session, client, app, yield_self): + yield_self.app = app + yield_self.client = client + + test_user = User(name="test_user", discriminator=1212) + test_reminder = Reminder( + channel_id=1, + content="test", + expiration=datetime.now(), + author=test_user, + jump_url="https://github.com" + ) + async_db_session.add(test_user) + async_db_session.add(test_reminder) + await async_db_session.commit() + await async_db_session.refresh(test_reminder) + + yield_self.test_reminder = test_reminder + + async def test_retrieve_unknown_returns_404(self): + url = self.app.url_path_for("get_reminder_by_id", reminder_id=1234) + response = await self.client.get(url) + assert response.status_code == 404 + + async def test_retrieve_known_returns_200(self): + url = self.app.url_path_for("get_reminder_by_id", reminder_id=self.test_reminder.id) + response = await self.client.get(url) + assert response.status_code == 200 + + class TestReminderUpdate: + + @pytest.fixture() + def yield_self(self): + yield self + + @pytest.fixture(scope="function", autouse=True) + async def inject_config_reminder_update(self, async_db_session: AsyncSession, client, app, yield_self): + yield_self.app = app + yield_self.client = client + test_user = User(name="test_user", discriminator=1212) + test_reminder = Reminder( + channel_id=1, + 
content="test", + expiration=datetime.now(), + author=test_user, + jump_url="https://github.com", + mentions=[1] + ) + async_db_session.add(test_user) + async_db_session.add(test_reminder) + await async_db_session.commit() + await async_db_session.refresh(test_reminder) + yield_self.test_data = {"content": "Oops I forgot"} + yield_self.test_reminder = test_reminder + + async def test_patch_updates_record(self, async_db_session: AsyncSession): + url = self.app.url_path_for("edit_reminders", reminder_id=self.test_reminder.id) + response = await self.client.patch(url, json=self.test_data) + await async_db_session.commit() + assert response.status_code == 200 + assert (await async_db_session.execute(select(Reminder).filter_by(id=self.test_reminder.id))).scalars().first().content == self.test_data["content"] \ No newline at end of file From af7c746ac99bcd673ac9a43609374770f6dcd101 Mon Sep 17 00:00:00 2001 From: D0rs4n <41237606+D0rs4n@users.noreply.github.com> Date: Mon, 15 Nov 2021 20:44:32 +0100 Subject: [PATCH 03/10] Provided default values in various database models --- api/core/database/models/api/bot/nomination.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/api/core/database/models/api/bot/nomination.py b/api/core/database/models/api/bot/nomination.py index a60ab06..542f5ea 100644 --- a/api/core/database/models/api/bot/nomination.py +++ b/api/core/database/models/api/bot/nomination.py @@ -12,7 +12,7 @@ class Nomination(Base): __tablename__ = "api_nomination" # Whether this nomination is still relevant. - active = Column(Boolean, nullable=False) + active = Column(Boolean, nullable=False, default=True) user_id = Column( ForeignKey( @@ -28,7 +28,7 @@ class Nomination(Base): id = Column(Integer, primary_key=True, autoincrement=True) # Why the nomination was ended. - end_reason = Column(Text, nullable=False) + end_reason = Column(Text, nullable=False, default="") # When the nomination was ended. 
ended_at = Column(DateTime(True)) From 272bd4863bb37fd927d97f0f27cb7e37286f382e Mon Sep 17 00:00:00 2001 From: D0rs4n <41237606+D0rs4n@users.noreply.github.com> Date: Mon, 15 Nov 2021 20:47:50 +0100 Subject: [PATCH 04/10] Add the test database to the testing workflow --- .github/workflows/lint-test.yml | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/.github/workflows/lint-test.yml b/.github/workflows/lint-test.yml index 2009da9..f5fd89b 100644 --- a/.github/workflows/lint-test.yml +++ b/.github/workflows/lint-test.yml @@ -24,7 +24,7 @@ jobs: PRE_COMMIT_HOME: ${{ github.workspace }}/.cache/pre-commit-cache AUTH_TOKEN: ci-token - DATABASE_URL: postgresql+asyncpg://pysite:pysite@postgres:5432/pysite + DATABASE_URL: postgresql+asyncpg://pysite:pysite@localhost:7777/pysite # Via https://github.com/actions/example-services/blob/main/.github/workflows/postgres-service.yml # services: @@ -52,6 +52,11 @@ jobs: uses: actions/setup-python@v2 with: python-version: '3.9' + + # Start the database early to give it a chance to get ready before + # we start running tests. + - name: Run database using docker-compose + run: docker-compose run -d -p 7777:5432 --name pydis_api postgres # This step caches our Python dependencies. 
To make sure we # only restore a cache when the dependencies, the python version, From edf9fb0c099adf88d8a3ee2d4df26873ac865c8d Mon Sep 17 00:00:00 2001 From: D0rs4n <41237606+D0rs4n@users.noreply.github.com> Date: Sat, 20 Nov 2021 14:46:28 +0100 Subject: [PATCH 05/10] Increase code consistency in the test suite --- .github/workflows/lint-test.yml | 18 ++---------------- docker-compose.yaml | 10 +++++----- tests/conftest.py | 12 ++++++------ tests/endpoints/test_reminders.py | 7 ++++--- 4 files changed, 17 insertions(+), 30 deletions(-) diff --git a/.github/workflows/lint-test.yml b/.github/workflows/lint-test.yml index f5fd89b..9356703 100644 --- a/.github/workflows/lint-test.yml +++ b/.github/workflows/lint-test.yml @@ -24,21 +24,7 @@ jobs: PRE_COMMIT_HOME: ${{ github.workspace }}/.cache/pre-commit-cache AUTH_TOKEN: ci-token - DATABASE_URL: postgresql+asyncpg://pysite:pysite@localhost:7777/pysite - - # Via https://github.com/actions/example-services/blob/main/.github/workflows/postgres-service.yml - # services: - # postgres: - # image: postgres:13 - # env: - # POSTGRES_USER: postgres - # POSTGRES_PASSWORD: postgres - # POSTGRES_DB: postgres - # ports: - # # Assign a random TCP port - # - 5432/tcp - # # needed because the postgres container does not provide a healthcheck - # options: --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5 + DATABASE_URL: postgresql+asyncpg://pydisapi:pydisapi@localhost:7777/pydisapi steps: - name: Add custom PYTHONUSERBASE to PATH @@ -56,7 +42,7 @@ jobs: # Start the database early to give it a chance to get ready before # we start running tests. - name: Run database using docker-compose - run: docker-compose run -d -p 7777:5432 --name pydis_api postgres + run: docker-compose run -d -p 7777:5432 --name pydisapi postgres # This step caches our Python dependencies. 
To make sure we # only restore a cache when the dependencies, the python version, diff --git a/docker-compose.yaml b/docker-compose.yaml index c15ff63..eecbb76 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -5,11 +5,11 @@ services: ports: - "127.0.0.1:7777:5432" environment: - POSTGRES_DB: pysite - POSTGRES_PASSWORD: pysite - POSTGRES_USER: pysite + POSTGRES_DB: pydisapi + POSTGRES_PASSWORD: pydisapi + POSTGRES_USER: pydisapi healthcheck: - test: [ "CMD-SHELL", "pg_isready -U pysite" ] + test: [ "CMD-SHELL", "pg_isready -U pydisapi" ] interval: 2s timeout: 1s retries: 5 @@ -30,6 +30,6 @@ services: - .:/app:ro - ./alembic/versions:/app/alembic/versions environment: - database_url: postgresql+asyncpg://pysite:pysite@postgres:5432/pysite + database_url: postgresql+asyncpg://pydisapi:pydisapi@postgres:5432/pydisapi auth_token: "my_token" debug: 1 diff --git a/tests/conftest.py b/tests/conftest.py index 20e4854..85253d3 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,15 +1,15 @@ import asyncio -from typing import Generator, Callable +from typing import Callable, Generator from urllib.parse import urlsplit, urlunsplit import pytest -from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine, AsyncEngine from fastapi import FastAPI from httpx import AsyncClient +from sqlalchemy.ext.asyncio import AsyncEngine, AsyncSession, create_async_engine -from api.core.settings import settings -from api.endpoints.dependencies.database import session_factory, engine, create_database_session from api.core.database import Base +from api.core.settings import settings +from api.endpoints.dependencies.database import create_database_session from api.main import app as main_app AUTH_HEADER = {"Authorization": f"Bearer {settings.auth_token}"} @@ -53,13 +53,13 @@ def override_db_session(async_db_session: AsyncSession): async def _override_db_session(): yield async_db_session - return _override_db_session + yield _override_db_session @pytest.fixture() def 
app(override_db_session: Callable): main_app.dependency_overrides[create_database_session] = override_db_session - return main_app + yield main_app @pytest.fixture() diff --git a/tests/endpoints/test_reminders.py b/tests/endpoints/test_reminders.py index 30fe43d..cde3ace 100644 --- a/tests/endpoints/test_reminders.py +++ b/tests/endpoints/test_reminders.py @@ -8,7 +8,6 @@ from api.core.database.models.api.bot import Reminder, User from api.endpoints.reminder.reminder_schemas import ReminderResponse - pytestmark = pytest.mark.asyncio @@ -116,7 +115,7 @@ def yield_self(self): yield self @pytest.fixture(scope="function", autouse=True) - async def inject_config_reminder_deletion(self, async_db_session, yield_self: "TestReminderDeletion", client, + async def inject_config_reminder_deletion(self, async_db_session, yield_self, client, app): yield_self.app = app yield_self.client = client @@ -271,4 +270,6 @@ async def test_patch_updates_record(self, async_db_session: AsyncSession): response = await self.client.patch(url, json=self.test_data) await async_db_session.commit() assert response.status_code == 200 - assert (await async_db_session.execute(select(Reminder).filter_by(id=self.test_reminder.id))).scalars().first().content == self.test_data["content"] \ No newline at end of file + assert (await async_db_session.execute( + select(Reminder).filter_by(id=self.test_reminder.id) + )).scalars().first().content == self.test_data["content"] From aa9df4958988f4e52b173a42a1703770a53d2290 Mon Sep 17 00:00:00 2001 From: D0rs4n <41237606+D0rs4n@users.noreply.github.com> Date: Sat, 20 Nov 2021 22:40:37 +0100 Subject: [PATCH 06/10] Create test databases in a pytest fixture --- docker-compose.yaml | 2 -- postgres/init.sql | 1 - tests/conftest.py | 12 ++++++++---- 3 files changed, 8 insertions(+), 7 deletions(-) delete mode 100644 postgres/init.sql diff --git a/docker-compose.yaml b/docker-compose.yaml index eecbb76..5e8c0b4 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ 
-13,8 +13,6 @@ services: interval: 2s timeout: 1s retries: 5 - volumes: - - ./postgres/init.sql:/docker-entrypoint-initdb.d/init.sql web: build: context: . diff --git a/postgres/init.sql b/postgres/init.sql deleted file mode 100644 index eee2d3a..0000000 --- a/postgres/init.sql +++ /dev/null @@ -1 +0,0 @@ -CREATE DATABASE test; \ No newline at end of file diff --git a/tests/conftest.py b/tests/conftest.py index 85253d3..a260705 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -5,6 +5,7 @@ import pytest from fastapi import FastAPI from httpx import AsyncClient +from sqlalchemy import text from sqlalchemy.ext.asyncio import AsyncEngine, AsyncSession, create_async_engine from api.core.database import Base @@ -25,10 +26,13 @@ def event_loop(request) -> Generator: @pytest.fixture(scope="session") async def create_test_database_engine() -> Generator: - test_db_url = urlsplit(settings.database_url)._replace(path="/test") - engine = create_async_engine(urlunsplit(test_db_url), future=True) - yield engine - await engine.dispose() + async with test_engine.begin() as conn: + await conn.execute(text("CREATE DATABASE test;")) + test_db_url = urlsplit(settings.database_url)._replace(path="/test") + engine = create_async_engine(urlunsplit(test_db_url), future=True) + yield engine + await engine.dispose() + await conn.execute(text("DROP DATABASE test;")) @pytest.fixture() From a1b98e28ebe50fca6620b7f585991725d9d3d9c8 Mon Sep 17 00:00:00 2001 From: D0rs4n <41237606+D0rs4n@users.noreply.github.com> Date: Mon, 22 Nov 2021 18:32:11 +0100 Subject: [PATCH 07/10] Implement parallel testing using pytest-xdist --- .github/workflows/lint-test.yml | 2 +- poetry.lock | 91 +++++++++++++++++++++++---------- pyproject.toml | 1 + tests/conftest.py | 8 +-- 4 files changed, 71 insertions(+), 31 deletions(-) diff --git a/.github/workflows/lint-test.yml b/.github/workflows/lint-test.yml index 9356703..d0f7a63 100644 --- a/.github/workflows/lint-test.yml +++ b/.github/workflows/lint-test.yml 
@@ -91,7 +91,7 @@ jobs: [flake8] %(code)s: %(text)s'" - name: Run pytest - run: pytest + run: pytest -n auto env: POSTGRES_HOST: localhost # Get the published port. diff --git a/poetry.lock b/poetry.lock index 611c4a9..72e7e44 100644 --- a/poetry.lock +++ b/poetry.lock @@ -58,7 +58,7 @@ test = ["pycodestyle (>=2.7.0,<2.8.0)", "flake8 (>=3.9.2,<3.10.0)", "uvloop (>=0 name = "atomicwrites" version = "1.4.0" description = "Atomic file writes." -category = "dev" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" @@ -66,7 +66,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" name = "attrs" version = "21.2.0" description = "Classes Without Boilerplate" -category = "dev" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" @@ -145,6 +145,17 @@ category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +[[package]] +name = "execnet" +version = "1.9.0" +description = "execnet: rapid multi-Python deployment" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[package.extras] +testing = ["pre-commit"] + [[package]] name = "fastapi" version = "0.65.3" @@ -413,7 +424,7 @@ python-versions = ">=3.5" name = "iniconfig" version = "1.1.1" description = "iniconfig: brain-dead simple config-ini parsing" -category = "dev" +category = "main" optional = false python-versions = "*" @@ -468,7 +479,7 @@ python-versions = "*" name = "packaging" version = "21.2" description = "Core utilities for Python packages" -category = "dev" +category = "main" optional = false python-versions = ">=3.6" @@ -518,7 +529,7 @@ test = ["appdirs (==1.4.4)", "pytest (>=6)", "pytest-cov (>=2.7)", "pytest-mock name = "pluggy" version = "1.0.0" description = "plugin and hook calling mechanisms for python" -category = "dev" +category = "main" optional = false python-versions = ">=3.6" @@ -530,7 
+541,7 @@ testing = ["pytest", "pytest-benchmark"] name = "psutil" version = "5.8.0" description = "Cross-platform lib for process and system monitoring in Python." -category = "dev" +category = "main" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" @@ -541,7 +552,7 @@ test = ["ipaddress", "mock", "unittest2", "enum34", "pywin32", "wmi"] name = "py" version = "1.11.0" description = "library with cross-python path, ini-parsing, io, code, log facilities" -category = "dev" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" @@ -594,7 +605,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" name = "pyparsing" version = "2.4.7" description = "Python parsing module" -category = "dev" +category = "main" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" @@ -602,7 +613,7 @@ python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" name = "pytest" version = "6.2.5" description = "pytest: simple powerful testing with Python" -category = "dev" +category = "main" optional = false python-versions = ">=3.6" @@ -633,6 +644,37 @@ pytest = ">=5.4.0" [package.extras] testing = ["coverage", "hypothesis (>=5.7.1)"] +[[package]] +name = "pytest-forked" +version = "1.3.0" +description = "run tests in isolated forked subprocesses" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[package.dependencies] +py = "*" +pytest = ">=3.10" + +[[package]] +name = "pytest-xdist" +version = "2.4.0" +description = "pytest xdist plugin for distributed testing and loop-on-failing modes" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +execnet = ">=1.1" +psutil = {version = ">=3.0", optional = true, markers = "extra == \"psutil\""} +pytest = ">=6.0.0" +pytest-forked = "*" + +[package.extras] +psutil = ["psutil (>=3.0)"] +setproctitle = ["setproctitle"] +testing = ["filelock"] + 
[[package]] name = "python-dotenv" version = "0.19.2" @@ -786,7 +828,7 @@ toml = ">=0.10.0,<0.11.0" name = "toml" version = "0.10.2" description = "Python Library for Tom's Obvious, Minimal Language" -category = "dev" +category = "main" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" @@ -861,7 +903,7 @@ python-versions = ">=3.7" [metadata] lock-version = "1.1" python-versions = "^3.9" -content-hash = "4509f1ab9b861aa6cb13e86308c461f13746f3e1fd4a8e2c8b5a59fec9faba97" +content-hash = "1c1730ba609457df1bc440ef2ebbb28152e7d8f69da4760ba3d3243b35b7ff08" [metadata.files] alembic = [ @@ -919,6 +961,10 @@ colorama = [ {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"}, ] +execnet = [ + {file = "execnet-1.9.0-py2.py3-none-any.whl", hash = "sha256:a295f7cc774947aac58dde7fdc85f4aa00c42adf5d8f5468fc630c1acf30a142"}, + {file = "execnet-1.9.0.tar.gz", hash = "sha256:8f694f3ba9cc92cab508b152dcfe322153975c29bda272e2fd7f3f00f36e47c5"}, +] fastapi = [ {file = "fastapi-0.65.3-py3-none-any.whl", hash = "sha256:d3e3c0ac35110efb22ee3ed28201cf32f9d11a9a0e52d7dd676cad25f5219523"}, {file = "fastapi-0.65.3.tar.gz", hash = "sha256:6ea2286e439c4ced7cce2b2862c25859601bf327a515c12dd6e431ef5d49d12f"}, @@ -1073,9 +1119,6 @@ markupsafe = [ {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d7d807855b419fc2ed3e631034685db6079889a1f01d5d9dac950f764da3dad"}, {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:add36cb2dbb8b736611303cd3bfcee00afd96471b09cda130da3581cbdc56a6d"}, {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = 
"sha256:168cd0a3642de83558a5153c8bd34f175a9a6e7f6dc6384b9655d2697312a646"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4dc8f9fb58f7364b63fd9f85013b780ef83c11857ae79f2feda41e270468dd9b"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:20dca64a3ef2d6e4d5d615a3fd418ad3bde77a47ec8a23d984a12b5b4c74491a"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cdfba22ea2f0029c9261a4bd07e830a8da012291fbe44dc794e488b6c9bb353a"}, {file = "MarkupSafe-2.0.1-cp310-cp310-win32.whl", hash = "sha256:99df47edb6bda1249d3e80fdabb1dab8c08ef3975f69aed437cb69d0a5de1e28"}, {file = "MarkupSafe-2.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:e0f138900af21926a02425cf736db95be9f4af72ba1bb21453432a07f6082134"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f9081981fe268bd86831e5c75f7de206ef275defcb82bc70740ae6dc507aee51"}, @@ -1087,9 +1130,6 @@ markupsafe = [ {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf5d821ffabf0ef3533c39c518f3357b171a1651c1ff6827325e4489b0e46c3c"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0d4b31cc67ab36e3392bbf3862cfbadac3db12bdd8b02a2731f509ed5b829724"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:baa1a4e8f868845af802979fcdbf0bb11f94f1cb7ced4c4b8a351bb60d108145"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:deb993cacb280823246a026e3b2d81c493c53de6acfd5e6bfe31ab3402bb37dd"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:63f3268ba69ace99cab4e3e3b5840b03340efed0948ab8f78d2fd87ee5442a4f"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = 
"sha256:8d206346619592c6200148b01a2142798c989edcb9c896f9ac9722a99d4e77e6"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-win32.whl", hash = "sha256:6c4ca60fa24e85fe25b912b01e62cb969d69a23a5d5867682dd3e80b5b02581d"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b2f4bf27480f5e5e8ce285a8c8fd176c0b03e93dcc6646477d4630e83440c6a9"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0717a7390a68be14b8c793ba258e075c6f4ca819f15edfc2a3a027c823718567"}, @@ -1101,9 +1141,6 @@ markupsafe = [ {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e9936f0b261d4df76ad22f8fee3ae83b60d7c3e871292cd42f40b81b70afae85"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2a7d351cbd8cfeb19ca00de495e224dea7e7d919659c2841bbb7f420ad03e2d6"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:60bf42e36abfaf9aff1f50f52644b336d4f0a3fd6d8a60ca0d054ac9f713a864"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d6c7ebd4e944c85e2c3421e612a7057a2f48d478d79e61800d81468a8d842207"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f0567c4dc99f264f49fe27da5f735f414c4e7e7dd850cfd8e69f0862d7c74ea9"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:89c687013cb1cd489a0f0ac24febe8c7a666e6e221b783e53ac50ebf68e45d86"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-win32.whl", hash = "sha256:a30e67a65b53ea0a5e62fe23682cfe22712e01f453b95233b25502f7c61cb415"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:611d1ad9a4288cf3e3c16014564df047fe08410e628f89805e475368bd304914"}, {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5bb28c636d87e840583ee3adeb78172efc47c8b26127267f54a9c0ec251d41a9"}, @@ -1116,9 +1153,6 @@ markupsafe 
= [ {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6fcf051089389abe060c9cd7caa212c707e58153afa2c649f00346ce6d260f1b"}, {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5855f8438a7d1d458206a2466bf82b0f104a3724bf96a1c781ab731e4201731a"}, {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3dd007d54ee88b46be476e293f48c85048603f5f516008bee124ddd891398ed6"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:aca6377c0cb8a8253e493c6b451565ac77e98c2951c45f913e0b52facdcff83f"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:04635854b943835a6ea959e948d19dcd311762c5c0c6e1f0e16ee57022669194"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6300b8454aa6930a24b9618fbb54b5a68135092bc666f7b06901f897fa5c2fee"}, {file = "MarkupSafe-2.0.1-cp38-cp38-win32.whl", hash = "sha256:023cb26ec21ece8dc3907c0e8320058b2e0cb3c55cf9564da612bc325bed5e64"}, {file = "MarkupSafe-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:984d76483eb32f1bcb536dc27e4ad56bba4baa70be32fa87152832cdd9db0833"}, {file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2ef54abee730b502252bcdf31b10dacb0a416229b72c18b19e24a4509f273d26"}, @@ -1131,9 +1165,6 @@ markupsafe = [ {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c47adbc92fc1bb2b3274c4b3a43ae0e4573d9fbff4f54cd484555edbf030baf1"}, {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:37205cac2a79194e3750b0af2a5720d95f786a55ce7df90c3af697bfa100eaac"}, {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = 
"sha256:1f2ade76b9903f39aa442b4aadd2177decb66525062db244b35d71d0ee8599b6"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4296f2b1ce8c86a6aea78613c34bb1a672ea0e3de9c6ba08a960efe0b0a09047"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f02365d4e99430a12647f09b6cc8bab61a6564363f313126f775eb4f6ef798e"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5b6d930f030f8ed98e3e6c98ffa0652bdb82601e7a016ec2ab5d7ff23baa78d1"}, {file = "MarkupSafe-2.0.1-cp39-cp39-win32.whl", hash = "sha256:10f82115e21dc0dfec9ab5c0223652f7197feb168c940f3ef61563fc2d6beb74"}, {file = "MarkupSafe-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:693ce3f9e70a6cf7d2fb9e6c9d8b204b6b39897a2c4a1aa65728d5ac97dcc1d8"}, {file = "MarkupSafe-2.0.1.tar.gz", hash = "sha256:594c67807fb16238b30c44bdf74f36c02cdf22d1c8cda91ef8a0ed8dabf5620a"}, @@ -1256,6 +1287,14 @@ pytest-asyncio = [ {file = "pytest-asyncio-0.15.1.tar.gz", hash = "sha256:2564ceb9612bbd560d19ca4b41347b54e7835c2f792c504f698e05395ed63f6f"}, {file = "pytest_asyncio-0.15.1-py3-none-any.whl", hash = "sha256:3042bcdf1c5d978f6b74d96a151c4cfb9dcece65006198389ccd7e6c60eb1eea"}, ] +pytest-forked = [ + {file = "pytest-forked-1.3.0.tar.gz", hash = "sha256:6aa9ac7e00ad1a539c41bec6d21011332de671e938c7637378ec9710204e37ca"}, + {file = "pytest_forked-1.3.0-py2.py3-none-any.whl", hash = "sha256:dc4147784048e70ef5d437951728825a131b81714b398d5d52f17c7c144d8815"}, +] +pytest-xdist = [ + {file = "pytest-xdist-2.4.0.tar.gz", hash = "sha256:89b330316f7fc475f999c81b577c2b926c9569f3d397ae432c0c2e2496d61ff9"}, + {file = "pytest_xdist-2.4.0-py3-none-any.whl", hash = "sha256:7b61ebb46997a0820a263553179d6d1e25a8c50d8a8620cd1aa1e20e3be99168"}, +] python-dotenv = [ {file = "python-dotenv-0.19.2.tar.gz", hash = "sha256:a5de49a31e953b45ff2d2fd434bbc2670e8db5273606c1e737cc6b93eff3655f"}, {file = "python_dotenv-0.19.2-py2.py3-none-any.whl", hash = 
"sha256:32b2bdc1873fd3a3c346da1c6db83d0053c3c62f28f1f38516070c4c8971b1d3"}, diff --git a/pyproject.toml b/pyproject.toml index 8a3bd76..75b4088 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -14,6 +14,7 @@ asyncpg = "^0.24" alembic = "^1.6.5" uvicorn = {extras = ["standard"], version = "^0.14.0"} gunicorn = "^20.1.0" +pytest-xdist = {extras = ["psutil"], version = "^2.4.0"} [tool.poetry.dev-dependencies] flake8 = "^3.9.2" diff --git a/tests/conftest.py b/tests/conftest.py index a260705..547abfb 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -25,14 +25,14 @@ def event_loop(request) -> Generator: @pytest.fixture(scope="session") -async def create_test_database_engine() -> Generator: +async def create_test_database_engine(worker_id) -> Generator: async with test_engine.begin() as conn: - await conn.execute(text("CREATE DATABASE test;")) - test_db_url = urlsplit(settings.database_url)._replace(path="/test") + await conn.execute(text(f"CREATE DATABASE {worker_id}_test;")) + test_db_url = urlsplit(settings.database_url)._replace(path=f"/{worker_id}_test") engine = create_async_engine(urlunsplit(test_db_url), future=True) yield engine await engine.dispose() - await conn.execute(text("DROP DATABASE test;")) + await conn.execute(text(f"DROP DATABASE {worker_id}_test;")) @pytest.fixture() From 8404227aea70d1bcf158de889a14c19f49e590cb Mon Sep 17 00:00:00 2001 From: D0rs4n <41237606+D0rs4n@users.noreply.github.com> Date: Tue, 23 Nov 2021 17:26:27 +0100 Subject: [PATCH 08/10] Add support to run tests in docker - This commit adds a task that runs the tests inside docker - It also introduces changes to the README regarding the instructions of how to run tests. 
--- Dockerfile | 7 ++++--- README.md | 14 ++++++++++++++ docker-compose.yaml | 3 +++ pyproject.toml | 3 ++- 4 files changed, 23 insertions(+), 4 deletions(-) diff --git a/Dockerfile b/Dockerfile index 9a70e27..025c521 100644 --- a/Dockerfile +++ b/Dockerfile @@ -28,11 +28,12 @@ WORKDIR $INSTALL_DIR COPY "pyproject.toml" "poetry.lock" ./ RUN poetry install --no-dev -FROM base as development -WORKDIR $APP_DIR +FROM builder as development ENV FASTAPI_ENV=development COPY --from=builder $INSTALL_DIR $INSTALL_DIR - +WORKDIR $INSTALL_DIR +RUN poetry install +WORKDIR $APP_DIR COPY . . CMD ["sh", "-c", "alembic upgrade head && uvicorn api.main:app --host 0.0.0.0 --port 8000 --reload"] diff --git a/README.md b/README.md index 423cbf8..b8044dd 100644 --- a/README.md +++ b/README.md @@ -29,3 +29,17 @@ Another option is by using [Docker](https://www.docker.com/). After installing D With the project running in docker, open another terminal and run `poetry run task revision "Migration message here."` This will create a migration file in the path `alembic/versions`. Make sure to check it over, and fix any linting issues. +### Running tests +In order to run the tests, you need to have a PostgreSQL database up and running. +The easiest (and currently supported) way to do this is using Docker and docker-compose: + +First you have to start the project: +``` +docker-compose up +``` + +Then, when everything is set, you can simply run: +``` +poetry run task test +``` +That will automatically run the tests inside a Docker container.
diff --git a/docker-compose.yaml b/docker-compose.yaml index 5e8c0b4..f391ca6 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -27,6 +27,9 @@ services: volumes: - .:/app:ro - ./alembic/versions:/app/alembic/versions + # Pytest-cache dependencies + - ./.pytest_cache:/app/.pytest_cache + - ./.hypothesis:/app/.hypothesis environment: database_url: postgresql+asyncpg://pydisapi:pydisapi@postgres:5432/pydisapi auth_token: "my_token" diff --git a/pyproject.toml b/pyproject.toml index 75b4088..f04dc6f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -14,7 +14,6 @@ asyncpg = "^0.24" alembic = "^1.6.5" uvicorn = {extras = ["standard"], version = "^0.14.0"} gunicorn = "^20.1.0" -pytest-xdist = {extras = ["psutil"], version = "^2.4.0"} [tool.poetry.dev-dependencies] flake8 = "^3.9.2" @@ -28,6 +27,7 @@ flake8-tidy-imports = "^4.3.0" pep8-naming = "^0.11.1" pytest = "^6.2.4" pytest-asyncio = "^0.15.1" +pytest-xdist = {extras = ["psutil"], version = "^2.4.0"} httpx = "^0.18.2" hypothesis = "^6.14.0" black = "^21.6b0" @@ -41,3 +41,4 @@ build-backend = "poetry.core.masonry.api" [tool.taskipy.tasks] lint = "pre-commit run --all-files" revision = "docker-compose exec web alembic revision --autogenerate -m" +test = "docker-compose exec web pytest" From 2f62bf5753f27ef986f82ab762f09d6a32d83054 Mon Sep 17 00:00:00 2001 From: D0rs4n <41237606+D0rs4n@users.noreply.github.com> Date: Wed, 24 Nov 2021 18:01:00 +0100 Subject: [PATCH 09/10] Add active field to the Reminder patch endpoint - This commit also patches a minor issue regarding how pydantic and FastAPI parses the `expiration` field when issuing a PATCH request. 
--- api/endpoints/reminder/reminder_endpoints.py | 1 + api/endpoints/reminder/reminder_schemas.py | 3 ++- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/api/endpoints/reminder/reminder_endpoints.py b/api/endpoints/reminder/reminder_endpoints.py index b62f378..3fee274 100644 --- a/api/endpoints/reminder/reminder_endpoints.py +++ b/api/endpoints/reminder/reminder_endpoints.py @@ -133,6 +133,7 @@ async def edit_reminders( All fields in the request body are optional. #### Request body >>> { + ... 'active': bool, ... 'mentions': list[int], ... 'content': str, ... 'expiration': str, # ISO-formatted datetime diff --git a/api/endpoints/reminder/reminder_schemas.py b/api/endpoints/reminder/reminder_schemas.py index 695e4be..5dc6bb5 100644 --- a/api/endpoints/reminder/reminder_schemas.py +++ b/api/endpoints/reminder/reminder_schemas.py @@ -46,9 +46,10 @@ class ReminderCreateIn(BaseModel): class ReminderPatchIn(BaseModel): """A model representing a batch of data what has to be updated on a Reminder.""" + active: Optional[bool] = Field(None) mentions: Optional[list[int]] = Field(None) content: Optional[str] = Field(None) - expiration: Optional[str] = Field(None) # ISO-formatted datetime + expiration: Optional[datetime] = Field(None) # ISO-formatted datetime failures: Optional[int] = Field(None) From 7a7f02096905e9a4fc5a6aca0a03ef6034364242 Mon Sep 17 00:00:00 2001 From: D0rs4n <41237606+D0rs4n@users.noreply.github.com> Date: Mon, 29 Nov 2021 21:53:50 +0100 Subject: [PATCH 10/10] Increase code consistency in conftest --- tests/conftest.py | 23 ++++++++++++++++------- 1 file changed, 16 insertions(+), 7 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 547abfb..fd80aff 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -19,6 +19,7 @@ @pytest.fixture(scope="session") def event_loop(request) -> Generator: + """Yields back an asyncio event loop, then closes it.""" loop = asyncio.get_event_loop() yield loop loop.close() @@ -26,6 +27,11 @@ def 
event_loop(request) -> Generator: @pytest.fixture(scope="session") async def create_test_database_engine(worker_id) -> Generator: + """ + Yields back a database engine object + + This fixture will automatically create and delete the databases based on the current worker's id. + """ async with test_engine.begin() as conn: await conn.execute(text(f"CREATE DATABASE {worker_id}_test;")) test_db_url = urlsplit(settings.database_url)._replace(path=f"/{worker_id}_test") @@ -37,6 +43,12 @@ async def create_test_database_engine(worker_id) -> Generator: @pytest.fixture() async def async_db_session(create_test_database_engine: AsyncEngine) -> AsyncSession: + """ + Yields back an asynchronous database session + + This fixture requests the `create_test_database_engine` fixture as its dependency; this way the session + will point to the database created using the current worker's id. + """ async with create_test_database_engine.begin() as conn: await conn.run_sync(Base.metadata.drop_all) await conn.run_sync(Base.metadata.create_all) @@ -45,15 +57,9 @@ async def async_db_session(create_test_database_engine: AsyncEngine) -> AsyncSes await session.close() -@pytest.fixture(scope="session", autouse=True) -async def global_teardown(create_test_database_engine: AsyncEngine): - yield - async with create_test_database_engine.begin() as conn: - await conn.run_sync(Base.metadata.drop_all) - - @pytest.fixture() def override_db_session(async_db_session: AsyncSession): + """Yields back the modified database session that uses the corresponding database.""" async def _override_db_session(): yield async_db_session @@ -62,17 +68,20 @@ async def _override_db_session(): @pytest.fixture() def app(override_db_session: Callable): + """Yields back a patched version of the main app so that it uses the corresponding DB
session.""" main_app.dependency_overrides[create_database_session] = override_db_session yield main_app @pytest.fixture() async def unauthed_client(app: FastAPI): + """Yields back an unauthenticated HTTP Client""" async with AsyncClient(app=app, base_url="http://testserver") as httpx_client: yield httpx_client @pytest.fixture() async def client(app: FastAPI): + """Yields back an authenticated HTTP Client""" async with AsyncClient(app=app, base_url="http://testserver", headers=AUTH_HEADER) as httpx_client: yield httpx_client