From 43d0c23045b4c6e2ae74692697c21f6eb554215e Mon Sep 17 00:00:00 2001
From: Rubikoid
Date: Wed, 10 Jul 2024 03:35:44 +0300
Subject: [PATCH 01/42] Update requirements

---
 pyproject.toml       |  5 +++--
 requirements-dev.txt |  1 +
 requirements.txt     | 10 +++++++---
 3 files changed, 11 insertions(+), 5 deletions(-)

diff --git a/pyproject.toml b/pyproject.toml
index d8ee947..37674b9 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -27,7 +27,6 @@ documentation = "https://github.com/kksctf/yatb"
 
 # black config enabled.
 [tool.black]
 line-length = 120
-target_version = ['py311']
 include = '\.pyi?$'
 exclude = '''
@@ -56,6 +55,7 @@ fix = false
 # python 3.10 target?
 target-version = "py311"
 
+[tool.ruff.lint]
 task-tags = ["TODO", "FIXME", "WTF", "XXX"]
 
 # rules...
@@ -75,6 +75,7 @@ ignore = [
     "D202", # | pydocstyle | No blank lines allowed after function docstring (found 1) # don't like it
     "D203", # | pydocstyle | 1 blank line required before class docstring # don't like it
     "D205", # | pydocstyle | 1 blank line required between summary line and description # don't like it
+    "D212",
     "EM102", # | ruff? | Exception must not use an f-string literal, assign to variable first # i care, but not this proj
     "ERA001", # | ruff? | commented out code # i know. and what?
     "F401", # | pyflakes | %r imported but unused # pylance cover it
@@ -86,5 +87,5 @@
 
 [tool.pytest.ini_options]
 addopts = "--pyargs app --cov=app"
-env = ["YATB_DEBUG=True", "ENABLE_METRICS=false"]
+env = ["DEBUG=False", "ENABLE_METRICS=false", "TESTING=True"]
 filterwarnings = ["ignore::DeprecationWarning"]
diff --git a/requirements-dev.txt b/requirements-dev.txt
index 660ad42..264c231 100644
--- a/requirements-dev.txt
+++ b/requirements-dev.txt
@@ -2,3 +2,4 @@ pytest==7.1.2
 pytest-cov==2.10.1
 pytest-env==0.6.2
 httpx==0.22.0
+ruff==0.5.1
diff --git a/requirements.txt b/requirements.txt
index 17bce7d..fb8924f 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,5 +1,5 @@
-pydantic-settings==2.0.3
-pydantic==2.1.1
+pydantic-settings==2.14.5
+pydantic==2.8.2
 fastapi==0.101.1
 
 aiofiles==0.8.0
@@ -14,7 +14,11 @@ prometheus-fastapi-instrumentator==5.6.0
 markupsafe==2.0.1
 websockets==10.4
 
+beanie==1.26.0
+
+fastui==0.6.0
+
 # beanie>=1.23.6,<2
-git+https://github.com/Rubikoid/beanie.git@encoder-fix
+# git+https://github.com/Rubikoid/beanie.git@encoder-fix
 
 git+https://github.com/kksctf/formgen.git@master

From e4d88a694fbba3052dcb23067cb5e48c76fb126a Mon Sep 17 00:00:00 2001
From: Rubikoid
Date: Wed, 10 Jul 2024 03:38:32 +0300
Subject: [PATCH 02/42] Update and fix tests.

Disable `DEBUG` for tests (it is not a good idea to run tests on a
`__debug__` build) and move the things the tests need (like the DB-reset
shield) behind a separate `TESTING` env variable.
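For reference, the same temporary-override trick can be done with pytest's
built-in `monkeypatch` fixture instead of the hand-rolled `enable_debug()`
context manager added below (hypothetical test, assuming only the `settings`
object from `app.config`):

```python
# Hypothetical sketch, not part of this patch: monkeypatch flips a setting
# for one test and restores it at teardown, which is the same guarantee
# enable_debug() gives via try/finally.
from app.config import settings


def test_admin_needs_debug(monkeypatch) -> None:
    monkeypatch.setattr(settings, "DEBUG", True)  # undone automatically after the test
    assert settings.DEBUG is True
```

The patch keeps an explicit context manager instead, since that also works
when one test calls another directly (as `test_admin` does with `test_login`).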
--- app/config.py | 4 +++- app/db/beanie.py | 9 +++++--- app/schema/auth/simple.py | 3 ++- app/test/__init__.py | 10 +++++++++ app/test/test_auth.py | 46 ++++++++++++++++++++++---------------- app/test/test_main.py | 10 ++------- app/test/test_tasks_api.py | 4 ++-- 7 files changed, 52 insertions(+), 34 deletions(-) diff --git a/app/config.py b/app/config.py index cb3d7f7..ff0ade9 100644 --- a/app/config.py +++ b/app/config.py @@ -15,6 +15,8 @@ class DefaultTokenError(ValueError): class Settings(BaseSettings): DEBUG: bool = False + TESTING: bool = False + PROFILING: bool = False TOKEN_PATH: str = "/api/users/login" @@ -65,7 +67,7 @@ class Settings(BaseSettings): @model_validator(mode="after") def check_non_default_tokens(self) -> Self: - if self.DEBUG: + if self.DEBUG or self.TESTING: return self token_check_list = ["JWT_SECRET_KEY", "FLAG_SIGN_KEY", "API_TOKEN", "WS_API_TOKEN"] diff --git a/app/db/beanie.py b/app/db/beanie.py index 2b1785e..58056e1 100644 --- a/app/db/beanie.py +++ b/app/db/beanie.py @@ -311,15 +311,18 @@ def __init__(self) -> None: async def init(self) -> None: self.client = AsyncIOMotorClient(str(settings.MONGO), tz_aware=True) self.db = self.client[settings.DB_NAME] - await init_beanie(database=self.db, document_models=[TaskDB, UserDB]) # type: ignore # bad library ;( + await init_beanie( + database=self.db, + document_models=[TaskDB, UserDB] + ) logger.info("Beanie init ok") async def close(self) -> None: logger.info("DB close ok") async def reset_db(self) -> None: - if not settings.DEBUG: - logger.warning("DB Reset without debug") + if not (settings.DEBUG or settings.TESTING): + logger.warning(f"DB Reset without debug ({settings.DEBUG = }) or testing {settings.TESTING = }") return await self.client.drop_database(settings.DB_NAME) diff --git a/app/schema/auth/simple.py b/app/schema/auth/simple.py index f3b7271..c76ca44 100644 --- a/app/schema/auth/simple.py +++ b/app/schema/auth/simple.py @@ -63,12 +63,13 @@ def check_password(self, model: "SimpleAuth.AuthModel") -> bool: def check_valid(self) -> bool: if settings.DEBUG: return True + if ( len(self.internal.username) < SimpleAuth.auth_settings.MIN_USERNAME_LEN or len(self.internal.username) > SimpleAuth.auth_settings.MAX_USERNAME_LEN ): return False - if len(self.internal.password) < SimpleAuth.auth_settings.MIN_PASSWORD_LEN: + if len(self.internal.password) < SimpleAuth.auth_settings.MIN_PASSWORD_LEN: # noqa: SIM103 return False return True diff --git a/app/test/__init__.py b/app/test/__init__.py index 60d026d..b73fdb3 100644 --- a/app/test/__init__.py +++ b/app/test/__init__.py @@ -1,6 +1,7 @@ # ruff: noqa: S101, S106, ANN201, T201 # this is a __test file__ import typing +from contextlib import contextmanager import pytest from fastapi.testclient import TestClient @@ -127,5 +128,14 @@ def client(request): client.__exit__() +@contextmanager +def enable_debug() -> typing.Generator[None, typing.Any, None]: + settings.DEBUG = True + try: + yield + finally: + settings.DEBUG = False + + # from . import test_auth # noqa # from . import test_main # noqa diff --git a/app/test/test_auth.py b/app/test/test_auth.py index 28126f5..3da0589 100644 --- a/app/test/test_auth.py +++ b/app/test/test_auth.py @@ -3,7 +3,7 @@ from fastapi import status from .. import config, schema -from . import ClientEx, app +from . import ClientEx, app, enable_debug from . 
import client as client_cl client = client_cl @@ -12,7 +12,7 @@ def test_register(client: ClientEx): - resp = client.simple_register_raw(username="Rubikoid", password="123") + resp = client.simple_register_raw(username="Rubikoid", password="123456789") assert resp.status_code == status.HTTP_200_OK, resp.text assert resp.text == '"ok"', resp.text @@ -20,16 +20,17 @@ def test_register(client: ClientEx): def test_login(client: ClientEx): test_register(client) - resp = client.simple_login_raw(username="Rubikoid", password="123") + resp = client.simple_login_raw(username="Rubikoid", password="123456789") assert resp.status_code == status.HTTP_200_OK, resp.text assert resp.text == '"ok"', resp.text def test_admin(client: ClientEx): - # config.settings.DEBUG = True - test_login(client) - # config.settings.DEBUG = False + # need to enable debug here, because `Rubikoid-as-default-admin` is debug feature + with enable_debug(): + test_login(client) + resp = client.get(app.url_path_for("api_admin_users_me")) # print(resp.json()) assert resp.status_code == status.HTTP_200_OK, resp.text @@ -37,40 +38,47 @@ def test_admin(client: ClientEx): assert resp.json()["username"] == "Rubikoid", resp.json() +def test_not_admin_without_debug(client: ClientEx): + test_login(client) + resp = client.get(app.url_path_for("api_admin_users_me")) + assert resp.status_code == status.HTTP_403_FORBIDDEN, resp.text + + def test_admin_fail(client: ClientEx): - resp1 = client.simple_register_raw(username="Not_Rubikoid", password="123") - assert resp1.status_code == status.HTTP_200_OK, resp1.text - assert resp1.text == '"ok"', resp1.text + with enable_debug(): + resp1 = client.simple_register_raw(username="Not_Rubikoid", password="123456789") + assert resp1.status_code == status.HTTP_200_OK, resp1.text + assert resp1.text == '"ok"', resp1.text - resp2 = client.simple_login_raw(username="Not_Rubikoid", password="123") - assert resp2.status_code == status.HTTP_200_OK, resp2.text - assert resp2.text == '"ok"', resp2.text + resp2 = client.simple_login_raw(username="Not_Rubikoid", password="123456789") + assert resp2.status_code == status.HTTP_200_OK, resp2.text + assert resp2.text == '"ok"', resp2.text - resp3 = client.get(app.url_path_for("api_admin_users_me")) - assert resp3.status_code == status.HTTP_403_FORBIDDEN, resp3.text + resp3 = client.get(app.url_path_for("api_admin_users_me")) + assert resp3.status_code == status.HTTP_403_FORBIDDEN, resp3.text def test_not_existing_user(client: ClientEx): resp1 = client.post( app.url_path_for("api_auth_simple_login"), - json=LoginForm(username="Not_Existing_Account", password="123").model_dump(mode="json"), + json=LoginForm(username="Not_Existing_Account", password="123456789").model_dump(mode="json"), ) assert resp1.status_code == status.HTTP_401_UNAUTHORIZED, resp1.text def test_invalid_password(client: ClientEx): - resp1 = client.simple_register_raw(username="Not_Rubikoid", password="123") + resp1 = client.simple_register_raw(username="Not_Rubikoid", password="123456789") assert resp1.status_code == status.HTTP_200_OK, resp1.text assert resp1.text == '"ok"', resp1.text - resp2 = client.simple_login_raw(username="Not_Rubikoid", password="1234") + resp2 = client.simple_login_raw(username="Not_Rubikoid", password="1234567890") assert resp2.status_code == status.HTTP_401_UNAUTHORIZED, resp2.text def test_register_existing_user(client: ClientEx): - resp1 = client.simple_register_raw(username="Not_Rubikoid", password="123") + resp1 = client.simple_register_raw(username="Not_Rubikoid", 
password="123456789") assert resp1.status_code == status.HTTP_200_OK, resp1.text assert resp1.text == '"ok"', resp1.text - resp2 = client.simple_register_raw(username="Not_Rubikoid", password="1234") + resp2 = client.simple_register_raw(username="Not_Rubikoid", password="1234567890") assert resp2.status_code == status.HTTP_403_FORBIDDEN, resp2.text diff --git a/app/test/test_main.py b/app/test/test_main.py index d5c280f..f295f18 100644 --- a/app/test/test_main.py +++ b/app/test/test_main.py @@ -1,15 +1,9 @@ -import uuid - -import pytest - -from .. import schema -from . import TestClient, app +from . import TestClient from . import client as client_cl -from . import test_auth client = client_cl -def test_read_main(client: TestClient): +def test_read_main(client: TestClient) -> None: resp = client.get("/") assert resp.status_code == 200 diff --git a/app/test/test_tasks_api.py b/app/test/test_tasks_api.py index da4f8f1..6267b7e 100644 --- a/app/test/test_tasks_api.py +++ b/app/test/test_tasks_api.py @@ -66,7 +66,7 @@ def test_task_create(client: ClientEx): def test_task_solve(client: ClientEx): - client.simple_register_raw(username="Rubikoid", password="123") + test_auth.test_admin(client) tasks: dict[int, schema.Task] = {} # fake array ;) tasks[0] = client.create_task( @@ -97,7 +97,7 @@ def test_task_solve(client: ClientEx): task = tasks[i] assert not task.hidden, f"{task = }" - client.simple_register_raw(username="Rubikoid_user", password="123") + client.simple_register_raw(username="Rubikoid_user", password="123456789") resp1 = client.solve_task_raw("test_task") assert resp1.status_code == status.HTTP_200_OK, resp1.text From 03832ea8b2587fc9fb822cb8a1654bd4bb65ea70 Mon Sep 17 00:00:00 2001 From: Rubikoid Date: Wed, 10 Jul 2024 04:56:26 +0300 Subject: [PATCH 03/42] Small refactoring in a few places --- app/api/admin/__init__.py | 8 ++++---- app/api/admin/admin_tasks.py | 3 ++- app/api/admin/admin_users.py | 36 ++++++++++++++++++------------------ app/api/api_users.py | 4 ++-- app/cli/cmd/__init__.py | 6 +++--- app/schema/ebasemodel.py | 2 +- app/schema/task.py | 14 +++++++++++++- 7 files changed, 43 insertions(+), 30 deletions(-) diff --git a/app/api/admin/__init__.py b/app/api/admin/__init__.py index 0cec0b3..0b1cd80 100644 --- a/app/api/admin/__init__.py +++ b/app/api/admin/__init__.py @@ -18,14 +18,14 @@ async def admin_checker( user: auth.CURR_USER_SAFE, token_header: str | None = Header(None, alias="X-Token"), token_query: str | None = Query(None, alias="token"), -) -> schema.User: +) -> UserDB: if user and user.is_admin: return user if token_header and token_header == settings.API_TOKEN: - return _fake_admin_user + return _fake_admin_user # type: ignore if token_query and token_query == settings.API_TOKEN: - return _fake_admin_user + return _fake_admin_user # type: ignore raise HTTPException( status_code=status.HTTP_403_FORBIDDEN, @@ -33,7 +33,7 @@ async def admin_checker( ) -CURR_ADMIN = Annotated[schema.User, Depends(admin_checker)] +CURR_ADMIN = Annotated[UserDB, Depends(admin_checker)] logger = get_logger("api.admin") router = APIRouter( diff --git a/app/api/admin/admin_tasks.py b/app/api/admin/admin_tasks.py index ef467ff..2d154ce 100644 --- a/app/api/admin/admin_tasks.py +++ b/app/api/admin/admin_tasks.py @@ -1,5 +1,6 @@ import uuid -from typing import Annotated, Mapping +from collections.abc import Mapping +from typing import Annotated from beanie import BulkWriter from beanie.operators import Set diff --git a/app/api/admin/admin_users.py b/app/api/admin/admin_users.py index 
a58f230..75ece1f 100644 --- a/app/api/admin/admin_users.py +++ b/app/api/admin/admin_users.py @@ -16,7 +16,7 @@ async def api_admin_users_internal() -> Mapping[uuid.UUID, schema.User]: return all_users -async def api_admin_user_get_internal(user_id: uuid.UUID) -> UserDB: +async def get_user(user_id: uuid.UUID) -> UserDB: user = await UserDB.find_by_user_uuid(user_id) if not user: raise HTTPException( @@ -27,29 +27,33 @@ async def api_admin_user_get_internal(user_id: uuid.UUID) -> UserDB: return user +CURR_USER = Annotated[UserDB, Depends(get_user)] + + class PasswordChangeForm(BaseModel): new_password: str @router.get("/user/{user_id}") -async def api_admin_user(user_id: uuid.UUID, user: CURR_ADMIN) -> schema.User.admin_model: - ret_user = await api_admin_user_get_internal(user_id) - return ret_user +async def api_admin_user(admin: CURR_ADMIN, user: CURR_USER) -> schema.User.admin_model: + return user @router.post("/user/{user_id}") -async def api_admin_user_edit(new_user: schema.User, user_id: uuid.UUID, user: CURR_ADMIN) -> schema.User.admin_model: - new_user = await db.update_user_admin(user_id, new_user) +async def api_admin_user_edit(new_user: schema.User, user_id: uuid.UUID, admin: CURR_ADMIN) -> schema.User.admin_model: + # new_user = await db.update_user_admin(user_id, new_user) + raise Exception + return new_user @router.get("/users/me") -async def api_admin_users_me(user: CURR_ADMIN) -> schema.User.admin_model: - return user +async def api_admin_users_me(admin: CURR_ADMIN) -> schema.User.admin_model: + return admin @router.get("/users") -async def api_admin_users(user: CURR_ADMIN) -> Mapping[uuid.UUID, schema.User.admin_model]: +async def api_admin_users(admin: CURR_ADMIN) -> Mapping[uuid.UUID, schema.User.admin_model]: all_users = await api_admin_users_internal() return all_users @@ -58,7 +62,7 @@ async def api_admin_users(user: CURR_ADMIN) -> Mapping[uuid.UUID, schema.User.ad async def api_admin_user_edit_password( new_password: PasswordChangeForm, admin: CURR_ADMIN, - user: schema.User = Depends(api_admin_user_get_internal), + user: CURR_USER, ) -> schema.User.admin_model: au = user.auth_source if not isinstance(au, schema.auth.SimpleAuth.AuthModel): @@ -73,7 +77,7 @@ async def api_admin_user_edit_password( @router.get("/user/{user_id}/score") async def api_admin_user_recalc_score( admin: CURR_ADMIN, - user: UserDB = Depends(api_admin_user_get_internal), + user: CURR_USER, ) -> schema.User.admin_model: await user.recalc_score_one() return user @@ -82,18 +86,14 @@ async def api_admin_user_recalc_score( @router.delete("/user/{user_id}") async def api_admin_user_delete( admin: CURR_ADMIN, - user: schema.User = Depends(api_admin_user_get_internal), + user: CURR_USER, ) -> str: - if not user: - raise HTTPException( - status_code=status.HTTP_404_NOT_FOUND, - detail="user not exist", - ) + raise Exception if len(user.solved_tasks) > 0: raise HTTPException( status_code=status.HTTP_403_FORBIDDEN, detail="user have solved tasks", ) - await db.delete_user(user) + # await db.delete_user(user) return "deleted" diff --git a/app/api/api_users.py b/app/api/api_users.py index a022a33..fa0faf9 100644 --- a/app/api/api_users.py +++ b/app/api/api_users.py @@ -1,6 +1,6 @@ import uuid -from collections.abc import Sequence -from typing import Iterable, TypeVar +from collections.abc import Iterable, Sequence +from typing import TypeVar from fastapi import APIRouter, Depends, HTTPException, Request, Response, status diff --git a/app/cli/cmd/__init__.py b/app/cli/cmd/__init__.py index f8dd9a2..5849aa0 
100644 --- a/app/cli/cmd/__init__.py +++ b/app/cli/cmd/__init__.py @@ -16,9 +16,9 @@ from . import load as load_cmds from . import stress as stress_cmds -get_cmds = get_cmds -stress_cmds = stress_cmds -load_cmds = load_cmds +get_cmds = get_cmds # noqa: PLW0127 +stress_cmds = stress_cmds # noqa: PLW0127 +load_cmds = load_cmds # noqa: PLW0127 tasks_to_create: list[RawTask] = [ RawTask( diff --git a/app/schema/ebasemodel.py b/app/schema/ebasemodel.py index b90d371..0204ffe 100644 --- a/app/schema/ebasemodel.py +++ b/app/schema/ebasemodel.py @@ -62,7 +62,7 @@ def build_model( # noqa: PLR0912, C901 # WTF: refactor & simplify new_union = Union[tuple(new_union_base)] # type: ignore # noqa: UP007, PGH003 # так надо. - target_fields[field_name] = ( + target_fields[field_name] = ( # type: ignore # WTF: ??? new_union, field_value, ) diff --git a/app/schema/task.py b/app/schema/task.py index 34d3b5b..a0036b0 100644 --- a/app/schema/task.py +++ b/app/schema/task.py @@ -3,7 +3,7 @@ from typing import Annotated, ClassVar from zoneinfo import ZoneInfo -from pydantic import Field +from pydantic import Field, computed_field from .. import config from ..config import settings @@ -52,6 +52,8 @@ class Task(EBaseModel): "description", "flag", "hidden", + "points", + "solves", } task_id: uuid.UUID = Field(default_factory=uuid.uuid4) @@ -147,6 +149,16 @@ def first_pwned_str(self) -> tuple[uuid.UUID, str]: def short_desc(self) -> str: return f"task_id={self.task_id} task_name={self.task_name} hidden={self.hidden} points={self.scoring.points}" + @computed_field + @property + def points(self) -> int: + return self.scoring.points + + @computed_field + @property + def solves(self) -> int: + return len(self.pwned_by) + class TaskForm(EBaseModel): task_name: str From d3a606f5fc44becffce7d35eab6df28d729e2177 Mon Sep 17 00:00:00 2001 From: Rubikoid Date: Wed, 10 Jul 2024 04:57:31 +0300 Subject: [PATCH 04/42] Add FastUI PoC --- app/__init__.py | 1 + app/view/admin/__init__.py | 40 +++++-- app/view/admin/ng.py | 172 ++++++++++++++++++++++++++++ app/view/templates/admin/base.jhtml | 1 + 4 files changed, 203 insertions(+), 11 deletions(-) create mode 100644 app/view/admin/ng.py diff --git a/app/__init__.py b/app/__init__.py index 49bd99a..b2d56e5 100644 --- a/app/__init__.py +++ b/app/__init__.py @@ -31,6 +31,7 @@ app.include_router(api.router) app.include_router(view.router) +app.include_router(view.admin.api_rotuer) # prometheus from prometheus_fastapi_instrumentator import Instrumentator # noqa diff --git a/app/view/admin/__init__.py b/app/view/admin/__init__.py index 912e317..fa63f9f 100644 --- a/app/view/admin/__init__.py +++ b/app/view/admin/__init__.py @@ -14,16 +14,21 @@ WebSocketException, status, ) +from fastapi.responses import HTMLResponse from fastapi.routing import APIRouter +from fastui import AnyComponent, FastUI, prebuilt_html +from fastui import components as c +from fastui.components.display import DisplayLookup, DisplayMode +from fastui.events import BackEvent, GoToEvent from ... 
import auth, config, schema -from ...api import api_tasks as api_tasks -from ...api import api_users as api_users -from ...api.admin import admin_checker +from ...api import api_tasks, api_users +from ...api.admin import CURR_ADMIN, admin_checker from ...api.admin import admin_tasks as api_admin_tasks from ...api.admin import admin_users as api_admin_users from ...utils.log_helper import get_logger from ...ws import ws_manager +from .ng import api_rotuer, base_router logger = get_logger("view") @@ -33,10 +38,11 @@ prefix="/admin", tags=["admin_view"], ) +router.include_router(base_router) @router.get("/") -async def admin_index(req: Request, resp: Response, user: schema.User = Depends(admin_checker)): +async def admin_index(req: Request, resp: Response, user: CURR_ADMIN): return await response_generator( req, "admin/index.jhtml", @@ -49,7 +55,7 @@ async def admin_index(req: Request, resp: Response, user: schema.User = Depends( @router.get("/tasks") -async def admin_tasks(req: Request, resp: Response, user: schema.User = Depends(admin_checker)): +async def admin_tasks(req: Request, resp: Response, user: CURR_ADMIN): tasks_list = await api_tasks.api_tasks_get(user) return await response_generator( req, @@ -66,7 +72,7 @@ async def admin_tasks(req: Request, resp: Response, user: schema.User = Depends( @router.get("/task/{task_id}") -async def admin_task_get(req: Request, resp: Response, task_id: uuid.UUID, user: schema.User = Depends(admin_checker)): +async def admin_task_get(req: Request, resp: Response, task_id: uuid.UUID, user: CURR_ADMIN): tasks_list = await api_tasks.api_tasks_get(user) selected_task = await api_tasks.api_task_get(task_id, user) return await response_generator( @@ -85,7 +91,7 @@ async def admin_task_get(req: Request, resp: Response, task_id: uuid.UUID, user: @router.get("/users") -async def admin_users(req: Request, resp: Response, user: schema.User = Depends(admin_checker)): +async def admin_users(req: Request, resp: Response, user: CURR_ADMIN): users_dict = await api_admin_users.api_admin_users_internal() return await response_generator( req, @@ -102,19 +108,18 @@ async def admin_users(req: Request, resp: Response, user: schema.User = Depends( @router.get("/user/{user_id}") -async def admin_user_get(req: Request, resp: Response, user_id: uuid.UUID, user: schema.User = Depends(admin_checker)): +async def admin_user_get(req: Request, resp: Response, admin: CURR_ADMIN, user: api_admin_users.CURR_USER): users_dict = await api_admin_users.api_admin_users_internal() - selected_user = await api_admin_users.api_admin_user_get_internal(user_id) return await response_generator( req, "admin/users_admin.jhtml", { "request": req, - "curr_user": user, + "curr_user": admin, "user_class": schema.User, # "user_form_class": schema.UserForm, "users_list": users_dict.values(), - "selected_user": selected_user, + "selected_user": user, }, ignore_admin=False, ) @@ -143,3 +148,16 @@ async def websocker_ep( await websocket.receive_text() except WebSocketDisconnect: ws_manager.disconnect(websocket) + + # return await response_generator( + # req, + # "admin/tasks_admin.jhtml", + # { + # "request": req, + # "curr_user": user, + # "task_class": schema.Task, + # "task_form_class": schema.TaskForm, + # "tasks_list": tasks_list, + # }, + # ignore_admin=False, + # ) diff --git a/app/view/admin/ng.py b/app/view/admin/ng.py new file mode 100644 index 0000000..8175fb2 --- /dev/null +++ b/app/view/admin/ng.py @@ -0,0 +1,172 @@ +import uuid + +from fastapi import ( + Cookie, + Depends, + FastAPI, + 
HTTPException, + Query, + Request, + Response, + WebSocket, + WebSocketDisconnect, + WebSocketException, + status, +) +from fastapi.responses import HTMLResponse +from fastapi.routing import APIRouter +from fastui import AnyComponent, FastUI, prebuilt_html +from fastui import components as c +from fastui.components.display import DisplayLookup, DisplayMode +from fastui.events import BackEvent, GoToEvent + +from ... import auth, config, schema +from ...api import api_tasks, api_users +from ...api.admin import CURR_ADMIN, admin_checker +from ...api.admin import admin_tasks as api_admin_tasks +from ...api.admin import admin_users as api_admin_users +from ...utils.log_helper import get_logger +from ...ws import ws_manager + +logger = get_logger("view") + +base_router = APIRouter( + prefix="/ng", + tags=["admin-ng"], +) + +api_rotuer = APIRouter( + prefix="/api/admin/ng", + tags=["admin-ng-api"], +) + + +def url_gen(req: Request, path: str) -> str: + return str(req.url_for("admin_ng_html_landing", path=path)) + + +def base_page(req: Request, *components: AnyComponent, title: str | None = None) -> list[AnyComponent]: + return [ + c.PageTitle(text=f"YATB Admin — {title if title else 'Root'}"), + c.Navbar( + title="YATB Admin", + title_event=GoToEvent(url=url_gen(req, "")), + start_links=[ + c.Link( + components=[c.Text(text="Tasks")], + on_click=GoToEvent(url=url_gen(req, "tasks")), + active="startswith:/tasks", + ), + c.Link( + components=[c.Text(text="Users")], + on_click=GoToEvent(url=url_gen(req, "users")), + active="startswith:/users", + ), + # c.Link( + # components=[c.Text(text="Components")], + # on_click=GoToEvent(url="/components"), + # active="startswith:/components", + # ), + ], + ), + c.Page( + components=[ + *((c.Heading(text=title),) if title else ()), + *components, + ], + ), + ] + + +@api_rotuer.get("", response_model=FastUI, response_model_exclude_none=True) +async def admin_ng_index(req: Request, admin: CURR_ADMIN) -> list[AnyComponent]: + return base_page(req, c.Text(text="...")) + + +@api_rotuer.get("/tasks", response_model=FastUI, response_model_exclude_none=True) +async def admin_ng_tasks(req: Request, admin: CURR_ADMIN) -> list[AnyComponent]: + tasks = await api_admin_tasks.api_admin_tasks(admin) + + return base_page( + req, + # c.ModelForm(submit_url="", model=schema.TaskForm), + c.Table( + data=[ + schema.Task.admin_model.model_validate(v.model_dump()) + for i, v in sorted(tasks.items(), key=lambda iv: iv[1].task_name) + ], + data_model=schema.Task.admin_model, + columns=[ + DisplayLookup(field="task_name", title="Name", on_click=GoToEvent(url=url_gen(req, "task/{task_id}"))), + DisplayLookup(field="category", title="Category"), + DisplayLookup(field="points", title="Points"), + DisplayLookup(field="solves", title="Solve Count"), + ], + ), + title="Tasks", + ) + + +@api_rotuer.get("/task/{task_id}", response_model=FastUI, response_model_exclude_none=True) +async def admin_ng_task(req: Request, admin: CURR_ADMIN, raw_task: api_admin_tasks.CURR_TASK) -> list[AnyComponent]: + sanitized_task = schema.Task.admin_model.model_validate(raw_task.model_dump()) + + return base_page( + req, + c.Heading(text=sanitized_task.task_name, level=2), + c.Link(components=[c.Text(text="Back")], on_click=BackEvent()), + c.Details( + data=sanitized_task, + # fields=[ + # DisplayLookup(field="task_id", title="ID"), + # ], + ), + title=f"Task - {sanitized_task.task_name}", + ) + + +@api_rotuer.get("/users", response_model=FastUI, response_model_exclude_none=True) +async def admin_ng_users(req: 
Request, admin: CURR_ADMIN) -> list[AnyComponent]: + users = await api_admin_users.api_admin_users(admin) + + return base_page( + req, + # c.ModelForm(submit_url="", model=schema.TaskForm), + c.Table( + data=[ + schema.User.admin_model.model_validate(v.model_dump()) + for i, v in sorted(users.items(), key=lambda iv: iv[1].username) + ], + data_model=schema.User.admin_model, + columns=[ + DisplayLookup(field="username", title="Name", on_click=GoToEvent(url=url_gen(req, "user/{user_id}"))), + DisplayLookup(field="is_admin", title="Admin"), + # DisplayLookup(field="points", title="Points"), + # DisplayLookup(field="solves", title="Solve Count"), + ], + ), + title="Users", + ) + + +@api_rotuer.get("/user/{user_id}", response_model=FastUI, response_model_exclude_none=True) +async def admin_ng_user(req: Request, admin: CURR_ADMIN, raw_user: api_admin_users.CURR_USER) -> list[AnyComponent]: + sanitized_user = schema.User.admin_model.model_validate(raw_user.model_dump()) + + return base_page( + req, + c.Heading(text=sanitized_user.username, level=2), + c.Link(components=[c.Text(text="Back")], on_click=BackEvent()), + c.Details( + data=sanitized_user, + # fields=[ + # DisplayLookup(field="task_id", title="ID"), + # ], + ), + title=f"User - {sanitized_user.username}", + ) + + +@base_router.get("/{path:path}") +async def admin_ng_html_landing() -> HTMLResponse: + return HTMLResponse(prebuilt_html(title="YATB admin..")) diff --git a/app/view/templates/admin/base.jhtml b/app/view/templates/admin/base.jhtml index bb4103d..643864a 100644 --- a/app/view/templates/admin/base.jhtml +++ b/app/view/templates/admin/base.jhtml @@ -7,6 +7,7 @@ {% set head_data.page_name = "YATB ADMIN" %} {% set head_data.pages = { + ("NG ADMIN",): url_for('admin_ng_html_landing', path=""), ("TASKS ADMIN",): url_for('admin_tasks'), ("USERS ADMIN",): url_for('admin_users'), ("Back to board",): url_for('index'), From 2346502c7c4badf09e66052189e9aaed2750f549 Mon Sep 17 00:00:00 2001 From: Rubikoid Date: Thu, 11 Jul 2024 02:39:14 +0300 Subject: [PATCH 05/42] Drop old db useless code --- app/db/__init__.py | 115 ------------------------------ app/db/beanie.py | 9 +-- app/db/db_tasks.py | 174 --------------------------------------------- app/db/db_users.py | 129 --------------------------------- 4 files changed, 3 insertions(+), 424 deletions(-) delete mode 100644 app/db/db_tasks.py delete mode 100644 app/db/db_users.py diff --git a/app/db/__init__.py b/app/db/__init__.py index 8ba57f2..e69de29 100644 --- a/app/db/__init__.py +++ b/app/db/__init__.py @@ -1,115 +0,0 @@ -import os -import pickle -from datetime import datetime - -from pydantic import BaseModel - -from .. import app, db, schema -from ..config import settings -from ..utils.log_helper import get_logger -from .beanie import TaskDB, UserDB - -logger = get_logger("db") - - -class FileDB: - _db = None - _index = None - - def __init__(self): - self.reset_db() - - def reset_db(self): - self._index = { - "tasks": {}, - "users": {}, - "short_urls": {}, - } - self._db = { - "tasks": {}, - "users": {}, - } - - def generate_index(self): - for i, v in self._db["tasks"].items(): - self._index["tasks"][v.task_id] = self.update_task(v) - - for i, v in self._db["users"].items(): - self._index["users"][v.user_id] = self.update_user(v) - - def update_task(self, task: schema.Task): - # regenerate markdown - task.description_html = schema.Task.regenerate_md(task.description) - - return task - - def update_user(self, user: schema.User): - # FIXME: говнокод & быстрофикс. 
- if isinstance(user.auth_source, dict): - original_au = user.auth_source - cls: schema.auth.AuthBase.AuthModel = getattr(schema.auth, user.auth_source["classtype"]).AuthModel - user.auth_source = cls.model_validate(user.auth_source) - logger.warning(f"Found & fixed broken auth source: {original_au} -> {user.auth_source}") - - # admin promote - if user.admin_checker() and not user.is_admin: - logger.warning(f"INIT: Promoting {user} to admin") - user.is_admin = True - - return user - - -_db = FileDB() - - -@app.on_event("startup") -async def startup_event(): - return - global _db - if settings.DB_NAME is None: - _db.reset_db() - logger.warning("TESTING_FileDB loaded") - return - - if not os.path.exists(settings.DB_NAME): - _db._db = { - "tasks": {}, - "users": {}, - } - else: - try: - with open(settings.DB_NAME, "rb") as f: - _db._db = pickle.load(f) - except Exception as ex: - _db._db = { - "tasks": {}, - "users": {}, - } - logger.error(f"Loading db exception, fallback to empty, {ex}") - - _db.generate_index() - logger.warning("FileDB loaded") - # logger.debug(f"FileDB: {_db._db}") - # logger.debug(f"FileDBIndex: {_db._index}") - - -@app.on_event("shutdown") -async def shutdown_event(): - return - global _db - if settings.DB_NAME is None: - return - save_path = settings.DB_NAME / "ressurect_db.db" if settings.DB_NAME.is_dir() else settings.DB_NAME - with open(settings.DB_NAME, "wb") as f: - pickle.dump(_db._db, f) - logger.warning("FileDB saved") - - -def update_entry(obj: BaseModel, data: dict): - for i in data: - if i in obj.__fields__: - setattr(obj, i, data[i]) - - -# from .db_tasks import * # noqa -# from .db_users import * # noqa diff --git a/app/db/beanie.py b/app/db/beanie.py index 58056e1..7dc9455 100644 --- a/app/db/beanie.py +++ b/app/db/beanie.py @@ -311,10 +311,7 @@ def __init__(self) -> None: async def init(self) -> None: self.client = AsyncIOMotorClient(str(settings.MONGO), tz_aware=True) self.db = self.client[settings.DB_NAME] - await init_beanie( - database=self.db, - document_models=[TaskDB, UserDB] - ) + await init_beanie(database=self.db, document_models=[TaskDB, UserDB]) logger.info("Beanie init ok") async def close(self) -> None: @@ -329,12 +326,12 @@ async def reset_db(self) -> None: @app.on_event("startup") -async def startup_event(): +async def startup_event() -> None: await db.init() @app.on_event("shutdown") -async def shutdown_event(): +async def shutdown_event() -> None: await db.close() diff --git a/app/db/db_tasks.py b/app/db/db_tasks.py deleted file mode 100644 index cc62900..0000000 --- a/app/db/db_tasks.py +++ /dev/null @@ -1,174 +0,0 @@ -import datetime -import logging -import uuid -from asyncio import Lock -from typing import Dict, Union - -from .. import schema -from ..config import settings -from ..utils import metrics -from ..utils.log_helper import get_logger -from . import db_users, update_entry - -# import markdown2 - - -logger = get_logger("db.tasks") -db_lock = Lock() - - -async def get_task_uuid(uuid: uuid.UUID) -> schema.Task: - from . import _db - - if uuid in _db._index["tasks"]: - return _db._index["tasks"][uuid] - - -async def get_all_tasks() -> Dict[uuid.UUID, schema.Task]: - from . import _db - - return _db._index["tasks"] - - -async def check_task_uuid(uuid: uuid.UUID) -> bool: - from . import _db - - return uuid in _db._index["tasks"] - - -async def insert_task(new_task: schema.TaskForm, author: schema.User) -> schema.Task: - from . 
import _db - - # task = schema.Task.parse_obj(new_task) # WTF: SHITCODE - task = schema.Task( - task_name=new_task.task_name, - category=new_task.category, - scoring=new_task.scoring, - description=new_task.description, - description_html=schema.Task.regenerate_md(new_task.description), - flag=new_task.flag, - author=(new_task.author if new_task.author != "" else f"@{author.username}"), - ) - - _db._db["tasks"][task.task_id] = task - _db._index["tasks"][task.task_id] = task - return task - - -async def update_task(task: schema.Task, new_task: schema.Task) -> schema.Task: - from . import _db - - logger.debug(f"Update task {task} to {new_task}") - - update_entry( - task, - new_task.dict( - exclude={ - "task_id", - "description_html", - "scoring", - "flag", - "pwned_by", - } - ), - ) - task.scoring = new_task.scoring # fix for json-ing scoring on edit - task.flag = new_task.flag # fix for json-ing flag on edit - - logger.debug(f"Resulting task={task}") - task.description_html = schema.Task.regenerate_md(task.description) - return task - - -async def remove_task(task: schema.Task): - from . import _db - - # TODO: recalc score and something else. - await unsolve_task(task) - del _db._db["tasks"][task.task_id] - del _db._index["tasks"][task.task_id] - - -async def find_task_by_flag(flag: str, user: schema.User) -> Union[schema.Task, None]: - from . import _db - - for task_id, task in _db._db["tasks"].items(): - task: schema.Task # strange solution, but no other ideas - if task.flag.flag_checker(flag, user): - return task - - return None - - -async def solve_task(task: schema.Task, solver: schema.User): - if solver.is_admin and not settings.DEBUG: # if you admin, you can't solve task. - return task.task_id - - if datetime.datetime.now(tz=datetime.UTC) > settings.EVENT_END_TIME: - return task.task_id - - # WTF: UNTEDTED: i belive this will work as a monkey patch for rAcE c0nDiTioN - global db_lock - async with db_lock: - # add references - solv_time = datetime.datetime.now(tz=datetime.UTC) - solver.solved_tasks[task.task_id] = solv_time - task.pwned_by[solver.user_id] = solv_time - - # get previous score - prev_score = task.scoring.points - solver.score += prev_score - - # if do_recalc, recalc all the scoreboard... 
only users, who solved task - do_recalc = task.scoring.solve_task() - if do_recalc: - new_score = task.scoring.points - diff = prev_score - new_score - logger.info(f"Solve task: {task.short_desc()}, oldscore={prev_score}, newscore={new_score}, diff={diff}") - for solver_id in task.pwned_by: - solver_recalc = await db_users.get_user_uuid(solver_id) - solver_recalc.score -= diff - metrics.score_per_user.labels(user_id=solver_recalc.user_id, username=solver_recalc.username).set( - solver_recalc.score - ) - - return task.task_id - - -async def unsolve_task(task: schema.Task) -> schema.Task: - # add references - global db_lock - async with db_lock: - task.pwned_by.clear() - # TODO: оптимизировать эту ебатеку - for _, user in (await db_users.get_all_users()).items(): - if task.task_id in user.solved_tasks: - user.solved_tasks.pop(task.task_id) - # task.scoring - - await recalc_scoreboard() - return task - - -async def recalc_user_score(user: schema.User, _task_cache: Dict[uuid.UUID, schema.Task] = None): - if _task_cache is None: - _task_cache = {} - old_score = user.score - user.score = 0 - for task_id in user.solved_tasks: - if task_id not in _task_cache: - _task_cache[task_id] = await get_task_uuid(task_id) - if _task_cache[task_id] is None: - continue - user.score += _task_cache[task_id].scoring.points - _task_cache[task_id].scoring.set_solves(len(_task_cache[task_id].pwned_by)) - if old_score != user.score: - logger.warning(f"Recalc: smth wrong with {user.short_desc()}, {old_score} != {user.score}!") - - -async def recalc_scoreboard(): - _task_cache: Dict[uuid.UUID, schema.Task] = {} - global db_lock - async with db_lock: - for _, user in (await db_users.get_all_users()).items(): - await recalc_user_score(user, _task_cache) diff --git a/app/db/db_users.py b/app/db/db_users.py deleted file mode 100644 index e58347a..0000000 --- a/app/db/db_users.py +++ /dev/null @@ -1,129 +0,0 @@ -from app.db import update_entry -import uuid -import logging -from typing import Hashable, List, Dict, Optional, Type - -from .. import schema -from ..utils.log_helper import get_logger - -logger = get_logger("db.users") - -# logger.debug(f"GlobalUsers, FileDB: {_db}") - - -async def get_user(username: str) -> Optional[schema.User]: - from . import _db - - for i in _db._index["users"]: - if _db._index["users"][i].username == username: - return _db._index["users"][i] - return None - - -async def get_user_uuid(uuid: uuid.UUID) -> Optional[schema.User]: - from . import _db - - if uuid in _db._index["users"]: - return _db._index["users"][uuid] - - -async def get_user_uniq_field(base: Type[schema.auth.AuthBase.AuthModel], field: Hashable) -> schema.User: - from . import _db - - for i in _db._index["users"]: - if ( - type(_db._index["users"][i].auth_source) == base - and _db._index["users"][i].auth_source.get_uniq_field() == field - ): - return _db._index["users"][i] - return None - - -async def get_all_users() -> Dict[uuid.UUID, schema.User]: - from . import _db - - return _db._db["users"] - - -async def check_user(username: str) -> bool: - from . import _db - - for i in _db._index["users"]: - if _db._index["users"][i].username == username: - return True - return False - - -async def check_user_uuid(uuid: uuid.UUID) -> bool: - from . import _db - - return uuid in _db._index["users"] - - -async def check_user_uniq_field(base: Type[schema.auth.AuthBase.AuthModel], field: Hashable) -> bool: - from . 
import _db - - for i in _db._index["users"]: - if ( - type(_db._index["users"][i].auth_source) == base - and _db._index["users"][i].auth_source.get_uniq_field() == field - ): - return True - return False - - -async def insert_user(auth: schema.auth.TYPING_AUTH): - from . import _db - - # WTF: SHITCODE or not.... :thonk: - user = schema.User(auth_source=auth) - _db._db["users"][user.user_id] = user - _db._index["users"][user.user_id] = user - return user - - -""" -async def insert_oauth_user(oauth_id: int, username: str, country: str): - from . import _db - - # WTF: SHITCODE - user = schema.User( - username=username, - password_hash=None, - country=country, - oauth_id=oauth_id, - ) - _db._db["users"][user.user_id] = user - _db._index["users"][user.user_id] = user - return user -""" - - -async def update_user_admin(user_id: uuid.UUID, new_user: schema.User): - from . import _db - - user: schema.User = _db._index["users"][user_id] - logger.debug(f"Update user {user} to {new_user}") - - update_entry( - user, - new_user.dict( - exclude={ - "user_id", - "password_hash", - "score", - "solved_tasks", - "oauth_id", - } - ), - ) - # user.parse_obj(new_user) - logger.debug(f"Resulting user={user}") - return user - - -async def delete_user(user: schema.User): - from . import _db - - del _db._db["users"][user.user_id] - del _db._index["users"][user.user_id] From 141df5aed753c6ee7b4d7176048f6187ad38cf20 Mon Sep 17 00:00:00 2001 From: Rubikoid Date: Thu, 11 Jul 2024 02:40:37 +0300 Subject: [PATCH 06/42] A little refactor root of app --- app/__init__.py | 19 +------- app/main.py | 115 +++++++++++++++++++++++++----------------------- 2 files changed, 62 insertions(+), 72 deletions(-) diff --git a/app/__init__.py b/app/__init__.py index b2d56e5..320c3f2 100644 --- a/app/__init__.py +++ b/app/__init__.py @@ -5,7 +5,7 @@ from fastapi.staticfiles import StaticFiles from fastapi.templating import Jinja2Templates -from . import utils +from . import api, main, utils, view from .config import settings app = FastAPI( @@ -25,10 +25,7 @@ # for i in loggers: # print(f"LOGGER: {i}") -from . import api # noqa -from . import main # noqa -from . import view # noqa - +main.setup_utils(app) app.include_router(api.router) app.include_router(view.router) app.include_router(view.admin.api_rotuer) @@ -44,15 +41,3 @@ env_var_name="ENABLE_METRICS", ) instrumentator.instrument(app).expose(app, endpoint=expose_url) -# utils.metrics.bad_solves_per_user - -""" -@app.on_event("startup") -def startup_event(): - metrics.load_all_metrics() - - -@app.on_event("shutdown") -def shutdown_event(): - metrics.save_all_metrics() -""" diff --git a/app/main.py b/app/main.py index 61bd116..d917997 100644 --- a/app/main.py +++ b/app/main.py @@ -1,68 +1,73 @@ -from . import app, root_logger -from .api import api_tasks +import time + +from fastapi import FastAPI, Request + +from . 
import root_logger from .config import settings -""" -@app.middleware("http") -async def session_middleware(request: Request, call_next): - # start_time = time.time() + +async def simple_timing_middleware(request: Request, call_next): + start_time = time.time() response = await call_next(request) - # process_time = time.time() - start_time - # response.headers["X-Process-Time"] = str(process_time) + process_time = time.time() - start_time + response.headers["X-Process-Time"] = str(process_time) return response -""" -if settings.DEBUG: - try: - import fastapi # noqa - import pydantic # noqa - from asgi_server_timing import ServerTimingMiddleware # noqa # type: ignore - root_logger.warning("Timing debug loading") +def setup_utils(app: FastAPI): + if settings.DEBUG: + app.middleware("http")(simple_timing_middleware) + + try: + import fastapi # noqa + import pydantic # noqa + from asgi_server_timing import ServerTimingMiddleware # noqa # type: ignore + + root_logger.warning("Timing debug loading") - app.add_middleware( - ServerTimingMiddleware, - calls_to_track={ - "1deps": (fastapi.routing.solve_dependencies,), # type: ignore - "2main": (fastapi.routing.run_endpoint_function,), - # "3valid": (pydantic.fields.ModelField.validate,), - "4encode": (fastapi.encoders.jsonable_encoder,), # type: ignore - "5render": ( - fastapi.responses.JSONResponse.render, - fastapi.responses.ORJSONResponse.render, - fastapi.responses.HTMLResponse.render, - fastapi.responses.PlainTextResponse.render, - ), - # "6tasks": (api_tasks.api_tasks_get,), - # "6task": (api_tasks.api_task_get,), - }, - ) - root_logger.warning("Timing debug loaded") + app.add_middleware( + ServerTimingMiddleware, + calls_to_track={ + "1deps": (fastapi.routing.solve_dependencies,), # type: ignore + "2main": (fastapi.routing.run_endpoint_function,), + # "3valid": (pydantic.fields.ModelField.validate,), + "4encode": (fastapi.encoders.jsonable_encoder,), # type: ignore + "5render": ( + fastapi.responses.JSONResponse.render, + fastapi.responses.ORJSONResponse.render, + fastapi.responses.HTMLResponse.render, + fastapi.responses.PlainTextResponse.render, + ), + # "6tasks": (api_tasks.api_tasks_get,), + # "6task": (api_tasks.api_task_get,), + }, + ) + root_logger.warning("Timing debug loaded") - except ModuleNotFoundError: - root_logger.warning("No timing extensions found") + except ModuleNotFoundError: + root_logger.warning("No timing extensions found") -if settings.PROFILING: - try: - from fastapi import Request - from fastapi.responses import HTMLResponse - from pyinstrument import Profiler + if settings.PROFILING: + try: + from fastapi import Request + from fastapi.responses import HTMLResponse + from pyinstrument import Profiler - root_logger.warning("pyinstrument loading") + root_logger.warning("pyinstrument loading") - @app.middleware("http") - async def profile_request(request: Request, call_next): - profiling = request.query_params.get("profile", False) - if profiling: - profiler = Profiler(async_mode="enabled") # interval=settings.profiling_interval - profiler.start() - await call_next(request) - profiler.stop() - return HTMLResponse(profiler.output_html()) - else: - return await call_next(request) + @app.middleware("http") + async def profile_request(request: Request, call_next): + profiling = request.query_params.get("profile", False) + if profiling: + profiler = Profiler(async_mode="enabled") # interval=settings.profiling_interval + profiler.start() + await call_next(request) + profiler.stop() + return 
HTMLResponse(profiler.output_html())
+                else:
+                    return await call_next(request)
+
+            root_logger.warning("pyinstrument loaded")
+
+        except ModuleNotFoundError:
+            root_logger.warning("No pyinstrument found")

From f09de6fb1c7fa40d523057708967cce26cc0faea Mon Sep 17 00:00:00 2001
From: Rubikoid
Date: Sun, 14 Jul 2024 20:43:00 +0300
Subject: [PATCH 07/42] Init dynamic_tasks_app - microservice for dynamic
 task management. Achieves container building

---
 dynamic_tasks_app/__init__.py               |  50 ++++
 dynamic_tasks_app/__main__.py               |  28 +++
 dynamic_tasks_app/config.py                 |  59 +++++
 dynamic_tasks_app/connectors/__init__.py    |  43 ++++
 dynamic_tasks_app/connectors/kub.py         | 216 ++++++++++++++++++
 dynamic_tasks_app/readme.md                 |  72 ++++++
 dynamic_tasks_app/tests/__init__.py         |   0
 .../tests/examples/builder/Dockerfile       |  11 +
 .../tests/examples/builder/build.py         |  15 ++
 .../tests/examples/builder/requirements.txt |   4 +
 requirements-dynamic.txt                    |  12 +
 requirements.txt                            |   2 +-
 12 files changed, 511 insertions(+), 1 deletion(-)
 create mode 100644 dynamic_tasks_app/__init__.py
 create mode 100644 dynamic_tasks_app/__main__.py
 create mode 100644 dynamic_tasks_app/config.py
 create mode 100644 dynamic_tasks_app/connectors/__init__.py
 create mode 100644 dynamic_tasks_app/connectors/kub.py
 create mode 100644 dynamic_tasks_app/readme.md
 create mode 100644 dynamic_tasks_app/tests/__init__.py
 create mode 100644 dynamic_tasks_app/tests/examples/builder/Dockerfile
 create mode 100644 dynamic_tasks_app/tests/examples/builder/build.py
 create mode 100644 dynamic_tasks_app/tests/examples/builder/requirements.txt
 create mode 100644 requirements-dynamic.txt

diff --git a/dynamic_tasks_app/__init__.py b/dynamic_tasks_app/__init__.py
new file mode 100644
index 0000000..f42d0fd
--- /dev/null
+++ b/dynamic_tasks_app/__init__.py
@@ -0,0 +1,50 @@
+from contextlib import asynccontextmanager
+
+from fastapi import APIRouter, FastAPI
+
+from .config import settings
+from .connectors import DynamicTaskInfo
+from .connectors.kub import KubeConnector
+
+# WTF: tmp for dev
+connector = KubeConnector()
+
+
+@asynccontextmanager
+async def lifespan(app: FastAPI):
+    await connector.init()
+    try:
+        yield
+    finally:
+        await connector.close()
+
+
+app = FastAPI(
+    lifespan=lifespan,
+)
+
+# TODO: token check
+router = APIRouter(
+    prefix="/api",
+    tags=["api"],
+)
+
+
+@router.post("/start")
+async def api_start(task_info: DynamicTaskInfo):
+    return await connector.start(task_info)
+
+
+@router.post("/stop")
+async def api_stop(task_info: DynamicTaskInfo):
+    return await connector.stop(task_info)
+
+
+@router.post("/restart")
+async def api_restart(task_info: DynamicTaskInfo):
+    return await connector.restart(task_info)
+
+
+@router.post("/info")
+async def api_info(task_info: DynamicTaskInfo):
+    return await connector.info(task_info)
diff --git a/dynamic_tasks_app/__main__.py b/dynamic_tasks_app/__main__.py
new file mode 100644
index 0000000..368cf4b
--- /dev/null
+++ b/dynamic_tasks_app/__main__.py
@@ -0,0 +1,28 @@
+import asyncio
+from pathlib import Path
+
+from loguru import logger
+from .connectors.kub import KubeConnector
+
+
+async def test(x: KubeConnector):
+    # await x.test()
+
+    src = Path("dynamic_tasks_app") / "tests" / "examples" / "builder"
+    src = src.resolve()
+    await x.api.build(src)
+
+    # await x.test()
+
+
+async def main():
+    x = KubeConnector()
+    await x.init()
+    try:
+        await test(x)
+    finally:
+        await x.close()
+
+
+if __name__ ==
"__main__": + asyncio.run(main()) diff --git a/dynamic_tasks_app/config.py b/dynamic_tasks_app/config.py new file mode 100644 index 0000000..f77739a --- /dev/null +++ b/dynamic_tasks_app/config.py @@ -0,0 +1,59 @@ +from pathlib import Path +from typing import Self + +from pydantic import model_validator +from pydantic_settings import BaseSettings, SettingsConfigDict + +_DEFAULT_TOKEN = "default_token_CHANGE_ME" # noqa: S105 # intended + + +class DefaultTokenError(ValueError): + pass + + +class Settings(BaseSettings): + DEBUG: bool = False + TESTING: bool = False + + FLAG_SIGN_KEY: str = _DEFAULT_TOKEN + + DYNAMIC_TASKS_CONTROLLER_TOKEN: str | None = None + + KUBE_CONFIG_PATH: Path | None = None + + S3_HOST: str + S3_PORT: int = 80 + S3_ACCESS: str + S3_SECRET: str + + @property + def kube_config_path(self) -> str | None: + if not self.KUBE_CONFIG_PATH: + return None + + return str(self.KUBE_CONFIG_PATH.expanduser().resolve()) + + @property + def s3_endpoint(self) -> str: + return f"{self.S3_HOST}:{self.S3_PORT}" + + @model_validator(mode="after") + def check_non_default_tokens(self) -> Self: + if self.DEBUG or self.TESTING: + return self + + token_check_list = ["FLAG_SIGN_KEY"] + for token_name in token_check_list: + if getattr(self, token_name) == _DEFAULT_TOKEN: + raise DefaultTokenError(f"Field '{token_name}' have default token value") + + return self + + model_config = SettingsConfigDict( + env_file="yatb.env", + env_file_encoding="utf-8", + extra="allow", + ) + + +settings = Settings() # pyright: ignore[reportCallIssue] diff --git a/dynamic_tasks_app/connectors/__init__.py b/dynamic_tasks_app/connectors/__init__.py new file mode 100644 index 0000000..a81765a --- /dev/null +++ b/dynamic_tasks_app/connectors/__init__.py @@ -0,0 +1,43 @@ +from abc import ABC, abstractmethod +from enum import Enum +from uuid import UUID + +from pydantic import BaseModel + + +class DynamicTaskType(Enum): + BUILDER = "builder" + SERVICE = "service" + + +class DynamicTaskInfo(BaseModel): + descriptor: UUID + type: DynamicTaskType + + user_id: str + + +class BaseConnector(ABC): + @abstractmethod + async def init(self) -> None: + pass + + @abstractmethod + async def close(self) -> None: + pass + + @abstractmethod + async def start(self, task_info: DynamicTaskInfo) -> None: + raise NotImplementedError + + @abstractmethod + async def stop(self, task_info: DynamicTaskInfo) -> None: + raise NotImplementedError + + @abstractmethod + async def restart(self, task_info: DynamicTaskInfo) -> None: + raise NotImplementedError + + @abstractmethod + async def info(self, task_info: DynamicTaskInfo) -> None: + raise NotImplementedError diff --git a/dynamic_tasks_app/connectors/kub.py b/dynamic_tasks_app/connectors/kub.py new file mode 100644 index 0000000..a0950fd --- /dev/null +++ b/dynamic_tasks_app/connectors/kub.py @@ -0,0 +1,216 @@ +# +import asyncio +from pathlib import Path +import random +import string +import tarfile +import io + +from lightkube.config.kubeconfig import KubeConfig +from lightkube.core.async_client import AsyncClient +from lightkube.resources.core_v1 import Namespace, Pod +from lightkube.models.meta_v1 import ObjectMeta +from lightkube.models.core_v1 import PodSpec, Container, EnvVar +from loguru import logger +from miniopy_async import Minio + +from ..config import settings +from . 
import BaseConnector, DynamicTaskInfo + + +class ImpossibleError(Exception): + pass + + +# class WTF: +# api: kr8sa.Api + +# async def init(self) -> None: +# self.api = await kr8sa.api(kubeconfig=settings.kube_config_path) + +# async def test(self) -> None: +# logger.info(f"{await self.api.whoami() = }") +# logger.info("Listing pods with their IPs:") +# ret = await self.api.get("pods", namespace=kr8s.ALL) +# if not isinstance(ret, list): +# raise Exception("wtf") + +# for i in ret: +# logger.info(f"{i}") + + +class KubeApi: + BUILD_BUCKET_NAME: str = "dynamic-tasks-build-source" + # BUILD_NAMESPACE: str = "yatb-build-namespace" + BUILD_NAMESPACE: str = "default" + + client: AsyncClient + s3: Minio + + async def init(self) -> None: + # setup kube + config = KubeConfig.from_file(settings.kube_config_path) if settings.kube_config_path else None + self.client = AsyncClient(config) # type: ignore # lib broken + + # setup s3 + self.s3 = Minio( + endpoint=settings.s3_endpoint, + access_key=settings.S3_ACCESS, + secret_key=settings.S3_SECRET, + secure=False, # http for False, https for True + ) + + # setup buckets + await self.setup_s3() + + async def close(self) -> None: + await self.client.close() + + async def setup_s3(self) -> None: + if not await self.s3.bucket_exists(self.BUILD_BUCKET_NAME): + await self.s3.make_bucket(self.BUILD_BUCKET_NAME) + + @classmethod + def generate_name(cls, alphabet: str = string.digits + string.ascii_lowercase, n: int = 16) -> str: + return "".join(random.choices(alphabet, k=n)) # noqa: S311 + + async def build(self, source: Path, *, _base_ip: str = "10.42.0.1") -> None: + assert source.is_absolute() + assert source.is_dir() + assert (source / "Dockerfile").exists() + + build_name = self.generate_name() + build_name = "y4fchuw25jbrh0fz" + raw_img_name = f"{build_name}.tar.gz" + + with io.BytesIO() as buff: + with tarfile.open(fileobj=buff, mode="w:gz") as tar: + for file in source.iterdir(): + tar.add(file, arcname=file.relative_to(source)) # string absolute long path + buff.seek(0) # reset to 0. because... you knew. + + size = len(buff.getbuffer()) + await self.s3.put_object( + self.BUILD_BUCKET_NAME, + raw_img_name, + buff, + length=size, + ) + + logger.info( + f"Uploaded archive from {source} ({size = }) as 's3://{self.BUILD_BUCKET_NAME}/{raw_img_name}'", + ) + + kaniko = await self.client.create( + Pod( + metadata=ObjectMeta(name=f"kaniko-build-{build_name}", namespace=self.BUILD_NAMESPACE), + spec=PodSpec( + containers=[ + Container( + name="kaniko", + image="gcr.io/kaniko-project/executor:v1.23.2", + args=[ + "--dockerfile=/kaniko/buildcontext/Dockerfile", + f"--context=s3://{self.BUILD_BUCKET_NAME}/{raw_img_name}", + f"--destination={_base_ip}:5000/prebuild-images/{build_name}:latest", + "--cache=true", + "--cache-run-layers=true", + "--cache-copy-layers=true", + f"--cache-repo={_base_ip}:5000/cache", + ], + env=[ + EnvVar( + "S3_ENDPOINT", + value=f"http://{_base_ip}:{settings.S3_PORT}", + ), + # need to specify this to use path-stye minio, + # and don't try to resolve http://bucket.ip:port/file + EnvVar("S3_FORCE_PATH_STYLE", "true"), + # i have AWS. 
Doesn't work without this
+                                EnvVar("AWS_REGION", "us-east-1"),  # i have AWS
+                                EnvVar("AWS_ACCESS_KEY_ID", settings.S3_ACCESS),
+                                EnvVar("AWS_SECRET_ACCESS_KEY", settings.S3_SECRET),
+                            ],
+                        ),
+                    ],
+                    restartPolicy="Never",
+                ),
+            ),
+        )
+
+        if not kaniko.metadata or not kaniko.metadata.name or not kaniko.metadata.namespace:
+            raise ImpossibleError
+
+        try:
+            kaniko = await self.client.wait(
+                Pod,
+                kaniko.metadata.name,
+                for_conditions=["PodReadyToStartContainers"],
+                namespace=kaniko.metadata.namespace,
+            )
+
+            if not kaniko.metadata or not kaniko.metadata.name or not kaniko.metadata.namespace or not kaniko.status:
+                raise ImpossibleError
+
+            logger.info(
+                f"Kaniko pod created: '{kaniko.metadata.namespace}.{kaniko.metadata.name}'",
+            )
+
+            async for line in self.client.log(
+                kaniko.metadata.name,
+                namespace=kaniko.metadata.namespace,
+                follow=True,
+                newlines=False,
+            ):
+                logger.info(line)
+
+        finally:
+            if not kaniko.metadata or not kaniko.metadata.name or not kaniko.metadata.namespace:
+                raise ImpossibleError
+
+            await self.client.delete(Pod, kaniko.metadata.name, namespace=kaniko.metadata.namespace)
+
+    async def test(self):
+        logger.info("Simple cluster status:")
+        async for ns in self.client.list(Namespace):
+            if not ns.metadata or not ns.metadata.name:
+                logger.warning(f"{ns = } no metadata or name")
+                continue
+
+            logger.info(f"Found ns: {ns.metadata.name}")
+
+        async for pod in self.client.list(Pod, namespace="*"):
+            if not pod.metadata or not pod.status:
+                logger.warning(f"{pod = } no metadata / status")
+                continue
+
+            logger.info(f"{pod.metadata.namespace}.{pod.metadata.name}: {pod.status.podIPs}")
+
+
+class KubeConnector(BaseConnector):
+    api: KubeApi
+
+    def __init__(self) -> None:
+        self.api = KubeApi()
+        super().__init__()
+
+    async def init(self) -> None:
+        await self.api.init()
+
+    async def test(self) -> None:
+        await self.api.test()
+
+    async def close(self) -> None:
+        await self.api.close()
+
+    async def start(self, task_info: DynamicTaskInfo) -> None:
+        raise NotImplementedError
+
+    async def stop(self, task_info: DynamicTaskInfo) -> None:
+        raise NotImplementedError
+
+    async def restart(self, task_info: DynamicTaskInfo) -> None:
+        raise NotImplementedError
+
+    async def info(self, task_info: DynamicTaskInfo) -> None:
+        raise NotImplementedError
diff --git a/dynamic_tasks_app/readme.md b/dynamic_tasks_app/readme.md
new file mode 100644
index 0000000..46db305
--- /dev/null
+++ b/dynamic_tasks_app/readme.md
@@ -0,0 +1,72 @@
+# dynamic tasks
+
+## Spec
+
+Dynamic tasks come in two kinds:
+
+- File-based: we simply generate `N` files and hand them to the user
+- Service-based
+  - https (http is possible, but https is better) - this has to be wrapped behind a smart proxy like traefik / caddy (scary to imagine how many proxies will end up between the client and the service)
+  - tcp, which can in principle degenerate into http - this also has to be wrapped, but behind a dumb proxy, + unique ports still have to be handed out
+
+(mixing a file task and a service task ~~in my own style~~ is, imho, a crap idea, so there will be no such option) - okay, not complete crap. I'll think about how to finish it.
+
+The task developer provides either:
+
+- for file tasks: a `Dockerfile` which at build time puts some artifacts into a conventional `/export` -> these artifacts will be exported from there to the user.
+- for service-based tasks: a `docker-compose.yaml` that exposes some port, plus a description of the port type - http(s) / tcp (plain tcp is fine to start with) (see the example at the end of this file)
+
+The dynamic-task infrastructure consists of:
+
+- a docker registry - prebuilt containers live here so they don't have to be rebuilt from scratch every time, both for file-based and for service-based tasks.
+- a container orchestrator (docker swarm / k8s-like api / nomad)
+- minio (or another s3-like api) - the static files built for file-based tasks live here (plus it can store static files for any task in general, even non-parameterized ones)
+- some thing standing in front of all this splendor and routing the traffic
+
+The user&task flow goes like this:
+
+1. go to the board, open the task
+2. press the "generate / start" button
+3. Wait.
+4. get either:
+   1. a link to an archive with the static files,
+   2. or an http(s) link to the service - in this case a unique hostname must be generated for the team, like `{random-determenistic-shit}.tasks.yatb.local`
+   3. or an http/tcp address with a unique port - it makes sense to abuse dns here too, but in the worst case plain IPs are enough.
+5. Solve the task.
+6. The cache for file-based tasks is kept for the whole CTF, generated once.
+7. Service-based tasks get destroyed after a timeout once the flag is submitted, or after a fixed time, say, a couple of hours. Maybe add a notification/confirmation that the task is still being worked on?
+
+More inspiration can be found here:
+
+## Breakdown into subtasks
+
+1. [ ] For the ~~first~~ MVP version I need to pick one orchestrator that won't be a total pain for me. Options:
+   1. [ ] Docker Swarm - it's simple, but not very feature-rich.
+      1. [ ] Learn to bring it up in at least some form
+      2. [ ] Learn to build containers
+      3. [ ] Learn to run containers
+      4. [ ] Learn to run composes
+      5. [ ] Learn to expose ports to the outside
+   2. [ ] k8s-like api (k3s in my case).
+      1. [ ] Figure out how this damn kube works at all
+      2. [x] Learn to bring it up in at least some form
+      3. [x] Learn to build containers
+      4. [ ] Learn to run containers
+      5. [ ] Learn to run composes
+      6. [ ] Learn to expose ports to the outside
+   3. [ ] nomad
+      1. [ ] Figure out how nomad works
+      2. [ ] Learn to bring it up in at least some form
+      3. [ ] Learn to build containers
+      4. [ ] Learn to run containers
+      5. [ ] Learn to run composes
+      6. [ ] Learn to expose ports to the outside
+   4. [ ] self-written thing???? (cursed, definitely not)
+      1. [ ] die
+2. [ ] Learn to expose ports (services) to the outside in the general case
+   1. [ ] Expose minio
+   2. [ ] Learn to expose tcp ports
+3. [ ] Learn to build file-based tasks
+   1. [ ] Need to build containers somehow, preferably without breaking anything: - maybe this will help
+4. [ ] Assorted boilerplate yatb <-> dynamic_tasks_controller
+   1. [x] The most basic part - written
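+
+### Example (sketch)
+
+For the service-based contract, a minimal `docker-compose.yaml` could look like the one below. This is an illustration of the intent, not a finalized format; the file name and the port convention are assumptions at this stage:
+
+```yaml
+version: "3"
+
+services:
+  main:
+    build: .
+    ports:
+      - "80"  # internal port; the platform assigns the external one
+```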
diff --git a/dynamic_tasks_app/tests/__init__.py b/dynamic_tasks_app/tests/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/dynamic_tasks_app/tests/examples/builder/Dockerfile b/dynamic_tasks_app/tests/examples/builder/Dockerfile
new file mode 100644
index 0000000..672d9c2
--- /dev/null
+++ b/dynamic_tasks_app/tests/examples/builder/Dockerfile
@@ -0,0 +1,11 @@
+FROM python:3.12-slim
+
+WORKDIR /app
+
+COPY requirements.txt ./
+
+RUN pip install --no-cache-dir -r requirements.txt
+
+COPY . .
+
+CMD [ "python", "/app/build.py" ]
diff --git a/dynamic_tasks_app/tests/examples/builder/build.py b/dynamic_tasks_app/tests/examples/builder/build.py
new file mode 100644
index 0000000..d9cd911
--- /dev/null
+++ b/dynamic_tasks_app/tests/examples/builder/build.py
@@ -0,0 +1,15 @@
+#!/usr/bin/env python3
+# just a very simple builder
+
+from cyclopts import App
+
+app = App("example task builder")
+
+
+@app.command()
+def main():
+    print("Hello!")
+
+
+if __name__ == "__main__":
+    app()
diff --git a/dynamic_tasks_app/tests/examples/builder/requirements.txt b/dynamic_tasks_app/tests/examples/builder/requirements.txt
new file mode 100644
index 0000000..27e57af
--- /dev/null
+++ b/dynamic_tasks_app/tests/examples/builder/requirements.txt
@@ -0,0 +1,4 @@
+cyclopts==2.9.3
+rich==13.7.1
+httpx==0.22.0
+pydantic-yaml==1.2.0
diff --git a/requirements-dynamic.txt b/requirements-dynamic.txt
new file mode 100644
index 0000000..120f261
--- /dev/null
+++ b/requirements-dynamic.txt
@@ -0,0 +1,12 @@
+# kr8s==0.17.0
+lightkube==0.15.3
+
+miniopy-async==1.20.1
+
+pydantic-settings==2.3.4
+pydantic==2.8.2
+fastapi==0.101.1
+
+uvicorn==0.17.6
+
+loguru==0.7.2
diff --git a/requirements.txt b/requirements.txt
index fb8924f..f804bcf 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,4 +1,4 @@
-pydantic-settings==2.14.5
+pydantic-settings==2.3.4
 pydantic==2.8.2
 fastapi==0.101.1
 
From bcf4b36064ff37ae88a61e0ed5e39c99452b0a0d Mon Sep 17 00:00:00 2001
From: Rubikoid
Date: Tue, 16 Jul 2024 21:00:44 +0300
Subject: [PATCH 08/42] Update readme.md

---
 dynamic_tasks_app/readme.md | 26 +++++++++++++-------------
 1 file changed, 13 insertions(+), 13 deletions(-)

diff --git a/dynamic_tasks_app/readme.md b/dynamic_tasks_app/readme.md
index 46db305..73e7184 100644
--- a/dynamic_tasks_app/readme.md
+++ b/dynamic_tasks_app/readme.md
@@ -9,11 +9,12 @@
   - https (http is possible, but https is better) - this has to be wrapped behind a smart proxy like traefik / caddy (scary to imagine how many proxies will end up between the client and the service)
   - tcp, which can generally degenerate into http - this also has to be wrapped, but behind a dumb proxy; unique ports still have to be handed out
 
-(mixing a file-based and a service-based task ~~in my own style~~ is, imho, a crappy idea, so there will be no such option) - okay, not entirely crappy. I'll think about how to polish it.
+(mixing a file-based and a service-based task ~~in my own style~~ is, imho, not a great idea, so there will be no such option) - okay, not entirely bad.
+I'll think about how this could be polished.
 
 The task author provides either:
 
-- for file-based tasks: a `Dockerfile` that, at build time, drops some artifacts into a conventional `/export` -> these artifacts are then exported from there to the user.
+- for file-based tasks: a `Dockerfile` that builds a container which, when run, generates the task's files into a special directory like `/export`, which is then archived and handed to the user.
 - for service-based tasks: a `docker-compose.yaml` that exposes some port, plus a description of the port type - http(s) / tcp (plain tcp is fine to start with) (see the example at the end of this file)
 
 The dynamic-task infrastructure consists of:
@@ -41,28 +42,27 @@ The user&task flow goes like this:
 ## Breakdown into subtasks
 
 1. [ ] For the ~~first~~ MVP version I need to pick one orchestrator that won't be a total pain for me. Options:
-   1. [ ] Docker Swarm - it's simple, but not very feature-rich.
-      1. [ ] Learn to bring it up in at least some form
-      2. [ ] Learn to build containers
-      3. [ ] Learn to run containers
-      4. [ ] Learn to run composes
-      5. [ ] Learn to expose ports to the outside
-   2. [ ] k8s-like api (k3s in my case).
-      1. [ ] Figure out how this damn kube works at all
+   1. [ ] k8s-like api (k3s in my case).
+      1. [x] Get a very basic understanding of how kube operates
       2. [x] Learn to bring it up in at least some form
       3. [x] Learn to build containers
       4. [ ] Learn to run containers
       5. [ ] Learn to run composes
       6. [ ] Learn to expose ports to the outside
+   2. [ ] Docker Swarm - it's simple, but not very feature-rich.
+      1. [ ] Learn to bring it up in at least some form
+      2. [ ] Learn to build containers
+      3. [ ] Learn to run containers
+      4. [ ] Learn to run composes
+      5. [ ] Learn to expose ports to the outside
    3. [ ] nomad
       1. [ ] Figure out how nomad works
       2. [ ] Learn to bring it up in at least some form
-      3. [ ] Learn to build containers
+      3. [ ] Learn to build containers
       4. [ ] Learn to run containers
       5. [ ] Learn to run composes
       6. [ ] Learn to expose ports to the outside
-   4. [ ] self-written thing???? (cursed, definitely not)
-      1. [ ] die
+   4. [ ] self-written thing? Very hard and slow
 2. [ ] Learn to expose ports (services) to the outside in the general case
From 8ed3b68c1088b97a03d35b9faee7b8107c77f122 Mon Sep 17 00:00:00 2001
From: Rubikoid
Date: Tue, 23 Jul 2024 00:43:49 +0300
Subject: [PATCH 09/42] Add pydantic-yaml parser

---
 dynamic_tasks_app/utils/pydantic_yaml.py | 102 +++++++++++++++++++++++
 1 file changed, 102 insertions(+)
 create mode 100644 dynamic_tasks_app/utils/pydantic_yaml.py

diff --git a/dynamic_tasks_app/utils/pydantic_yaml.py b/dynamic_tasks_app/utils/pydantic_yaml.py
new file mode 100644
index 0000000..560b32b
--- /dev/null
+++ b/dynamic_tasks_app/utils/pydantic_yaml.py
@@ -0,0 +1,102 @@
+# https://github.com/NowanIlfideme/pydantic-yaml/blob/06b75137860ec6ad834b397a474fab7db00140ee/src/pydantic_yaml/_internals/v2.py
+# MIT License
+
+# Copyright (c) 2020 Anatoly Makarevich
+
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+
+# The above copyright notice and this permission notice shall be included in all
+# copies or substantial portions of the Software.
+
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+
+
+from io import BytesIO, IOBase, StringIO
+from pathlib import Path
+from typing import Any, TypeVar
+
+from pydantic import BaseModel, TypeAdapter
+from ruamel.yaml import YAML
+
+T = TypeVar("T", bound=BaseModel)
+
+
+def parse_yaml_raw_as(
+    model_type: type[T],
+    raw: str | bytes | IOBase,
+    *,
+    context: dict[str, Any] | None = None,
+) -> T:
+    """
+    Parse a raw YAML string as the passed model type.
+
+    Parameters
+    ----------
+    model_type : Type[BaseModel]
+        The resulting model type.
+    raw : str or bytes or IOBase
+        The YAML string or stream.
+    context : dict[str, Any], optional
+        Passed through to `validate_python`.
+
+    """
+
+    stream: IOBase
+    if isinstance(raw, str):
+        stream = StringIO(raw)
+    elif isinstance(raw, bytes):
+        stream = BytesIO(raw)
+    elif isinstance(raw, IOBase):
+        stream = raw
+    else:
+        raise TypeError(f"Expected str, bytes or IO, but got {raw!r}")
+    reader = YAML(typ="safe", pure=True)  # YAML 1.2 support
+    objects = reader.load(stream)
+    ta = TypeAdapter(model_type)  # type: ignore
+    return ta.validate_python(objects, context=context)
+
+
+def parse_yaml_file_as(
+    model_type: type[T],
+    file: Path | str | IOBase,
+    *,
+    context: dict[str, Any] | None = None,
+) -> T:
+    """
+    Parse a YAML file as the passed model type.
+
+    Parameters
+    ----------
+    model_type : Type[BaseModel]
+        The resulting model type.
+    file : Path or str or IOBase
+        The file path or stream to read from.
+    context : dict[str, Any], optional
+        Passed through to `validate_python`.
+
+    """
+
+    # Short-circuit (and still forward the validation context)
+    if isinstance(file, IOBase):
+        return parse_yaml_raw_as(model_type, raw=file, context=context)
+
+    if isinstance(file, str):
+        file = Path(file).resolve()
+    elif isinstance(file, Path):
+        file = file.resolve()
+    else:
+        raise TypeError(f"Expected Path, str or IO, but got {file!r}")
+
+    with file.open(mode="r") as f:
+        return parse_yaml_raw_as(model_type, f, context=context)
From a4f807e67d5b96288dd6b373a17e9e5c6fa3bbf8 Mon Sep 17 00:00:00 2001
From: Rubikoid
Date: Tue, 23 Jul 2024 00:44:12 +0300
Subject: [PATCH 10/42] Add an extremely simple docker-compose.yaml parser

---
 dynamic_tasks_app/connectors/compose.py | 76 +++++++++++++++++++++++
 1 file changed, 76 insertions(+)
 create mode 100644 dynamic_tasks_app/connectors/compose.py

diff --git a/dynamic_tasks_app/connectors/compose.py b/dynamic_tasks_app/connectors/compose.py
new file mode 100644
index 0000000..6542d30
--- /dev/null
+++ b/dynamic_tasks_app/connectors/compose.py
@@ -0,0 +1,76 @@
+from dataclasses import dataclass
+from pathlib import Path
+from typing import Annotated
+
+from loguru import logger
+from pydantic import AfterValidator, BaseModel, BeforeValidator, ValidationInfo, field_validator, validator
+
+from ..utils.pydantic_yaml import parse_yaml_file_as
+
+
+def fix_relative_path(v: Path, info: ValidationInfo) -> Path:
+    if not info.context:
+        raise Exception("validation context with source_path is required")
+
+    source_path: Path = info.context["source_path"]
+    return (source_path / v).resolve()
+
+
+@dataclass
+class PortInfo:
+    # external_port: int
+    internal_port: int
+
+
+def parse_port_info(v: str | int) -> PortInfo:
+    v = str(v)  # YAML yields bare ints for entries like `ports: [80]`
+    if v.count(":") > 1:
+        raise ValueError(f"Parsing {v!r} error: specifying ip's is not supported")
+    ports = v.split(":")
+    if len(ports) == 2:  # noqa: PLR2004
+        return PortInfo(int(ports[1]))
+    return PortInfo(int(ports[0]))
+
+
+RelativePath = Annotated[Path, AfterValidator(fix_relative_path)]
+Port = Annotated[PortInfo, BeforeValidator(parse_port_info)]
+
+
+class ServiceBuild(BaseModel):
+    context: RelativePath
+    dockerfile: RelativePath = Path("Dockerfile")
+
+
+class Service(BaseModel):
+    image: str | None = None
+
+    build: ServiceBuild | RelativePath | None = None
+
+    command: list[str] | str | None = None
+
+    ports: list[Port] = []
+
+    @property
+    def prepared_command(self) -> list[str] | None:
+        if not self.command:
+            return None
+
+        if isinstance(self.command, str):
+            return self.command.split(" ")
+
+        return self.command
+
+
+class Compose(BaseModel):
+    version: str
+
+    services: dict[str, Service]
+
+
+def load_compose(source: Path) -> Compose:
+    yaml = source /
"docker-compose.yml" + assert source.is_absolute() + assert source.is_dir() + assert yaml.exists() + + compose = parse_yaml_file_as(Compose, yaml, context={"source_path": source}) + return compose From 1901acfec92778b1c0a25e5f3152ac5ed82821da Mon Sep 17 00:00:00 2001 From: Rubikoid Date: Tue, 23 Jul 2024 00:59:03 +0300 Subject: [PATCH 11/42] Heavy work on implementing service runner Now it is possible to run simple tcp services from generic docker-composes --- dynamic_tasks_app/__main__.py | 65 ++- dynamic_tasks_app/connectors/__init__.py | 14 + dynamic_tasks_app/connectors/kub.py | 216 -------- dynamic_tasks_app/connectors/kub/__init__.py | 503 +++++++++++++++++++ dynamic_tasks_app/connectors/kub/client.py | 169 +++++++ dynamic_tasks_app/readme.md | 35 +- 6 files changed, 768 insertions(+), 234 deletions(-) delete mode 100644 dynamic_tasks_app/connectors/kub.py create mode 100644 dynamic_tasks_app/connectors/kub/__init__.py create mode 100644 dynamic_tasks_app/connectors/kub/client.py diff --git a/dynamic_tasks_app/__main__.py b/dynamic_tasks_app/__main__.py index 368cf4b..456c928 100644 --- a/dynamic_tasks_app/__main__.py +++ b/dynamic_tasks_app/__main__.py @@ -1,28 +1,67 @@ import asyncio from pathlib import Path +from cyclopts import App from loguru import logger + +from .connectors.compose import load_compose from .connectors.kub import KubeConnector +app = App("dynamic_tasks_app helper") + -async def test(x: KubeConnector): - # await x.test() +@app.command() +async def build( + docker_login: str, + docker_password: str, + source: Path = Path(__file__).resolve().parent / "extra", + name: str = "yatb-k8s-builder-base", + tag: str = "latest", +) -> None: + async with ( + KubeConnector() as x, + x.api.docker_config_json_secret(docker_login, docker_password) as docker_json_secret, + ): + await x.api.build( + name, + source, + destination_override=f"rubikoid/yatb-k8s-builder-base:{tag}", + secrets=[docker_json_secret], + ) - src = Path("dynamic_tasks_app") / "tests" / "examples" / "builder" + +@app.command() +async def run_service( + src: Path, + name: str | None = None, + flag: str | None = None, +) -> None: src = src.resolve() - await x.api.build(src) + compose = load_compose(src) + + name = name or src.name + flag = flag or "crab{TEST}" + + async with KubeConnector() as x: + await x.api.service(name, compose, "flag{TEST}") + + +@app.command() +async def test(): + async with KubeConnector() as x: + src = Path("dynamic_tasks_app") / "tests" / "examples" / "service" + src = src.resolve() + compose = load_compose(src) + await x.api.service("test-svc", compose, "flag{TEST}") - # await x.test() + await x.test() + # src = Path("dynamic_tasks_app") / "tests" / "examples" / "builder" + # src = src.resolve() + # name = "test-image" -async def main(): - x = KubeConnector() - await x.init() - try: - await test(x) - finally: - await x.close() + # await x.api.build(name, src) if __name__ == "__main__": - asyncio.run(main()) + app() diff --git a/dynamic_tasks_app/connectors/__init__.py b/dynamic_tasks_app/connectors/__init__.py index a81765a..80ffa9e 100644 --- a/dynamic_tasks_app/connectors/__init__.py +++ b/dynamic_tasks_app/connectors/__init__.py @@ -1,5 +1,7 @@ from abc import ABC, abstractmethod from enum import Enum +from types import TracebackType +from typing import AsyncContextManager, Self from uuid import UUID from pydantic import BaseModel @@ -18,6 +20,18 @@ class DynamicTaskInfo(BaseModel): class BaseConnector(ABC): + async def __aenter__(self) -> Self: + await self.init() + return self + 
+ async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_value: BaseException | None, + traceback: TracebackType | None, + ) -> None: + await self.close() + @abstractmethod async def init(self) -> None: pass diff --git a/dynamic_tasks_app/connectors/kub.py b/dynamic_tasks_app/connectors/kub.py deleted file mode 100644 index a0950fd..0000000 --- a/dynamic_tasks_app/connectors/kub.py +++ /dev/null @@ -1,216 +0,0 @@ -# -import asyncio -from pathlib import Path -import random -import string -import tarfile -import io - -from lightkube.config.kubeconfig import KubeConfig -from lightkube.core.async_client import AsyncClient -from lightkube.resources.core_v1 import Namespace, Pod -from lightkube.models.meta_v1 import ObjectMeta -from lightkube.models.core_v1 import PodSpec, Container, EnvVar -from loguru import logger -from miniopy_async import Minio - -from ..config import settings -from . import BaseConnector, DynamicTaskInfo - - -class ImpossibleError(Exception): - pass - - -# class WTF: -# api: kr8sa.Api - -# async def init(self) -> None: -# self.api = await kr8sa.api(kubeconfig=settings.kube_config_path) - -# async def test(self) -> None: -# logger.info(f"{await self.api.whoami() = }") -# logger.info("Listing pods with their IPs:") -# ret = await self.api.get("pods", namespace=kr8s.ALL) -# if not isinstance(ret, list): -# raise Exception("wtf") - -# for i in ret: -# logger.info(f"{i}") - - -class KubeApi: - BUILD_BUCKET_NAME: str = "dynamic-tasks-build-source" - # BUILD_NAMESPACE: str = "yatb-build-namespace" - BUILD_NAMESPACE: str = "default" - - client: AsyncClient - s3: Minio - - async def init(self) -> None: - # setup kube - config = KubeConfig.from_file(settings.kube_config_path) if settings.kube_config_path else None - self.client = AsyncClient(config) # type: ignore # lib broken - - # setup s3 - self.s3 = Minio( - endpoint=settings.s3_endpoint, - access_key=settings.S3_ACCESS, - secret_key=settings.S3_SECRET, - secure=False, # http for False, https for True - ) - - # setup buckets - await self.setup_s3() - - async def close(self) -> None: - await self.client.close() - - async def setup_s3(self) -> None: - if not await self.s3.bucket_exists(self.BUILD_BUCKET_NAME): - await self.s3.make_bucket(self.BUILD_BUCKET_NAME) - - @classmethod - def generate_name(cls, alphabet: str = string.digits + string.ascii_lowercase, n: int = 16) -> str: - return "".join(random.choices(alphabet, k=n)) # noqa: S311 - - async def build(self, source: Path, *, _base_ip: str = "10.42.0.1") -> None: - assert source.is_absolute() - assert source.is_dir() - assert (source / "Dockerfile").exists() - - build_name = self.generate_name() - build_name = "y4fchuw25jbrh0fz" - raw_img_name = f"{build_name}.tar.gz" - - with io.BytesIO() as buff: - with tarfile.open(fileobj=buff, mode="w:gz") as tar: - for file in source.iterdir(): - tar.add(file, arcname=file.relative_to(source)) # string absolute long path - buff.seek(0) # reset to 0. because... you knew. 
- - size = len(buff.getbuffer()) - await self.s3.put_object( - self.BUILD_BUCKET_NAME, - raw_img_name, - buff, - length=size, - ) - - logger.info( - f"Uploaded archive from {source} ({size = }) as 's3://{self.BUILD_BUCKET_NAME}/{raw_img_name}'", - ) - - kaniko = await self.client.create( - Pod( - metadata=ObjectMeta(name=f"kaniko-build-{build_name}", namespace=self.BUILD_NAMESPACE), - spec=PodSpec( - containers=[ - Container( - name="kaniko", - image="gcr.io/kaniko-project/executor:v1.23.2", - args=[ - "--dockerfile=/kaniko/buildcontext/Dockerfile", - f"--context=s3://{self.BUILD_BUCKET_NAME}/{raw_img_name}", - f"--destination={_base_ip}:5000/prebuild-images/{build_name}:latest", - "--cache=true", - "--cache-run-layers=true", - "--cache-copy-layers=true", - f"--cache-repo={_base_ip}:5000/cache", - ], - env=[ - EnvVar( - "S3_ENDPOINT", - value=f"http://{_base_ip}:{settings.S3_PORT}", - ), - # need to specify this to use path-stye minio, - # and don't try to resolve http://bucket.ip:port/file - EnvVar("S3_FORCE_PATH_STYLE", "true"), - # i have AWS. Don't work without this - EnvVar("AWS_REGION", "us-east-1"), # i have AWS - EnvVar("AWS_ACCESS_KEY_ID", settings.S3_ACCESS), - EnvVar("AWS_SECRET_ACCESS_KEY", settings.S3_SECRET), - ], - ), - ], - restartPolicy="Never", - ), - ), - ) - - if not kaniko.metadata or not kaniko.metadata.name or not kaniko.metadata.namespace: - raise ImpossibleError - - try: - kaniko = await self.client.wait( - Pod, - kaniko.metadata.name, - for_conditions=["PodReadyToStartContainers"], - namespace=kaniko.metadata.namespace, - ) - - if not kaniko.metadata or not kaniko.metadata.name or not kaniko.metadata.namespace or not kaniko.status: - raise ImpossibleError - - logger.info( - f"Kaniko pod created: '{kaniko.metadata.namespace}.{kaniko.metadata.name}'", - ) - - async for line in self.client.log( - kaniko.metadata.name, - namespace=kaniko.metadata.namespace, - follow=True, - newlines=False, - ): - logger.info(line) - - finally: - if not kaniko.metadata or not kaniko.metadata.name or not kaniko.metadata.namespace: - raise ImpossibleError - - await self.client.delete(Pod, kaniko.metadata.name, namespace=kaniko.metadata.namespace) - - async def test(self): - logger.info("Simple cluster status:") - async for ns in self.client.list(Namespace): - if not ns.metadata or not ns.metadata.name: - logger.warning(f"{ns = } no metadata or name") - continue - - logger.info(f"Found ns: {ns.metadata.name}") - - async for pod in self.client.list(Pod, namespace="*"): - if not pod.metadata or not pod.status: - logger.warning(f"{pod = } no metadata / status") - continue - - logger.info(f"{pod.metadata.namespace}.{pod.metadata.name}: {pod.status.podIPs}") - - -class KubeConnector(BaseConnector): - api: KubeApi - - def __init__(self) -> None: - self.api = KubeApi() - super().__init__() - - async def init(self) -> None: - await self.api.init() - - async def test(self) -> None: - await self.api.test() - - async def close(self) -> None: - await self.api.close() - - async def start(self, task_info: DynamicTaskInfo) -> None: - raise NotImplementedError - - async def stop(self, task_info: DynamicTaskInfo) -> None: - raise NotImplementedError - - async def restart(self, task_info: DynamicTaskInfo) -> None: - raise NotImplementedError - - async def info(self, task_info: DynamicTaskInfo) -> None: - raise NotImplementedError diff --git a/dynamic_tasks_app/connectors/kub/__init__.py b/dynamic_tasks_app/connectors/kub/__init__.py new file mode 100644 index 0000000..3b14293 --- /dev/null +++ 
b/dynamic_tasks_app/connectors/kub/__init__.py
@@ -0,0 +1,503 @@
+import asyncio
+import base64
+import io
+import json
+import random
+import string
+import tarfile
+from collections.abc import AsyncGenerator, AsyncIterable
+from contextlib import AsyncExitStack, asynccontextmanager
+from pathlib import Path, PurePosixPath
+from typing import TypeGuard, TypeVar
+
+from lightkube import operators as op
+from lightkube.config.kubeconfig import KubeConfig
+from lightkube.core.async_client import AsyncClient
+from lightkube.core.exceptions import ApiError
+from lightkube.models.apps_v1 import DeploymentSpec
+from lightkube.models.batch_v1 import JobSpec
+from lightkube.models.core_v1 import (
+    Container,
+    ContainerPort,
+    EnvVar,
+    EnvVarSource,
+    KeyToPath,
+    PodSpec,
+    PodTemplateSpec,
+    SecretKeySelector,
+    SecretVolumeSource,
+    ServicePort,
+    ServiceSpec,
+    Volume,
+    VolumeMount,
+)
+from lightkube.models.meta_v1 import ObjectMeta
+from lightkube.resources.apps_v1 import Deployment
+from lightkube.resources.batch_v1 import Job
+from lightkube.resources.core_v1 import Namespace, Node, Pod, Secret, Service
+from lightkube.types import CascadeType
+from loguru import logger
+from miniopy_async import Minio
+
+from ...config import settings
+from .. import BaseConnector, DynamicTaskInfo
+from ..compose import Compose
+from .client import AsyncClientEx, ImpossibleError, check_meta
+
+_T = TypeVar("_T")
+
+
+async def async_to_list(source: AsyncIterable[_T]) -> list[_T]:
+    return [t async for t in source]
+
+
+async def async_first(source: AsyncIterable[_T]) -> _T:
+    async for t in source:
+        return t
+    raise Exception("not found")
+
+
+class KubeApi:
+    _BASE_IP: str = "192.168.1.44"
+
+    BUILD_BUCKET_NAME: str = "dynamic-tasks-build-source"
+    # BUILD_NAMESPACE: str = "yatb-build-namespace"
+    BUILD_NAMESPACE: str = "yatb-build"
+
+    RUN_NAMESPACE: str = "yatb-run"
+
+    client: AsyncClientEx
+    s3: Minio
+
+    async def init(self) -> None:
+        # setup kube
+        config = KubeConfig.from_file(settings.kube_config_path) if settings.kube_config_path else None
+        self.client = AsyncClientEx(config)  # type: ignore # lib broken
+
+        # setup s3
+        self.s3 = Minio(
+            endpoint=settings.s3_endpoint,
+            access_key=settings.S3_ACCESS,
+            secret_key=settings.S3_SECRET,
+            secure=False,  # http for False, https for True
+        )
+
+        # setup buckets
+        await self.setup_s3()
+
+        # setup namespaces
+        await self.setup_namespaces()
+
+    async def close(self) -> None:
+        await self.client.close()
+
+    async def setup_namespaces(self) -> None:
+        for ns in [self.BUILD_NAMESPACE]:
+            logger.info(f"Checking for {ns = } existence")
+            try:
+                res = await self.client.get(Namespace, ns)
+            except ApiError as ex:
+                if ex.status.code != 404:  # noqa: PLR2004
+                    logger.warning(f"{ex = } {ex.status = }")
+                    raise
+
+                logger.info(f"{ns = } not found, creating")
+                res = await self.client.create(Namespace(metadata=ObjectMeta(name=ns)))
+                logger.info(f"{res = } created")
+            else:
+                logger.info(f"{ns = } exists")
+
+    async def setup_network(self) -> None:
+        # TODO: fix me
+        pass
+
+    async def setup_s3(self) -> None:
+        if not await self.s3.bucket_exists(self.BUILD_BUCKET_NAME):
+            await self.s3.make_bucket(self.BUILD_BUCKET_NAME)
+
+    @classmethod
+    def generate_name(cls, alphabet: str = string.digits + string.ascii_lowercase, n: int = 16) -> str:
+        return "".join(random.choices(alphabet, k=n))  # noqa: S311
+
+    def get_image_name(self, name: str) -> str:
+        return f"{self._BASE_IP}:5000/prebuild-images/{name}:latest"
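+
+    # e.g. get_image_name("test-svc") -> "192.168.1.44:5000/prebuild-images/test-svc:latest"
+    # (the registry runs next to the cluster on _BASE_IP); fix_image_name() below
+    # then rewrites that raw address to the "registry.local" alias used for pulls
+    # from inside the cluster: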
+    def fix_image_name(self, src: str) -> str:
+        return src.replace(f"{self._BASE_IP}:5000", "registry.local")
+
+    @asynccontextmanager
+    async def docker_config_json_secret(
+        self,
+        docker_login: str,
+        docker_password: str,
+        name: str | None = None,
+    ) -> AsyncGenerator[Secret, None]:
+        name = name or f"dockerconfig-{docker_login}"
+
+        auth = base64.b64encode(f"{docker_login}:{docker_password}".encode()).decode()
+        raw_secret = {"auths": {"https://index.docker.io/v1/": {"auth": auth}}}
+        encoded_secret = base64.b64encode(json.dumps(raw_secret).encode()).decode()
+
+        async with self.client.ctx(
+            Secret(
+                metadata=ObjectMeta(
+                    name=name,
+                    namespace=self.BUILD_NAMESPACE,
+                    annotations={
+                        "rubikoid.ru/mountVolume-path": "/kaniko/.docker/config.json",
+                        "rubikoid.ru/mountVolume-key": ".dockerconfigjson",
+                        "rubikoid.ru/mountVolume-ro": "True",
+                    },
+                ),
+                type="kubernetes.io/dockerconfigjson",
+                immutable=True,
+                data={".dockerconfigjson": encoded_secret},
+            ),
+        ) as secret:
+            yield secret
+
+    async def build(
+        self,
+        name: str,
+        source: Path,
+        *,
+        destination_override: str | None = None,
+        secrets: list[Secret] | None = None,
+        dockerfile: Path | str = Path("Dockerfile"),
+    ) -> str:
+        assert source.is_absolute()
+        assert source.is_dir()
+        assert (source / dockerfile).exists()
+
+        build_name = name  # self.generate_name()
+        raw_img_name = f"{build_name}.tar.gz"
+
+        with io.BytesIO() as buff:
+            with tarfile.open(fileobj=buff, mode="w:gz") as tar:
+                for file in source.iterdir():
+                    tar.add(file, arcname=file.relative_to(source))  # strip the long absolute path
+            buff.seek(0)  # reset to 0. because... you knew.
+
+            size = len(buff.getbuffer())
+            await self.s3.put_object(
+                self.BUILD_BUCKET_NAME,
+                raw_img_name,
+                buff,
+                length=size,
+            )
+
+        logger.info(
+            f"Uploaded archive from {source} ({size = }) as 's3://{self.BUILD_BUCKET_NAME}/{raw_img_name}'",
+        )
+
+        # some customization
+        destination = destination_override or self.get_image_name(build_name)
+
+        volume_mounts: list[VolumeMount] = []
+        volumes: list[Volume] = []
+        for secret in secrets or []:
+            if not check_meta(secret.metadata) or not secret.metadata.annotations:
+                raise ImpossibleError
+
+            volume_name = f"{secret.metadata.name}-volume"
+            secret_key = secret.metadata.annotations["rubikoid.ru/mountVolume-key"]
+            volumes.append(
+                Volume(
+                    name=volume_name,
+                    secret=SecretVolumeSource(
+                        secretName=secret.metadata.name,
+                        items=[
+                            KeyToPath(
+                                secret_key,
+                                path=secret_key,
+                            )
+                        ],
+                    ),
+                )
+            )
+
+            mount_path = secret.metadata.annotations["rubikoid.ru/mountVolume-path"]
+            # compare the string: bool("False") would still be truthy
+            read_only = secret.metadata.annotations.get("rubikoid.ru/mountVolume-ro", "True") == "True"
+            volume_mounts.append(
+                VolumeMount(
+                    name=volume_name,
+                    mountPath=mount_path,
+                    readOnly=read_only,
+                    subPath=secret_key,
+                ),
+            )
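+
+        # every secret passed via `secrets` is expected to carry the
+        # rubikoid.ru/mountVolume-* annotations (see docker_config_json_secret
+        # above); the loop above turns each one into a read-only single-file
+        # mount, e.g. /kaniko/.docker/config.json for the dockerconfigjson secret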
+        _kaniko = self.client.simple_job(
+            f"kaniko-build-{build_name}",
+            namespace=self.BUILD_NAMESPACE,
+            pod_spec=PodSpec(
+                containers=[
+                    Container(
+                        name="kaniko",
+                        # image="rubikoid/yatb-k8s-builder-base:base",  # "gcr.io/kaniko-project/executor:v1.23.2",
+                        image="gcr.io/kaniko-project/executor:v1.23.2",
+                        args=[
+                            f"--dockerfile={PurePosixPath('/kaniko/buildcontext') / dockerfile}",
+                            f"--context=s3://{self.BUILD_BUCKET_NAME}/{raw_img_name}",
+                            f"--destination={destination}",
+                            "--cache=true",
+                            "--cache-run-layers=true",
+                            "--cache-copy-layers=true",
+                            f"--cache-repo={self._BASE_IP}:5000/cache",
+                        ],
+                        # args=[
+                        #     "-c",
+                        #     """
+                        #     env;
+                        #     ls -la /kaniko/.docker;
+                        #     ls -la /kaniko/.docker/config.json;
+                        #     cat /kaniko/.docker/config.json;
+                        #     """.strip(),
+                        # ],
+                        env=[
+                            EnvVar(
+                                "S3_ENDPOINT",
+                                value=f"http://{self._BASE_IP}:{settings.S3_PORT}",
+                            ),
+                            # need to specify this to use path-style minio,
+                            # and don't try to resolve http://bucket.ip:port/file
+                            EnvVar("S3_FORCE_PATH_STYLE", "true"),
+                            # pretend we are on AWS; kaniko's S3 client doesn't work without these
+                            EnvVar("AWS_REGION", "us-east-1"),
+                            EnvVar("AWS_ACCESS_KEY_ID", settings.S3_ACCESS),
+                            EnvVar("AWS_SECRET_ACCESS_KEY", settings.S3_SECRET),
+                        ],
+                        volumeMounts=volume_mounts,
+                    ),
+                ],
+                volumes=volumes,
+                restartPolicy="Never",
+            ),
+        )
+
+        async with self.client.ctx(_kaniko, cascade=CascadeType.FOREGROUND) as kaniko:
+            if not check_meta(kaniko.metadata):
+                raise ImpossibleError
+
+            logger.info("Waiting for the job to be ready")
+            await self.client.wait_ex(
+                Job,
+                kaniko.metadata.name,
+                namespace=kaniko.metadata.namespace,
+                cb=lambda x: x.get("ready", 0) == 1,
+            )
+
+            kaniko_pod = await async_first(
+                self.client.list(
+                    Pod,
+                    labels={"app.kubernetes.io/name": op.equal(kaniko.metadata.name)},
+                    namespace=kaniko.metadata.namespace,
+                ),
+            )
+
+            if not check_meta(kaniko_pod.metadata):
+                raise ImpossibleError
+
+            logger.info("Waiting for the kaniko pod to be ready")
+
+            kaniko_pod = await self.client.wait(
+                Pod,
+                kaniko_pod.metadata.name,
+                for_conditions=["PodReadyToStartContainers"],
+                namespace=kaniko.metadata.namespace,
+            )
+
+            if not check_meta(kaniko_pod.metadata):
+                raise ImpossibleError
+
+            logger.info(
+                f"Kaniko pod created: '{kaniko_pod.metadata.namespace}.{kaniko_pod.metadata.name}'",
+            )
+
+            async for line in self.client.log(
+                kaniko_pod.metadata.name,
+                namespace=kaniko_pod.metadata.namespace,
+                follow=True,
+                newlines=False,
+            ):
+                logger.trace(f"Building {name!r}: {line}")
+
+        logger.info(f"{name!r} built as {destination!r}")
+        return destination
+
+    async def oneshot(self, name: str) -> None:
+        image = self.get_image_name(name)
+
+    @asynccontextmanager
+    async def run_ns(self, name: str) -> AsyncGenerator[tuple[str, Namespace], None]:
+        run_prefix = self.generate_name()
+        res = await self.client.create(
+            Namespace(
+                metadata=ObjectMeta(
+                    name=f"{name}-{run_prefix}",
+                )
+            )
+        )
+
+        if not res.metadata or not res.metadata.name:
+            raise ImpossibleError(f"{res = }")
+
+        try:
+            yield res.metadata.name, res
+        finally:
+            await self.client.delete(
+                Namespace,
+                name=res.metadata.name,
+                grace_period=0,
+                cascade=CascadeType.FOREGROUND,
+            )
+            logger.info(f"Cleaned namespace '{res.metadata.name}'")
+
+    async def service(self, name: str, compose: Compose, flag: str) -> None:
+        # images: dict[str, str] = {}
+        containers: dict[str, Container] = {}
+        # build stage
+        for svc_name, svc in compose.services.items():
+            if not svc.build:
+                if not svc.image:
+                    raise Exception("service defines neither build nor image")
+                image = svc.image
+            elif isinstance(svc.build, Path):
+                # TODO: do not build on every run
+                image = await self.build(f"{name}-{svc_name}", svc.build)
+            else:
+                image = await self.build(
+                    f"{name}-{svc_name}",
+                    svc.build.context,
+                    dockerfile=svc.build.dockerfile,
+                )
+
+            image = self.fix_image_name(image)
+
+            containers[svc_name] = Container(
+                name=svc_name,
+                image=image,
+                command=svc.prepared_command,
+                ports=[ContainerPort(port.internal_port) for port in svc.ports],
+                # env=[],
+            )
+
+        def patch_container(container: Container, secret: Secret, key: str) -> Container:
+            if not check_meta(secret.metadata):
+                raise ImpossibleError
+
+            if container.env is None:
+                container.env = []
+            container.env.append(
+                EnvVar(
+                    name=key,
+                    valueFrom=EnvVarSource(
+                        secretKeyRef=SecretKeySelector(
+                            name=secret.metadata.name,
+                            key=key,
+                        ),
+                    ),
+                ),
+ ) + + return container + + # run stage + async with ( + self.run_ns(f"{name}") as (ns_name, ns), + self.client.ctx( + Secret( + metadata=ObjectMeta( + name=f"{name}-flag", + namespace=ns_name, + ), + immutable=True, + stringData={"FLAG": flag}, + ), + ) as flag_secret, + AsyncExitStack() as deployments_exit_stack, + ): + services: list[Service] = [] + + for svc_name, container in containers.items(): + patch_container(container, flag_secret, "FLAG") + + if container.ports: + port = container.ports[0] # TODO: handle multiple ports... + service = self.client.ctx( + self.client.simple_service( + svc_name, + ns_name, + port.containerPort, + [self._BASE_IP], + ), + ) + service = await deployments_exit_stack.enter_async_context(service) + services.append(service) + + deployment = self.client.ctx( + self.client.simple_deployment( + svc_name, + ns_name, + PodSpec(containers=[container]), + ), + ) + await deployments_exit_stack.enter_async_context(deployment) + + for service in services: + if not service.spec or not service.spec.externalIPs or not service.spec.ports: + raise ImpossibleError + + addr = f"http://{service.spec.externalIPs[0]}:{service.spec.ports[0].nodePort}" + + logger.info( + f"Started at {service.spec.externalIPs} -> {[i.nodePort for i in service.spec.ports]}; " + f"{addr = }; " # ... + f"{service.spec.clusterIPs = }", + ) + + input(f"...?: ") + + async def test(self): + logger.info("Simple cluster status:") + async for ns in self.client.list(Namespace): + if not ns.metadata or not ns.metadata.name: + logger.warning(f"{ns = } no metadata or name") + continue + + logger.info(f"Found ns: {ns.metadata.name}") + + async for pod in self.client.list(Pod, namespace="*"): + if not pod.metadata or not pod.status: + logger.warning(f"{pod = } no metadata / status") + continue + + logger.info(f"{pod.metadata.namespace}.{pod.metadata.name}: {pod.status.podIPs}") + + +class KubeConnector(BaseConnector): + api: KubeApi + + def __init__(self) -> None: + self.api = KubeApi() + super().__init__() + + async def init(self) -> None: + await self.api.init() + + async def test(self) -> None: + await self.api.test() + + async def close(self) -> None: + await self.api.close() + + async def start(self, task_info: DynamicTaskInfo) -> None: + raise NotImplementedError + + async def stop(self, task_info: DynamicTaskInfo) -> None: + raise NotImplementedError + + async def restart(self, task_info: DynamicTaskInfo) -> None: + raise NotImplementedError + + async def info(self, task_info: DynamicTaskInfo) -> None: + raise NotImplementedError diff --git a/dynamic_tasks_app/connectors/kub/client.py b/dynamic_tasks_app/connectors/kub/client.py new file mode 100644 index 0000000..dabdcbd --- /dev/null +++ b/dynamic_tasks_app/connectors/kub/client.py @@ -0,0 +1,169 @@ +from collections.abc import AsyncGenerator, Callable +from contextlib import asynccontextmanager +from typing import TypeGuard, TypeVar, overload + +from lightkube.core import resource as r +from lightkube.core.async_client import AsyncClient +from lightkube.core.client import AllNamespacedResource, GlobalResource +from lightkube.core.exceptions import ObjectDeleted +from lightkube.models.apps_v1 import DeploymentSpec +from lightkube.models.batch_v1 import JobSpec +from lightkube.models.core_v1 import ( + PodSpec, + PodTemplateSpec, + ServicePort, + ServiceSpec, +) +from lightkube.models.meta_v1 import LabelSelector, ObjectMeta +from lightkube.resources.apps_v1 import Deployment +from lightkube.resources.batch_v1 import Job +from 
lightkube.resources.core_v1 import Service +from lightkube.types import CascadeType +from loguru import logger + + +class ImpossibleError(Exception): + pass + + +class NonOptMeta(ObjectMeta): + name: str + namespace: str + + +def check_meta(metadata: ObjectMeta | None) -> TypeGuard[NonOptMeta]: + return bool(metadata and metadata.name and metadata.namespace) + + +_T = TypeVar("_T", bound=r.NamespacedResource) + + +class AsyncClientEx(AsyncClient): + @overload + async def wait_ex( + self, + res: type[GlobalResource], + name: str, + *, + cb: Callable[[dict], bool], + ) -> GlobalResource: ... + + @overload + async def wait_ex( + self, + res: type[AllNamespacedResource], + name: str, + *, + cb: Callable[[dict], bool], + namespace: str | None = None, + ) -> AllNamespacedResource: ... + + async def wait_ex( + self, + res, # type[GlobalResource] | type[AllNamespacedResource] + name: str, + *, + cb: Callable[[dict], bool], + namespace: str | None = None, + ): + """ + Wait for specified conditions, but better. + + **parameters** + + * **res** - Resource kind. + * **name** - Name of resource to wait for. + * **namespace** - *(optional)* Name of the namespace containing the object (Only for namespaced resources). + """ + + kind = r.api_info(res).plural + full_name = f"{kind}/{name}" + + watch = self.watch( + res, + namespace=namespace, # pyright: ignore[reportArgumentType] + fields={"metadata.name": name}, + ) + try: + async for op, obj in watch: + if obj.status is None: + continue + + if op == "DELETED": + raise ObjectDeleted(full_name) + + try: + status = obj.status.to_dict() + except AttributeError: + status = obj.status + + if cb(status): + return obj + finally: + # we ensure the async generator is closed before returning + await watch.aclose() # pyright: ignore[reportAttributeAccessIssue] + + def simple_job(self, name: str, namespace: str, pod_spec: PodSpec) -> Job: + return Job( + metadata=ObjectMeta(name=name, namespace=namespace), + spec=JobSpec( + template=PodTemplateSpec( + metadata=ObjectMeta(labels={"app.kubernetes.io/name": name}), + spec=pod_spec, + ), + ), + ) + + def simple_deployment(self, name: str, namespace: str, pod_spec: PodSpec, replicas: int = 1) -> Deployment: + return Deployment( + metadata=ObjectMeta(name=name, namespace=namespace), + spec=DeploymentSpec( + replicas=replicas, + selector=LabelSelector(matchLabels={"app.kubernetes.io/name": name}), + template=PodTemplateSpec( + metadata=ObjectMeta(labels={"app.kubernetes.io/name": name}), + spec=pod_spec, + ), + ), + ) + + def simple_service(self, name: str, namespace: str, target_port: int, extrenal_ips: list[str]) -> Service: + return Service( + metadata=ObjectMeta( + name=name, + namespace=namespace, + ), + spec=ServiceSpec( + type="NodePort", + externalIPs=extrenal_ips, + selector={"app.kubernetes.io/name": name}, + ports=[ServicePort(port=target_port)], + ), + ) + + @asynccontextmanager + async def ctx( + self, + resource: _T, + *, + grace_period: int | None = None, + cascade: CascadeType | None = None, + ) -> AsyncGenerator[_T, None]: + res = await self.create(resource) + + # FIXME: make this better. I don't want to do it now. 
+        meta: ObjectMeta = res.metadata  # pyright: ignore[reportAttributeAccessIssue]
+        if not check_meta(meta):
+            raise ImpossibleError(f"{res = }")
+
+        try:
+            yield res
+        finally:
+            await self.delete(
+                type(resource),
+                meta.name,
+                namespace=meta.namespace,
+                grace_period=grace_period,  # pyright: ignore[reportArgumentType]  # lib
+                cascade=cascade,  # pyright: ignore[reportArgumentType]  # lib
+            )
+            logger.info(f"Cleaned '{type(resource).__name__}/{meta.namespace}.{meta.name}'")
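+
+
+# Usage sketch (illustrative): ctx() pairs create/delete around a block, which
+# is how the kub connector stacks deployments and services, e.g.:
+#
+#   async with client.ctx(client.simple_deployment(name, ns, pod_spec)) as dep:
+#       ...  # the Deployment is deleted again on exit, even if the block raises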
diff --git a/dynamic_tasks_app/readme.md b/dynamic_tasks_app/readme.md
index 73e7184..82e2e7a 100644
--- a/dynamic_tasks_app/readme.md
+++ b/dynamic_tasks_app/readme.md
@@ -14,7 +14,7 @@
 
 The task author provides either:
 
-- for file-based tasks: a `Dockerfile` that builds a container which, when run, generates the task's files into a special directory like `/export`, which is then archived and handed to the user.
+- for file-based tasks: a `Dockerfile` that builds a container which, when run, generates the task's files into a special directory like `/export`, which is then archived and handed to the user. This dockerfile MUST be based on the so-called file-task manager image, which implements running the author's generator and pushing the resulting files to s3.
 - for service-based tasks: a `docker-compose.yaml` that exposes some port, plus a description of the port type - http(s) / tcp (plain tcp is fine to start with) (see the example at the end of this file)
 
 The dynamic-task infrastructure consists of:
@@ -37,7 +37,16 @@ The user&task flow goes like this:
 6. The cache for file-based tasks is kept for the whole CTF, generated once.
 7. Service-based tasks get destroyed after a timeout once the flag is submitted, or after a fixed time, say, a couple of hours. Maybe add a notification/confirmation that the task is still being worked on?
 
-More inspiration can be found here:
+More inspiration can be found here:
+
+A few peculiarities of the network infrastructure are worth noting; they should be solved for a full-scale CTF (not as critical for the MVP):
+
+1. There must be strictly no connectivity between containers belonging to different players.
+2. It may be worth putting an L4 userland proxy that checks team tokens in front of the TCP services.
+   1. We probably also want this so as not to expose the worker nodes' IPs.
+   2. Though we can't get away from that entirely: with a task that has, say, an SSRF, the worker's IP can be leaked anyway (solvable with a shared network NATed on egress - it all depends on the kube setup).
+   3. Or we'd have to do some serious kube wizardry to reinvent a load balancer
+3. For HTTP tasks it's not as critical, since the plan is to handle that with unique subdomains, which effectively are the tokens.
 
 ## Breakdown into subtasks
 
@@ -45,10 +54,26 @@
 1. [ ] For the ~~first~~ MVP version I need to pick one orchestrator that won't be a total pain for me. Options:
    1. [ ] k8s-like api (k3s in my case).
       1. [x] Get a very basic understanding of how kube operates
       2. [x] Learn to bring it up in at least some form
+         1. [x] k3s single node (nix)
+         2. [ ] k3s multi node
+         3. [ ] original k8s...?
       3. [x] Learn to build containers
+         1. [x] Settled on kaniko. People say it's slow, but seems fine so far
+      4. [ ] Learn to build static tasks
+      5. [ ] Learn to serve static tasks
+      6. [x] Learn to run containers
+      7. [x] Learn to run composes
+         1. [ ] Caveat - only simple ones.
+         2. [ ] Need to find examples of complex composes that would start incorrectly for me
+         3. [ ] And learn to run those
+         4. [ ] Fix the compose being rebuilt on every run (slow)
+      8. [ ] Learn to expose ports to the outside
+         1. [x] TCP
+         2. [ ] HTTP(s)
+            1. [ ] DNS
+               1. [ ] Self-Hosted
+               2. [ ] Cloudflare
+            2. [ ] Learn to work magic with a reverse proxy
    2. [ ] Docker Swarm - it's simple, but not very feature-rich.
       1. [ ] Learn to bring it up in at least some form
       2. [ ] Learn to build containers
       3. [ ] Learn to run containers
       4. [ ] Learn to run composes
       5. [ ] Learn to expose ports to the outside
From 290a94941652afa2e96f835f55acca15c5d88604 Mon Sep 17 00:00:00 2001
From: Rubikoid
Date: Tue, 23 Jul 2024 00:59:16 +0300
Subject: [PATCH 12/42] Add & update some examples for testing the system

---
 .../tests/examples/builder/Dockerfile         |  2 +-
 .../tests/examples/service/Dockerfile         | 12 ++++++++++++
 .../tests/examples/service/docker-compose.yml |  5 +++++
 .../tests/examples/service/requirements.txt   |  2 ++
 .../tests/examples/service/src/__init__.py    | 19 +++++++++++++++++++
 5 files changed, 39 insertions(+), 1 deletion(-)
 create mode 100644 dynamic_tasks_app/tests/examples/service/Dockerfile
 create mode 100644 dynamic_tasks_app/tests/examples/service/docker-compose.yml
 create mode 100644 dynamic_tasks_app/tests/examples/service/requirements.txt
 create mode 100644 dynamic_tasks_app/tests/examples/service/src/__init__.py

diff --git a/dynamic_tasks_app/tests/examples/builder/Dockerfile b/dynamic_tasks_app/tests/examples/builder/Dockerfile
index 672d9c2..21eb85e 100644
--- a/dynamic_tasks_app/tests/examples/builder/Dockerfile
+++ b/dynamic_tasks_app/tests/examples/builder/Dockerfile
@@ -1,4 +1,4 @@
-FROM python:3.12-slim
+FROM rubikoid/yatb-k8s-builder-base:latest
 
 WORKDIR /app
 
diff --git a/dynamic_tasks_app/tests/examples/service/Dockerfile b/dynamic_tasks_app/tests/examples/service/Dockerfile
new file mode 100644
index 0000000..7057f83
--- /dev/null
+++ b/dynamic_tasks_app/tests/examples/service/Dockerfile
@@ -0,0 +1,12 @@
+FROM python:3.12-slim
+
+WORKDIR /app
+
+COPY requirements.txt ./
+
+RUN pip install --no-cache-dir -r requirements.txt
+
+COPY src src
+
+ENTRYPOINT [ "uvicorn", "src:app" ]
+CMD [ "--host=0.0.0.0", "--port=80" ]
diff --git a/dynamic_tasks_app/tests/examples/service/docker-compose.yml b/dynamic_tasks_app/tests/examples/service/docker-compose.yml
new file mode 100644
index 0000000..ef93ea9
--- /dev/null
+++ b/dynamic_tasks_app/tests/examples/service/docker-compose.yml
@@ -0,0 +1,5 @@
+version: "3"
+
+services:
+  main:
+    build: .
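+    # no ports are listed here: with an empty ports list the runner starts the
+    # pod but creates no Service (see the `if container.ports:` guard in the kub
+    # connector), so this example is only reachable from inside the cluster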
diff --git a/dynamic_tasks_app/tests/examples/service/requirements.txt b/dynamic_tasks_app/tests/examples/service/requirements.txt new file mode 100644 index 0000000..97dc7cd --- /dev/null +++ b/dynamic_tasks_app/tests/examples/service/requirements.txt @@ -0,0 +1,2 @@ +fastapi +uvicorn diff --git a/dynamic_tasks_app/tests/examples/service/src/__init__.py b/dynamic_tasks_app/tests/examples/service/src/__init__.py new file mode 100644 index 0000000..f46a246 --- /dev/null +++ b/dynamic_tasks_app/tests/examples/service/src/__init__.py @@ -0,0 +1,19 @@ +from fastapi import FastAPI +import os + +from fastapi.responses import PlainTextResponse + +FLAG = os.environ.get("FLAG", "flag{example}") + + +app = FastAPI() + + +@app.get("/flag") +async def flag() -> PlainTextResponse: + return PlainTextResponse(f"Flag: {FLAG}") + + +@app.get("/") +async def index() -> PlainTextResponse: + return PlainTextResponse("Alive") From a4dd8adfc193376f3c20b6aa50f1bb647141c51f Mon Sep 17 00:00:00 2001 From: Rubikoid Date: Tue, 23 Jul 2024 00:59:28 +0300 Subject: [PATCH 13/42] Add static tasks builder manager --- dynamic_tasks_app/extra/Dockerfile | 10 ++++++++++ dynamic_tasks_app/extra/manager.py | 23 +++++++++++++++++++++++ 2 files changed, 33 insertions(+) create mode 100644 dynamic_tasks_app/extra/Dockerfile create mode 100644 dynamic_tasks_app/extra/manager.py diff --git a/dynamic_tasks_app/extra/Dockerfile b/dynamic_tasks_app/extra/Dockerfile new file mode 100644 index 0000000..cb4e81a --- /dev/null +++ b/dynamic_tasks_app/extra/Dockerfile @@ -0,0 +1,10 @@ +FROM python:3.12-slim + +WORKDIR /build-manager + +RUN python3 -m venv .venv +RUN .venv/bin/pip install --no-cache-dir "cyclopts==2.9.3" "rich==13.7.1" "httpx==0.22.0" + +COPY manager.py manager.py + +ENTRYPOINT ["/build-manager/.venv/bin/python3", "/build-manager/manager.py"] diff --git a/dynamic_tasks_app/extra/manager.py b/dynamic_tasks_app/extra/manager.py new file mode 100644 index 0000000..0a9551d --- /dev/null +++ b/dynamic_tasks_app/extra/manager.py @@ -0,0 +1,23 @@ +#!/usr/bin/env python3 +# just a very simple builder + +import rich +from cyclopts import App +from cyclopts.config import Env + +app = App( + "Task building manager", + config=Env(""), +) +c = rich.console.Console() + + +@app.command() +def run( + flag: str, +): + c.log(f"Building task with {flag = }") + + +if __name__ == "__main__": + app() From 64f4876c10df052773fd4a4bd9d33bbff7114d8f Mon Sep 17 00:00:00 2001 From: Rubikoid Date: Tue, 23 Jul 2024 00:59:50 +0300 Subject: [PATCH 14/42] Update dynamic tasks req.txt --- requirements-dynamic.txt | 3 +++ 1 file changed, 3 insertions(+) diff --git a/requirements-dynamic.txt b/requirements-dynamic.txt index 120f261..0c7a660 100644 --- a/requirements-dynamic.txt +++ b/requirements-dynamic.txt @@ -10,3 +10,6 @@ fastapi==0.101.1 uvicorn==0.17.6 loguru==0.7.2 + +ruamel.yaml==0.18.6 +cyclopts==2.9.3 From 321f2cce67666289e4e0b02d5eadf85e6fb0ee66 Mon Sep 17 00:00:00 2001 From: Rubikoid Date: Thu, 25 Jul 2024 03:31:36 +0300 Subject: [PATCH 15/42] WIP: Move from NodePort --- dynamic_tasks_app/connectors/kub/__init__.py | 3 +++ dynamic_tasks_app/connectors/kub/client.py | 12 ++++++++---- 2 files changed, 11 insertions(+), 4 deletions(-) diff --git a/dynamic_tasks_app/connectors/kub/__init__.py b/dynamic_tasks_app/connectors/kub/__init__.py index 3b14293..798a5d2 100644 --- a/dynamic_tasks_app/connectors/kub/__init__.py +++ b/dynamic_tasks_app/connectors/kub/__init__.py @@ -427,6 +427,7 @@ def patch_container(container: Container, secret: Secret, key: 
str) -> Container
         self.client.simple_service(
             svc_name,
             ns_name,
+            31337,
             port.containerPort,
             [self._BASE_IP],
         ),
@@ -447,6 +448,8 @@ def patch_container(container: Container, secret: Secret, key: str) -> Container
         if not service.spec or not service.spec.externalIPs or not service.spec.ports:
             raise ImpossibleError
 
+        logger.info(f"{service = }")
+
         addr = f"http://{service.spec.externalIPs[0]}:{service.spec.ports[0].nodePort}"
 
diff --git a/dynamic_tasks_app/connectors/kub/client.py b/dynamic_tasks_app/connectors/kub/client.py
index dabdcbd..30a3c14 100644
--- a/dynamic_tasks_app/connectors/kub/client.py
+++ b/dynamic_tasks_app/connectors/kub/client.py
@@ -127,17 +127,21 @@ def simple_deployment(self, name: str, namespace: str, pod_spec: PodSpec, replic
             ),
         )
 
-    def simple_service(self, name: str, namespace: str, target_port: int, extrenal_ips: list[str]) -> Service:
+    def simple_service(
+        self, name: str, namespace: str, external_port: int, target_port: int, external_ips: list[str]
+    ) -> Service:
         return Service(
             metadata=ObjectMeta(
                 name=name,
                 namespace=namespace,
             ),
             spec=ServiceSpec(
-                type="NodePort",
-                externalIPs=extrenal_ips,
+                # type="LoadBalancer",
+                # allocateLoadBalancerNodePorts=False,
+                # type="NodePort",
+                externalIPs=external_ips,
                 selector={"app.kubernetes.io/name": name},
-                ports=[ServicePort(port=target_port)],
+                ports=[ServicePort(port=external_port, targetPort=target_port)],
             ),
         )
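The patch below keys kaniko builds on a content digest of the build context: the gzipped tar of the source directory is hashed, the sha256 hexdigest is pushed to the registry as an extra tag of the built image, and the build is skipped when that tag already exists. A minimal standalone sketch of the digest step (the helper name is illustrative, not part of the patch):

    import hashlib
    import io
    import tarfile
    from pathlib import Path


    def context_digest(source: Path) -> str:
        # sha256 of the gzipped build context; pushed as an image tag so the
        # next build of an identical context can be skipped
        with io.BytesIO() as buff:
            with tarfile.open(fileobj=buff, mode="w:gz") as tar:
                for file in source.iterdir():
                    tar.add(file, arcname=file.relative_to(source))
            return hashlib.sha256(buff.getbuffer()).hexdigest()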
From 19aa2cce53794f60d57c4615f95bf7c8c4dad Mon Sep 17 00:00:00 2001
From: Rubikoid
Date: Sun, 4 Aug 2024 20:04:58 +0300
Subject: [PATCH 16/42] Don't rebuild already built images

Implemented by pushing a special sha256 hexdigest of the input build
context as an extra image tag and checking for that tag's existence.

---
 dynamic_tasks_app/connectors/kub/__init__.py | 49 ++++++++++++++++++--
 requirements-dynamic.txt                     |  2 +
 2 files changed, 46 insertions(+), 5 deletions(-)

diff --git a/dynamic_tasks_app/connectors/kub/__init__.py b/dynamic_tasks_app/connectors/kub/__init__.py
index 798a5d2..87163a3 100644
--- a/dynamic_tasks_app/connectors/kub/__init__.py
+++ b/dynamic_tasks_app/connectors/kub/__init__.py
@@ -1,5 +1,6 @@
 import asyncio
 import base64
+import hashlib
 import io
 import json
 import random
@@ -10,6 +11,8 @@
 from pathlib import Path, PurePosixPath
 from typing import TypeGuard, TypeVar
 
+from aiohttp.client_exceptions import ClientResponseError
+from docker_registry_client_async import DockerRegistryClientAsync, FormattedSHA256, ImageName, Manifest
 from lightkube import operators as op
 from lightkube.config.kubeconfig import KubeConfig
 from lightkube.core.async_client import AsyncClient
@@ -61,13 +64,13 @@ class KubeApi:
     _BASE_IP: str = "192.168.1.44"
 
     BUILD_BUCKET_NAME: str = "dynamic-tasks-build-source"
-    # BUILD_NAMESPACE: str = "yatb-build-namespace"
     BUILD_NAMESPACE: str = "yatb-build"
 
-    RUN_NAMESPACE: str = "yatb-run"
+    RUN_NAMESPACE: str = "yatb-run"  # not used now... :hm
 
     client: AsyncClientEx
     s3: Minio
+    drca: DockerRegistryClientAsync
 
     async def init(self) -> None:
         # setup kube
@@ -82,6 +85,9 @@ async def init(self) -> None:
             secure=False,  # http for False, https for True
         )
 
+        DockerRegistryClientAsync.DEFAULT_PROTOCOL = "http"  # FIXME: tmp
+        self.drca = DockerRegistryClientAsync()
+
         # setup buckets
         await self.setup_s3()
 
@@ -90,6 +96,7 @@ async def init(self) -> None:
 
     async def close(self) -> None:
         await self.client.close()
+        await self.drca.close()
 
     async def setup_namespaces(self) -> None:
         for ns in [self.BUILD_NAMESPACE]:
             logger.info(f"Checking for {ns = } existence")
@@ -164,6 +171,7 @@ async def build(
         destination_override: str | None = None,
         secrets: list[Secret] | None = None,
         dockerfile: Path | str = Path("Dockerfile"),
+        skip_build: bool = False,
     ) -> str:
         assert source.is_absolute()
         assert source.is_dir()
@@ -172,12 +180,37 @@ async def build(
         build_name = name  # self.generate_name()
         raw_img_name = f"{build_name}.tar.gz"
 
+        # some customization
+        destination = destination_override or self.get_image_name(build_name)
+
+        if skip_build:
+            return destination
+
         with io.BytesIO() as buff:
             with tarfile.open(fileobj=buff, mode="w:gz") as tar:
                 for file in source.iterdir():
                     tar.add(file, arcname=file.relative_to(source))  # strip the long absolute path
             buff.seek(0)  # reset to 0. because... you knew.
 
+            # calc the tar hash and check whether this context was already built
+            hash_digest = hashlib.sha256(buff.getbuffer()).hexdigest()
+
+            try:
+                tags_resp = await self.drca.get_tags(
+                    ImageName.parse(destination),
+                )
+
+                if hash_digest in tags_resp.tags["tags"]:
+                    logger.info(
+                        f"{hash_digest} found in {tags_resp.tags = } for {destination = }, skipping the rebuild",
+                    )
+                    return destination
+
+            except ClientResponseError as ex:
+                if ex.status != 404:
+                    raise
+                logger.info(f"No image for {destination = } exists so far")
+
             size = len(buff.getbuffer())
             await self.s3.put_object(
                 self.BUILD_BUCKET_NAME,
@@ -190,9 +223,7 @@ async def build(
             f"Uploaded archive from {source} ({size = }) as 's3://{self.BUILD_BUCKET_NAME}/{raw_img_name}'",
         )
 
-        # some customization
-        destination = destination_override or self.get_image_name(build_name)
-
+        # more customization
         volume_mounts: list[VolumeMount] = []
         volumes: list[Volume] = []
         for secret in secrets or []:
@@ -321,7 +352,15 @@ async def build(
                 newlines=False,
             ):
                 logger.trace(f"Building {name!r}: {line}")
+
         logger.info(f"{name!r} built as {destination!r}")
+
+        # upload the special caching tag
+        img_name = ImageName.parse(destination)
+        manifest = await self.drca.get_manifest(img_name)
+        patched_img = img_name.clone().set_tag(hash_digest)
+        await self.drca.put_manifest(patched_img, manifest.manifest)
+
         return destination
 
     async def oneshot(self, name: str) -> None:
diff --git a/requirements-dynamic.txt b/requirements-dynamic.txt
index 0c7a660..09eb9ab 100644
--- a/requirements-dynamic.txt
+++ b/requirements-dynamic.txt
@@ -13,3 +13,5 @@ loguru==0.7.2
 
 ruamel.yaml==0.18.6
 cyclopts==2.9.3
+
+docker-registry-client-async==0.2.11
From f0b7a3ffe49c62a38d240b8919d12b89a05579e8 Mon Sep 17 00:00:00 2001
From: Rubikoid
Date: Wed, 7 Aug 2024 18:12:08 +0300
Subject: [PATCH 17/42] Dev dynamic tasks app

---
 dynamic_tasks_app/__init__.py                 |  50 --
 dynamic_tasks_app/__main__.py                 |  69 ++-
 dynamic_tasks_app/connectors/__init__.py      | 155 ++++-
 dynamic_tasks_app/connectors/errors.py        |   2 +
 dynamic_tasks_app/connectors/kub/__init__.py  | 552 +-----------------
 dynamic_tasks_app/connectors/kub/api.py       | 518 ++++++++++++++++
 dynamic_tasks_app/controllers/__init__.py     |   2 +
 .../controllers/expiration_controller.py      | 112
++++ .../controllers/ports_controller.py | 42 ++ dynamic_tasks_app/utils/__init__.py | 0 dynamic_tasks_app/utils/asc.py | 14 + dynamic_tasks_app/web.py | 81 +++ 12 files changed, 997 insertions(+), 600 deletions(-) create mode 100644 dynamic_tasks_app/connectors/errors.py create mode 100644 dynamic_tasks_app/connectors/kub/api.py create mode 100644 dynamic_tasks_app/controllers/__init__.py create mode 100644 dynamic_tasks_app/controllers/expiration_controller.py create mode 100644 dynamic_tasks_app/controllers/ports_controller.py create mode 100644 dynamic_tasks_app/utils/__init__.py create mode 100644 dynamic_tasks_app/utils/asc.py create mode 100644 dynamic_tasks_app/web.py diff --git a/dynamic_tasks_app/__init__.py b/dynamic_tasks_app/__init__.py index f42d0fd..e69de29 100644 --- a/dynamic_tasks_app/__init__.py +++ b/dynamic_tasks_app/__init__.py @@ -1,50 +0,0 @@ -from contextlib import asynccontextmanager - -from fastapi import APIRouter, FastAPI - -from .config import settings -from .connectors import DynamicTaskInfo -from .connectors.kub import KubeConnector - -# WTF: tmp for dev -connector = KubeConnector() - - -@asynccontextmanager -async def lifespan(app: FastAPI): - await connector.init() - try: - yield - finally: - await connector.close() - - -app = FastAPI( - lifespan=lifespan, -) - -# TODO: token check -router = APIRouter( - prefix="/api", - tags=["api"], -) - - -@router.post("/start") -async def api_start(task_info: DynamicTaskInfo): - return await connector.start(task_info) - - -@router.post("/stop") -async def api_stop(task_info: DynamicTaskInfo): - return await connector.stop(task_info) - - -@router.post("/restart") -async def api_restart(task_info: DynamicTaskInfo): - return await connector.restart(task_info) - - -@router.post("/info") -async def api_info(task_info: DynamicTaskInfo): - return await connector.info(task_info) diff --git a/dynamic_tasks_app/__main__.py b/dynamic_tasks_app/__main__.py index 456c928..4d0b748 100644 --- a/dynamic_tasks_app/__main__.py +++ b/dynamic_tasks_app/__main__.py @@ -1,4 +1,5 @@ import asyncio +from contextlib import AsyncExitStack from pathlib import Path from cyclopts import App @@ -17,16 +18,24 @@ async def build( source: Path = Path(__file__).resolve().parent / "extra", name: str = "yatb-k8s-builder-base", tag: str = "latest", + registry: str = "docker.io/rubikoid", ) -> None: - async with ( - KubeConnector() as x, - x.api.docker_config_json_secret(docker_login, docker_password) as docker_json_secret, - ): + source = source.resolve() + + async with AsyncExitStack() as exit_stack: + x = await exit_stack.enter_async_context(KubeConnector()) + + secrets = [] + if registry == "docker.io/rubikoid": + raw_docker_json_secret = x.api.docker_config_json_secret(docker_login, docker_password) + docker_json_secret = await exit_stack.enter_async_context(raw_docker_json_secret) + secrets.append(docker_json_secret) + await x.api.build( name, source, - destination_override=f"rubikoid/yatb-k8s-builder-base:{tag}", - secrets=[docker_json_secret], + destination_override=f"{registry}/{name}:{tag}", + secrets=secrets, ) @@ -35,32 +44,54 @@ async def run_service( src: Path, name: str | None = None, flag: str | None = None, + *, + skip_build: bool = False, ) -> None: src = src.resolve() compose = load_compose(src) name = name or src.name - flag = flag or "crab{TEST}" + flag = flag or "flag{TEST}" async with KubeConnector() as x: - await x.api.service(name, compose, "flag{TEST}") + stack = await x.api.service( + name, + compose, + flag, + 
host=x.api._BASE_IP, + port=31337, + skip_build=skip_build, + ) + input("...?>") + await stack.aclose() @app.command() -async def test(): - async with KubeConnector() as x: - src = Path("dynamic_tasks_app") / "tests" / "examples" / "service" - src = src.resolve() - compose = load_compose(src) - await x.api.service("test-svc", compose, "flag{TEST}") +async def test_service() -> None: + src = Path("dynamic_tasks_app") / "tests" / "examples" / "service" + src = src.resolve() - await x.test() + compose = load_compose(src) - # src = Path("dynamic_tasks_app") / "tests" / "examples" / "builder" - # src = src.resolve() - # name = "test-image" + name = "test-serivce" + flag = "flag{TEST}" - # await x.api.build(name, src) + async with KubeConnector() as x: + stack = await x.api.service( + name, + compose, + flag, + host=x.api._BASE_IP, + port=31337, + ) + input("...?>") + await stack.aclose() + + +@app.command() +async def test(): + async with KubeConnector() as x: + await x.test() if __name__ == "__main__": diff --git a/dynamic_tasks_app/connectors/__init__.py b/dynamic_tasks_app/connectors/__init__.py index 80ffa9e..85d1751 100644 --- a/dynamic_tasks_app/connectors/__init__.py +++ b/dynamic_tasks_app/connectors/__init__.py @@ -1,11 +1,19 @@ +import datetime from abc import ABC, abstractmethod +from contextlib import AsyncExitStack +from dataclasses import dataclass from enum import Enum from types import TracebackType -from typing import AsyncContextManager, Self -from uuid import UUID +from typing import Self +from uuid import UUID, uuid4 +from loguru import logger from pydantic import BaseModel +from ..controllers import ExpirationController, PortsController +from ..controllers.ports_controller import HostPortPair +from .errors import GenericConnectorError + class DynamicTaskType(Enum): BUILDER = "builder" @@ -13,13 +21,79 @@ class DynamicTaskType(Enum): class DynamicTaskInfo(BaseModel): + name: str descriptor: UUID + type: DynamicTaskType user_id: str +@dataclass +class LocalTaskInfo: + id: UUID + + task_descriptor: UUID + user_id: str + + _info: DynamicTaskInfo + + expiration_id: UUID | None = None + hp: HostPortPair | None = None + + @property + def hp_ok(self) -> HostPortPair: + if not self.hp: + raise Exception + return self.hp + + @property + def expiration_id_ok(self) -> UUID: + if not self.expiration_id: + raise Exception + return self.expiration_id + + @classmethod + def build( + cls, + info: DynamicTaskInfo, + ) -> Self: + return cls( + id=uuid4(), + task_descriptor=info.descriptor, + user_id=info.user_id, + _info=info, + ) + + +class ExternalDynamicTaskInfo(BaseModel): + id: UUID + + task_descriptor: UUID + + user_id: str + + hp: HostPortPair + + least_time: datetime.timedelta + + class BaseConnector(ABC): + tasks: dict[UUID, LocalTaskInfo] + tasks_index: dict[tuple[UUID, str], LocalTaskInfo] + + expiration_controller: ExpirationController + ports_controller: PortsController + + def __init__(self, expiration_controller: ExpirationController, ports_controller: PortsController) -> None: + super().__init__() + + self.tasks = {} + self.tasks_index = {} + + self.expiration_controller = expiration_controller + self.ports_controller = ports_controller + async def __aenter__(self) -> Self: await self.init() return self @@ -34,24 +108,83 @@ async def __aexit__( @abstractmethod async def init(self) -> None: - pass + raise NotImplementedError - @abstractmethod async def close(self) -> None: - pass + await self.ports_controller.close() + await self.expiration_controller.close() @abstractmethod - 
async def start(self, task_info: DynamicTaskInfo) -> None: + async def _start(self, task_info: LocalTaskInfo) -> AsyncExitStack: raise NotImplementedError @abstractmethod - async def stop(self, task_info: DynamicTaskInfo) -> None: + async def _stop(self, task_info: LocalTaskInfo) -> None: raise NotImplementedError @abstractmethod - async def restart(self, task_info: DynamicTaskInfo) -> None: + async def _restart(self, task_info: LocalTaskInfo) -> None: raise NotImplementedError - @abstractmethod - async def info(self, task_info: DynamicTaskInfo) -> None: - raise NotImplementedError + async def _info(self, ltask_info: LocalTaskInfo) -> ExternalDynamicTaskInfo: + expiration_info = self.expiration_controller.get(ltask_info.expiration_id_ok) + + return ExternalDynamicTaskInfo( + id=ltask_info.id, + task_descriptor=ltask_info.task_descriptor, + user_id=ltask_info.user_id, + hp=ltask_info.hp_ok, + least_time=expiration_info.time_left, + ) + + def init_ltask_info(self, task_info: DynamicTaskInfo) -> LocalTaskInfo: + k = (task_info.descriptor, task_info.user_id) + + if k in self.tasks_index: + raise GenericConnectorError("This task for your team already exsits") + + self.tasks_index[k] = LocalTaskInfo.build(task_info) + return self.tasks_index[k] + + def get_ltask_info(self, task_info: DynamicTaskInfo) -> LocalTaskInfo: + k = (task_info.descriptor, task_info.user_id) + + if k not in self.tasks_index: + raise GenericConnectorError("No task found") + + return self.tasks_index[k] + + async def start(self, task_info: DynamicTaskInfo) -> ExternalDynamicTaskInfo: + ltask_info = self.init_ltask_info(task_info) + ltask_info.hp = self.ports_controller.get_host_and_port() + + logger.info(f"Got port {ltask_info.hp = }") + + stack = await self._start(ltask_info) + stack.callback(lambda: self.ports_controller.free_port(ltask_info.hp_ok)) + + info = await self.expiration_controller.push_stack(stack) + logger.info(f"Pushed {info = }") + ltask_info.expiration_id = info.id + + return await self._info(ltask_info) + + async def stop(self, task_info: DynamicTaskInfo) -> None: + ltask_info = self.get_ltask_info(task_info) + await self._restart(ltask_info) + + async def restart(self, task_info: DynamicTaskInfo) -> None: + ltask_info = self.get_ltask_info(task_info) + await self._restart(ltask_info) + + async def extend(self, task_info: DynamicTaskInfo) -> None: + ltask_info = self.get_ltask_info(task_info) + self.expiration_controller.extend_life(ltask_info.expiration_id_ok, datetime.timedelta(minutes=1)) + + async def info_task(self, task_info: DynamicTaskInfo) -> ExternalDynamicTaskInfo: + ltask_info = self.get_ltask_info(task_info) + return await self._info(ltask_info) + + async def info_id(self, dynamic_task_id: UUID) -> ExternalDynamicTaskInfo: + ltask_info = self.tasks[dynamic_task_id] + return await self._info(ltask_info) diff --git a/dynamic_tasks_app/connectors/errors.py b/dynamic_tasks_app/connectors/errors.py new file mode 100644 index 0000000..f1e396c --- /dev/null +++ b/dynamic_tasks_app/connectors/errors.py @@ -0,0 +1,2 @@ +class GenericConnectorError(Exception): + pass diff --git a/dynamic_tasks_app/connectors/kub/__init__.py b/dynamic_tasks_app/connectors/kub/__init__.py index 87163a3..8e2f654 100644 --- a/dynamic_tasks_app/connectors/kub/__init__.py +++ b/dynamic_tasks_app/connectors/kub/__init__.py @@ -1,527 +1,25 @@ -import asyncio -import base64 -import hashlib -import io -import json -import random -import string -import tarfile -from collections.abc import AsyncGenerator, AsyncIterable -from 
contextlib import AsyncExitStack, asynccontextmanager -from pathlib import Path, PurePosixPath -from typing import TypeGuard, TypeVar +from contextlib import AsyncExitStack -from aiohttp.client_exceptions import ClientResponseError -from docker_registry_client_async import DockerRegistryClientAsync, FormattedSHA256, ImageName, Manifest -from lightkube import operators as op -from lightkube.config.kubeconfig import KubeConfig -from lightkube.core.async_client import AsyncClient -from lightkube.core.exceptions import ApiError -from lightkube.models.apps_v1 import DeploymentSpec -from lightkube.models.batch_v1 import JobSpec -from lightkube.models.core_v1 import ( - Container, - ContainerPort, - EnvVar, - EnvVarSource, - KeyToPath, - PodSpec, - PodTemplateSpec, - SecretKeySelector, - SecretVolumeSource, - ServicePort, - ServiceSpec, - Volume, - VolumeMount, -) -from lightkube.models.meta_v1 import ObjectMeta -from lightkube.resources.apps_v1 import Deployment -from lightkube.resources.batch_v1 import Job -from lightkube.resources.core_v1 import Namespace, Node, Pod, Secret, Service -from lightkube.types import CascadeType from loguru import logger -from miniopy_async import Minio from ...config import settings -from .. import BaseConnector, DynamicTaskInfo -from ..compose import Compose -from .client import AsyncClientEx, ImpossibleError, check_meta - -_T = TypeVar("_T") - - -async def async_to_list(source: AsyncIterable[_T]) -> list[_T]: - return [t async for t in source] - - -async def async_first(source: AsyncIterable[_T]) -> _T: - async for t in source: - return t - raise Exception("not found") - - -class KubeApi: - _BASE_IP: str = "192.168.1.44" - - BUILD_BUCKET_NAME: str = "dynamic-tasks-build-source" - BUILD_NAMESPACE: str = "yatb-build" - - RUN_NAMESPACE: str = "yatb-run" # not used now... 
:hm" - - client: AsyncClientEx - s3: Minio - drca: DockerRegistryClientAsync - - async def init(self) -> None: - # setup kube - config = KubeConfig.from_file(settings.kube_config_path) if settings.kube_config_path else None - self.client = AsyncClientEx(config) # type: ignore # lib broken - - # setup s3 - self.s3 = Minio( - endpoint=settings.s3_endpoint, - access_key=settings.S3_ACCESS, - secret_key=settings.S3_SECRET, - secure=False, # http for False, https for True - ) - - DockerRegistryClientAsync.DEFAULT_PROTOCOL = "http" # FIXME: tmp - self.drca = DockerRegistryClientAsync() - - # setup buckets - await self.setup_s3() - - # setup namespaces - await self.setup_namespaces() - - async def close(self) -> None: - await self.client.close() - await self.drca.close() - - async def setup_namespaces(self) -> None: - for ns in [self.BUILD_NAMESPACE]: - logger.info(f"Checking for {ns = } existance") - try: - res = await self.client.get(Namespace, ns) - except ApiError as ex: - if ex.status.code != 404: # noqa: PLR2004 - logger.warning(f"{ex = } {ex.status = }") - raise - - logger.info(f"{ns = } not found, creating") - res = await self.client.create(Namespace(metadata=ObjectMeta(name=ns))) - logger.info(f"{res = } created") - else: - logger.info(f"{ns = } exists") - - async def setup_network(self) -> None: - # TODO: fix me - pass - - async def setup_s3(self) -> None: - if not await self.s3.bucket_exists(self.BUILD_BUCKET_NAME): - await self.s3.make_bucket(self.BUILD_BUCKET_NAME) - - @classmethod - def generate_name(cls, alphabet: str = string.digits + string.ascii_lowercase, n: int = 16) -> str: - return "".join(random.choices(alphabet, k=n)) # noqa: S311 - - def get_image_name(self, name: str) -> str: - return f"{self._BASE_IP}:5000/prebuild-images/{name}:latest" - - def fix_image_name(self, src: str) -> str: - return src.replace(f"{self._BASE_IP}:5000", "registry.local") - - @asynccontextmanager - async def docker_config_json_secret( - self, - docker_login: str, - docker_password: str, - name: str | None = None, - ) -> AsyncGenerator[Secret, None]: - name = name or f"dockerconfig-{docker_login}" - - auth = base64.b64encode(f"{docker_login}:{docker_password}".encode()).decode() - raw_secret = {"auths": {"https://index.docker.io/v1/": {"auth": auth}}} - encoded_secret = base64.b64encode(json.dumps(raw_secret).encode()).decode() - - async with self.client.ctx( - Secret( - metadata=ObjectMeta( - name=name, - namespace=self.BUILD_NAMESPACE, - annotations={ - "rubikoid.ru/mountVolume-path": "/kaniko/.docker/config.json", - "rubikoid.ru/mountVolume-key": ".dockerconfigjson", - "rubikoid.ru/mountVolume-ro": "True", - }, - ), - type="kubernetes.io/dockerconfigjson", - immutable=True, - data={".dockerconfigjson": encoded_secret}, - ), - ) as secret: - yield secret - - async def build( - self, - name: str, - source: Path, - *, - destination_override: str | None = None, - secrets: list[Secret] | None = None, - dockerfile: Path | str = Path("Dockerfile"), - skip_build: bool = False, - ) -> str: - assert source.is_absolute() - assert source.is_dir() - assert (source / dockerfile).exists() - - build_name = name # self.generate_name() - raw_img_name = f"{build_name}.tar.gz" - - # some customization - destination = destination_override or self.get_image_name(build_name) - - if skip_build: - return destination - - with io.BytesIO() as buff: - with tarfile.open(fileobj=buff, mode="w:gz") as tar: - for file in source.iterdir(): - tar.add(file, arcname=file.relative_to(source)) # string absolute long path - 
buff.seek(0) # reset to 0. because... you knew. - - # calc tar hash and check whenever it already builded - hash_digest = hashlib.sha256(buff.getbuffer()).hexdigest() - - try: - tags_resp = await self.drca.get_tags( - ImageName.parse(destination), - ) - - if hash_digest in tags_resp.tags["tags"]: - logger.info( - f"{hash_digest} found in {tags_resp.tags = } for {destination = }, not building this anymore", - ) - return destination - - except ClientResponseError as ex: - if ex.status != 404: - raise - logger.info(f"No image for {destination = } exists so far") - - size = len(buff.getbuffer()) - await self.s3.put_object( - self.BUILD_BUCKET_NAME, - raw_img_name, - buff, - length=size, - ) - - logger.info( - f"Uploaded archive from {source} ({size = }) as 's3://{self.BUILD_BUCKET_NAME}/{raw_img_name}'", - ) - - # more customization - volume_mounts: list[VolumeMount] = [] - volumes: list[Volume] = [] - for secret in secrets or []: - if not check_meta(secret.metadata) or not secret.metadata.annotations: - raise ImpossibleError - - volume_name = f"{secret.metadata.name}-volume" - secret_key = secret.metadata.annotations["rubikoid.ru/mountVolume-key"] - volumes.append( - Volume( - name=volume_name, - secret=SecretVolumeSource( - secretName=secret.metadata.name, - items=[ - KeyToPath( - secret_key, - path=secret_key, - ) - ], - ), - ) - ) - - mount_path = secret.metadata.annotations["rubikoid.ru/mountVolume-path"] - read_only = bool(secret.metadata.annotations.get("rubikoid.ru/mountVolume-ro", "True")) - volume_mounts.append( - VolumeMount( - name=volume_name, - mountPath=mount_path, - readOnly=read_only, - subPath=secret_key, - ), - ) - - _kaniko = self.client.simple_job( - f"kaniko-build-{build_name}", - namespace=self.BUILD_NAMESPACE, - pod_spec=PodSpec( - containers=[ - Container( - name="kaniko", - # image="rubikoid/yatb-k8s-builder-base:base", # "gcr.io/kaniko-project/executor:v1.23.2", - image="gcr.io/kaniko-project/executor:v1.23.2", - args=[ - f"--dockerfile={PurePosixPath('/kaniko/buildcontext') / dockerfile}", - f"--context=s3://{self.BUILD_BUCKET_NAME}/{raw_img_name}", - f"--destination={destination}", - "--cache=true", - "--cache-run-layers=true", - "--cache-copy-layers=true", - f"--cache-repo={self._BASE_IP}:5000/cache", - ], - # args=[ - # "-c", - # """ - # env; - # ls -la /kaniko/.docker; - # ls -la /kaniko/.docker/config.json; - # cat /kaniko/.docker/config.json; - # """.strip(), - # ], - env=[ - EnvVar( - "S3_ENDPOINT", - value=f"http://{self._BASE_IP}:{settings.S3_PORT}", - ), - # need to specify this to use path-stye minio, - # and don't try to resolve http://bucket.ip:port/file - EnvVar("S3_FORCE_PATH_STYLE", "true"), - # i have AWS. 
Don't work without this - EnvVar("AWS_REGION", "us-east-1"), # i have AWS - EnvVar("AWS_ACCESS_KEY_ID", settings.S3_ACCESS), - EnvVar("AWS_SECRET_ACCESS_KEY", settings.S3_SECRET), - ], - volumeMounts=volume_mounts, - ), - ], - volumes=volumes, - restartPolicy="Never", - ), - ) - - async with self.client.ctx(_kaniko, cascade=CascadeType.FOREGROUND) as kaniko: - if not check_meta(kaniko.metadata): - raise ImpossibleError - - logger.info("Wait for job ready") - await self.client.wait_ex( - Job, - kaniko.metadata.name, - namespace=kaniko.metadata.namespace, - cb=lambda x: x.get("ready", 0) == 1, - ) - - kaniko_pod = await async_first( - self.client.list( - Pod, - labels={"app.kubernetes.io/name": op.equal(kaniko.metadata.name)}, - namespace=kaniko.metadata.namespace, - ), - ) - - if not check_meta(kaniko_pod.metadata): - raise ImpossibleError - - logger.info("Waiting for kaniko pod be ready") - - kaniko_pod = await self.client.wait( - Pod, - kaniko_pod.metadata.name, - for_conditions=["PodReadyToStartContainers"], - namespace=kaniko.metadata.namespace, - ) - - if not check_meta(kaniko_pod.metadata): - raise ImpossibleError - - logger.info( - f"Kaniko pod created: '{kaniko_pod.metadata.namespace}.{kaniko_pod.metadata.name}'", - ) - - async for line in self.client.log( - kaniko_pod.metadata.name, - namespace=kaniko_pod.metadata.namespace, - follow=True, - newlines=False, - ): - logger.trace(f"Building {name!r}: {line}") - - logger.info(f"{name!r} builded as {destination!r}") - - # upload special caching tag - img_name = ImageName.parse(destination) - manifest = await self.drca.get_manifest(img_name) - patched_img = img_name.clone().set_tag(hash_digest) - await self.drca.put_manifest(patched_img, manifest.manifest) - - return destination - - async def oneshot(self, name: str) -> None: - image = self.get_image_name(name) - - @asynccontextmanager - async def run_ns(self, name: str) -> AsyncGenerator[tuple[str, Namespace], None]: - run_prefix = self.generate_name() - res = await self.client.create( - Namespace( - metadata=ObjectMeta( - name=f"{name}-{run_prefix}", - ) - ) - ) - - if not res.metadata or not res.metadata.name: - raise ImpossibleError(f"{res = }") - - try: - yield res.metadata.name, res - finally: - await self.client.delete( - Namespace, - name=res.metadata.name, - grace_period=0, - cascade=CascadeType.FOREGROUND, - ) - logger.info(f"Cleaned namespace '{res.metadata.name}'") - - async def service(self, name: str, compose: Compose, flag: str) -> None: - # images: dict[str, str] = {} - containers: dict[str, Container] = {} - # build stage - for svc_name, svc in compose.services.items(): - if not svc.build: - if not svc.image: - raise Exception("no") - image = svc.image - elif isinstance(svc.build, Path): - # TODO: do not build on every run - image = await self.build(f"{name}-{svc_name}", svc.build) - else: - image = await self.build( - f"{name}-{svc_name}", - svc.build.context, - dockerfile=svc.build.dockerfile, - ) - - image = self.fix_image_name(image) - - containers[svc_name] = Container( - name=svc_name, - image=image, - command=svc.prepared_command, - ports=[ContainerPort(port.internal_port) for port in svc.ports], - # env=[], - ) - - def patch_container(container: Container, secret: Secret, key: str) -> Container: - if not check_meta(secret.metadata): - raise ImpossibleError - - if container.env is None: - container.env = [] - container.env.append( - EnvVar( - name=key, - valueFrom=EnvVarSource( - secretKeyRef=SecretKeySelector( - name=secret.metadata.name, - key=key, - ), - ), - 
), - ) - - return container - - # run stage - async with ( - self.run_ns(f"{name}") as (ns_name, ns), - self.client.ctx( - Secret( - metadata=ObjectMeta( - name=f"{name}-flag", - namespace=ns_name, - ), - immutable=True, - stringData={"FLAG": flag}, - ), - ) as flag_secret, - AsyncExitStack() as deployments_exit_stack, - ): - services: list[Service] = [] - - for svc_name, container in containers.items(): - patch_container(container, flag_secret, "FLAG") - - if container.ports: - port = container.ports[0] # TODO: handle multiple ports... - service = self.client.ctx( - self.client.simple_service( - svc_name, - ns_name, - 31337, - port.containerPort, - [self._BASE_IP], - ), - ) - service = await deployments_exit_stack.enter_async_context(service) - services.append(service) - - deployment = self.client.ctx( - self.client.simple_deployment( - svc_name, - ns_name, - PodSpec(containers=[container]), - ), - ) - await deployments_exit_stack.enter_async_context(deployment) - - for service in services: - if not service.spec or not service.spec.externalIPs or not service.spec.ports: - raise ImpossibleError - - logger.info(f"{service = }") - - addr = f"http://{service.spec.externalIPs[0]}:{service.spec.ports[0].nodePort}" - - logger.info( - f"Started at {service.spec.externalIPs} -> {[i.nodePort for i in service.spec.ports]}; " - f"{addr = }; " # ... - f"{service.spec.clusterIPs = }", - ) - - input(f"...?: ") - - async def test(self): - logger.info("Simple cluster status:") - async for ns in self.client.list(Namespace): - if not ns.metadata or not ns.metadata.name: - logger.warning(f"{ns = } no metadata or name") - continue - - logger.info(f"Found ns: {ns.metadata.name}") - - async for pod in self.client.list(Pod, namespace="*"): - if not pod.metadata or not pod.status: - logger.warning(f"{pod = } no metadata / status") - continue - - logger.info(f"{pod.metadata.namespace}.{pod.metadata.name}: {pod.status.podIPs}") +from ...controllers import ExpirationController, PortsController +from .. 
import BaseConnector, DynamicTaskInfo, LocalTaskInfo +from ..compose import Compose, load_compose +from .api import KubeApi class KubeConnector(BaseConnector): api: KubeApi - def __init__(self) -> None: + def __init__( + self, + ) -> None: # , expiration_controller: ExpirationController, ports_controller: PortsController) -> None: self.api = KubeApi() - super().__init__() + # FIXME: temp for dev + expiration_controller = ExpirationController() + ports_controller = PortsController() + super().__init__(expiration_controller=expiration_controller, ports_controller=ports_controller) async def init(self) -> None: await self.api.init() @@ -530,16 +28,30 @@ async def test(self) -> None: await self.api.test() async def close(self) -> None: + await super().close() await self.api.close() - async def start(self, task_info: DynamicTaskInfo) -> None: - raise NotImplementedError + async def _start(self, task_info: LocalTaskInfo) -> AsyncExitStack: + logger.info(f"Got {task_info = }, resolving path and compose file") - async def stop(self, task_info: DynamicTaskInfo) -> None: - raise NotImplementedError + src = settings.UUID_TO_PATH_MAPPING[task_info.task_descriptor] + src = src.resolve() + + compose = load_compose(src) + + logger.info(f"Loaded {compose = }") + + return await self.api.service( + task_info._info.name, + compose, + flag="crab{test}", + host=task_info.hp_ok.host, + port=task_info.hp_ok.port, + skip_build=True, + ) - async def restart(self, task_info: DynamicTaskInfo) -> None: + async def _stop(self, task_info: LocalTaskInfo) -> None: raise NotImplementedError - async def info(self, task_info: DynamicTaskInfo) -> None: + async def _restart(self, task_info: LocalTaskInfo) -> None: raise NotImplementedError diff --git a/dynamic_tasks_app/connectors/kub/api.py b/dynamic_tasks_app/connectors/kub/api.py new file mode 100644 index 0000000..f358b23 --- /dev/null +++ b/dynamic_tasks_app/connectors/kub/api.py @@ -0,0 +1,518 @@ +import base64 +import hashlib +import io +import json +import random +import string +import tarfile +from collections.abc import AsyncGenerator +from contextlib import AsyncExitStack, asynccontextmanager +from pathlib import Path, PurePosixPath + +from aiohttp.client_exceptions import ClientResponseError +from docker_registry_client_async import DockerRegistryClientAsync, ImageName +from lightkube import operators as op +from lightkube.config.kubeconfig import KubeConfig +from lightkube.core.exceptions import ApiError +from lightkube.models.core_v1 import ( + Container, + ContainerPort, + EnvVar, + EnvVarSource, + KeyToPath, + PodSpec, + SecretKeySelector, + SecretVolumeSource, + Volume, + VolumeMount, +) +from lightkube.models.meta_v1 import ObjectMeta +from lightkube.resources.batch_v1 import Job +from lightkube.resources.core_v1 import Namespace, Pod, Secret, Service +from lightkube.types import CascadeType +from loguru import logger +from miniopy_async import Minio + +from ...config import settings +from ...utils.asc import async_first +from ..compose import Compose +from .client import AsyncClientEx, ImpossibleError, check_meta + + +class KubeApi: + _BASE_IP: str = "192.168.1.44" + + BUILD_BUCKET_NAME: str = "dynamic-tasks-build-source" + BUILD_NAMESPACE: str = "yatb-build" + + RUN_NAMESPACE: str = "yatb-run" # not used now... 
:hm" + + client: AsyncClientEx + s3: Minio + drca: DockerRegistryClientAsync + + async def init(self) -> None: + # setup kube + config = KubeConfig.from_file(settings.kube_config_path) if settings.kube_config_path else None + self.client = AsyncClientEx(config) # type: ignore # lib broken + + # setup s3 + self.s3 = Minio( + endpoint=settings.s3_endpoint, + access_key=settings.S3_ACCESS, + secret_key=settings.S3_SECRET, + secure=False, # http for False, https for True + ) + + DockerRegistryClientAsync.DEFAULT_PROTOCOL = "http" # FIXME: tmp + self.drca = DockerRegistryClientAsync() + + # setup buckets + await self.setup_s3() + + # setup namespaces + await self.setup_namespaces() + + async def close(self) -> None: + await self.client.close() + await self.drca.close() + + async def setup_namespaces(self) -> None: + for ns in [self.BUILD_NAMESPACE]: + logger.info(f"Checking for {ns = } existance") + try: + res = await self.client.get(Namespace, ns) + except ApiError as ex: + if ex.status.code != 404: # noqa: PLR2004 + logger.warning(f"{ex = } {ex.status = }") + raise + + logger.info(f"{ns = } not found, creating") + res = await self.client.create(Namespace(metadata=ObjectMeta(name=ns))) + logger.info(f"{res = } created") + else: + logger.info(f"{ns = } exists") + + async def setup_network(self) -> None: + # TODO: fix me + pass + + async def setup_s3(self) -> None: + if not await self.s3.bucket_exists(self.BUILD_BUCKET_NAME): + await self.s3.make_bucket(self.BUILD_BUCKET_NAME) + + @classmethod + def generate_name(cls, alphabet: str = string.digits + string.ascii_lowercase, n: int = 16) -> str: + return "".join(random.choices(alphabet, k=n)) # noqa: S311 + + def get_image_name(self, name: str) -> str: + return f"{self._BASE_IP}:5000/prebuild-images/{name}:latest" + + def fix_image_name(self, src: str) -> str: + return src.replace(f"{self._BASE_IP}:5000", "registry.local") + + @asynccontextmanager + async def docker_config_json_secret( + self, + docker_login: str, + docker_password: str, + name: str | None = None, + ) -> AsyncGenerator[Secret, None]: + name = name or f"dockerconfig-{docker_login}" + + auth = base64.b64encode(f"{docker_login}:{docker_password}".encode()).decode() + raw_secret = {"auths": {"https://index.docker.io/v1/": {"auth": auth}}} + encoded_secret = base64.b64encode(json.dumps(raw_secret).encode()).decode() + + async with self.client.ctx( + Secret( + metadata=ObjectMeta( + name=name, + namespace=self.BUILD_NAMESPACE, + annotations={ + "rubikoid.ru/mountVolume-path": "/kaniko/.docker/config.json", + "rubikoid.ru/mountVolume-key": ".dockerconfigjson", + "rubikoid.ru/mountVolume-ro": "True", + }, + ), + type="kubernetes.io/dockerconfigjson", + immutable=True, + data={".dockerconfigjson": encoded_secret}, + ), + ) as secret: + yield secret + + async def build( + self, + name: str, + source: Path, + *, + destination_override: str | None = None, + secrets: list[Secret] | None = None, + dockerfile: Path | str = Path("Dockerfile"), + skip_build: bool = False, + ) -> str: + assert source.is_absolute() + assert source.is_dir() + assert (source / dockerfile).exists() + + build_name = name # self.generate_name() + raw_img_name = f"{build_name}.tar.gz" + + # some customization + destination = destination_override or self.get_image_name(build_name) + + if skip_build: + return destination + + with io.BytesIO() as buff: + with tarfile.open(fileobj=buff, mode="w:gz") as tar: + for file in source.iterdir(): + tar.add(file, arcname=file.relative_to(source)) # string absolute long path + 
buff.seek(0) # reset to 0. because... you knew. + + # calc tar hash and check whenever it already builded + hash_digest = hashlib.sha256(buff.getbuffer()).hexdigest() + + try: + tags_resp = await self.drca.get_tags( + ImageName.parse(destination), + ) + + if hash_digest in tags_resp.tags["tags"]: + logger.info( + f"{hash_digest} found in {tags_resp.tags = } for {destination = }, not building this anymore", + ) + return destination + else: + logger.info( + f"{hash_digest} not found in {tags_resp.tags = } for {destination = }, so building...", + ) + + except ClientResponseError as ex: + if ex.status != 404: + raise + logger.info(f"No image for {destination = } exists so far") + + size = len(buff.getbuffer()) + await self.s3.put_object( + self.BUILD_BUCKET_NAME, + raw_img_name, + buff, + length=size, + ) + + logger.info( + f"Uploaded archive from {source} ({size = }) as 's3://{self.BUILD_BUCKET_NAME}/{raw_img_name}'", + ) + + # more customization + volume_mounts: list[VolumeMount] = [] + volumes: list[Volume] = [] + for secret in secrets or []: + if not check_meta(secret.metadata) or not secret.metadata.annotations: + raise ImpossibleError + + volume_name = f"{secret.metadata.name}-volume" + secret_key = secret.metadata.annotations["rubikoid.ru/mountVolume-key"] + volumes.append( + Volume( + name=volume_name, + secret=SecretVolumeSource( + secretName=secret.metadata.name, + items=[ + KeyToPath( + secret_key, + path=secret_key, + ) + ], + ), + ) + ) + + mount_path = secret.metadata.annotations["rubikoid.ru/mountVolume-path"] + read_only = bool(secret.metadata.annotations.get("rubikoid.ru/mountVolume-ro", "True")) + volume_mounts.append( + VolumeMount( + name=volume_name, + mountPath=mount_path, + readOnly=read_only, + subPath=secret_key, + ), + ) + + _kaniko = self.client.simple_job( + f"kaniko-build-{build_name}", + namespace=self.BUILD_NAMESPACE, + pod_spec=PodSpec( + containers=[ + Container( + name="kaniko", + # image="rubikoid/yatb-k8s-builder-base:base", # "gcr.io/kaniko-project/executor:v1.23.2", + image="gcr.io/kaniko-project/executor:v1.23.2", + args=[ + f"--dockerfile={PurePosixPath('/kaniko/buildcontext') / dockerfile}", + f"--context=s3://{self.BUILD_BUCKET_NAME}/{raw_img_name}", + f"--destination={destination}", + "--cache=true", + "--cache-run-layers=true", + "--cache-copy-layers=true", + f"--cache-repo={self._BASE_IP}:5000/cache", + ], + # args=[ + # "-c", + # """ + # env; + # ls -la /kaniko/.docker; + # ls -la /kaniko/.docker/config.json; + # cat /kaniko/.docker/config.json; + # """.strip(), + # ], + env=[ + EnvVar( + "S3_ENDPOINT", + value=f"http://{self._BASE_IP}:{settings.S3_PORT}", + ), + # need to specify this to use path-stye minio, + # and don't try to resolve http://bucket.ip:port/file + EnvVar("S3_FORCE_PATH_STYLE", "true"), + # i have AWS. 
Don't work without this + EnvVar("AWS_REGION", "us-east-1"), # i have AWS + EnvVar("AWS_ACCESS_KEY_ID", settings.S3_ACCESS), + EnvVar("AWS_SECRET_ACCESS_KEY", settings.S3_SECRET), + ], + volumeMounts=volume_mounts, + ), + ], + volumes=volumes, + restartPolicy="Never", + ), + ) + + async with self.client.ctx(_kaniko, cascade=CascadeType.FOREGROUND) as kaniko: + if not check_meta(kaniko.metadata): + raise ImpossibleError + + logger.info("Wait for job ready") + await self.client.wait_ex( + Job, + kaniko.metadata.name, + namespace=kaniko.metadata.namespace, + cb=lambda x: x.get("ready", 0) == 1, + ) + + kaniko_pod = await async_first( + self.client.list( + Pod, + labels={"app.kubernetes.io/name": op.equal(kaniko.metadata.name)}, + namespace=kaniko.metadata.namespace, + ), + ) + + if not check_meta(kaniko_pod.metadata): + raise ImpossibleError + + logger.info("Waiting for kaniko pod be ready") + + kaniko_pod = await self.client.wait( + Pod, + kaniko_pod.metadata.name, + for_conditions=["PodReadyToStartContainers"], + namespace=kaniko.metadata.namespace, + ) + + if not check_meta(kaniko_pod.metadata): + raise ImpossibleError + + logger.info( + f"Kaniko pod created: '{kaniko_pod.metadata.namespace}.{kaniko_pod.metadata.name}'", + ) + + async for line in self.client.log( + kaniko_pod.metadata.name, + namespace=kaniko_pod.metadata.namespace, + follow=True, + newlines=False, + ): + logger.trace(f"Building {name!r}: {line}") + + logger.info(f"{name!r} builded as {destination!r}") + + # upload special caching tag + img_name = ImageName.parse(destination) + manifest = await self.drca.get_manifest(img_name) + patched_img = img_name.clone().set_tag(hash_digest) + await self.drca.put_manifest(patched_img, manifest.manifest) + + return destination + + async def oneshot(self, name: str) -> None: + image = self.get_image_name(name) + raise NotImplementedError + + @asynccontextmanager + async def run_in_ns(self, name: str) -> AsyncGenerator[tuple[str, Namespace], None]: + run_prefix = self.generate_name() + res = await self.client.create( + Namespace( + metadata=ObjectMeta( + name=f"{name}-{run_prefix}", + ) + ) + ) + + if not res.metadata or not res.metadata.name: + raise ImpossibleError(f"{res = }") + + try: + await self.setup_ns(res) + yield res.metadata.name, res + finally: + await self.client.delete( + Namespace, + name=res.metadata.name, + grace_period=0, + cascade=CascadeType.FOREGROUND, + ) + logger.info(f"Cleaned namespace '{res.metadata.name}'") + + async def setup_ns(self, ns: Namespace) -> None: + if not ns.metadata or not ns.metadata.name: + raise ImpossibleError(f"{ns = }") + + pass + + async def service( + self, + name: str, + compose: Compose, + flag: str, + host: str, + port: int, + *, + skip_build: bool = False, + ) -> AsyncExitStack: + # images: dict[str, str] = {} + containers: dict[str, Container] = {} + # build stage + for svc_name, svc in compose.services.items(): + if not svc.build: + if not svc.image: + raise Exception("no") + image = svc.image + elif isinstance(svc.build, Path): + # TODO: do not build on every run + image = await self.build(f"{name}-{svc_name}", svc.build, skip_build=skip_build) + else: + image = await self.build( + f"{name}-{svc_name}", + svc.build.context, + dockerfile=svc.build.dockerfile, + skip_build=skip_build, + ) + + image = self.fix_image_name(image) + + containers[svc_name] = Container( + name=svc_name, + image=image, + command=svc.prepared_command, + ports=[ContainerPort(port.internal_port) for port in svc.ports], + # env=[], + ) + + def 
patch_container(container: Container, secret: Secret, key: str) -> Container: + if not check_meta(secret.metadata): + raise ImpossibleError + + if container.env is None: + container.env = [] + container.env.append( + EnvVar( + name=key, + valueFrom=EnvVarSource( + secretKeyRef=SecretKeySelector( + name=secret.metadata.name, + key=key, + ), + ), + ), + ) + + return container + + stack = AsyncExitStack() + ns_name, ns = await stack.enter_async_context(self.run_in_ns(f"{name}")) + flag_secret = await stack.enter_async_context( + self.client.ctx( + Secret( + metadata=ObjectMeta( + name=f"{name}-flag", + namespace=ns_name, + ), + immutable=True, + stringData={"FLAG": flag}, + ), + ) + ) + + # run stage + services: list[Service] = [] + + for svc_name, container in containers.items(): + patch_container(container, flag_secret, "FLAG") + + if container.ports: + container_port = container.ports[0] # TODO: handle multiple ports... + service = self.client.ctx( + self.client.simple_service( + svc_name, + ns_name, + port, + container_port.containerPort, + [host], + ), + ) + service = await stack.enter_async_context(service) + services.append(service) + + deployment = self.client.ctx( + self.client.simple_deployment( + svc_name, + ns_name, + PodSpec(containers=[container]), + ), + ) + await stack.enter_async_context(deployment) + + for service in services: + if not service.spec or not service.spec.externalIPs or not service.spec.ports: + raise ImpossibleError + + # logger.info(f"{service = }") + + addr = f"http://{service.spec.externalIPs[0]}:{service.spec.ports[0].port}" + + logger.info( + f"Started at {service.spec.externalIPs} -> {[i.port for i in service.spec.ports]}; " + f"{addr = }; " # ... + f"{service.spec.clusterIPs = }", + ) + + return stack + + async def test(self): + logger.info("Simple cluster status:") + async for ns in self.client.list(Namespace): + if not ns.metadata or not ns.metadata.name: + logger.warning(f"{ns = } no metadata or name") + continue + + logger.info(f"Found ns: {ns.metadata.name}") + + async for pod in self.client.list(Pod, namespace="*"): + if not pod.metadata or not pod.status: + logger.warning(f"{pod = } no metadata / status") + continue + + logger.info(f"{pod.metadata.namespace}.{pod.metadata.name}: {pod.status.podIPs}") diff --git a/dynamic_tasks_app/controllers/__init__.py b/dynamic_tasks_app/controllers/__init__.py new file mode 100644 index 0000000..5fb41ec --- /dev/null +++ b/dynamic_tasks_app/controllers/__init__.py @@ -0,0 +1,2 @@ +from .expiration_controller import ExpirationController +from .ports_controller import PortsController diff --git a/dynamic_tasks_app/controllers/expiration_controller.py b/dynamic_tasks_app/controllers/expiration_controller.py new file mode 100644 index 0000000..bf6e71c --- /dev/null +++ b/dynamic_tasks_app/controllers/expiration_controller.py @@ -0,0 +1,112 @@ +import asyncio +import datetime +from contextlib import AsyncExitStack +from dataclasses import dataclass +from typing import Self +from uuid import UUID, uuid4 + +from loguru import logger + +DEFAULT_TTL = datetime.timedelta(minutes=1) + + +@dataclass +class StackInfo: + id: UUID + stack: AsyncExitStack + death_time: datetime.datetime + death_task: asyncio.Task | None = None + + @staticmethod + def now() -> datetime.datetime: + return datetime.datetime.now(tz=datetime.UTC) + + @classmethod + def build(cls, stack: AsyncExitStack, *, ttl: datetime.timedelta = DEFAULT_TTL) -> Self: + now = cls.now() + return cls( + id=uuid4(), + stack=stack, + death_time=now + ttl, + ) + + def 
extend_life(self, by: datetime.timedelta) -> None:
+        self.death_time += by
+
+    async def die(self) -> None:
+        await self.stack.aclose()
+
+    @property
+    def time_left(self) -> datetime.timedelta:
+        now = self.now()
+        return self.death_time - now
+
+    @property
+    def is_expired(self) -> bool:
+        now = self.now()
+        return now > self.death_time
+
+    def __repr__(self) -> str:
+        return (
+            f"StackInfo({self.id = }, death = '{self.death_time}', "
+            f"time_left = '{self.time_left}', {self.is_expired = })"
+        )
+
+
+class ExpirationController:
+    root_stack: AsyncExitStack
+    stacks: dict[UUID, StackInfo]
+
+    def __init__(self) -> None:
+        self.root_stack = AsyncExitStack()
+        self.stacks = {}
+
+    def get(self, id: UUID) -> StackInfo:
+        return self.stacks[id]
+
+    async def push_stack(self, stack: AsyncExitStack) -> StackInfo:
+        stack = await self.root_stack.enter_async_context(stack)
+        info = StackInfo.build(stack)
+        self.stacks[info.id] = info
+
+        await self._create_death_task(info)
+
+        logger.info(f"{info = } created")
+
+        return info
+
+    def extend_life(self, id: UUID, by: datetime.timedelta) -> StackInfo:
+        info = self.stacks[id]
+        info.extend_life(by)
+
+        logger.info(f"Lifetime of {info = } extended")
+
+        # TODO: recreate death task
+
+        return info
+
+    async def _create_death_task(self, info: StackInfo) -> None:
+        async def _task() -> None:
+            try:
+                await asyncio.sleep(info.time_left.total_seconds() + 1)  # total_seconds(): .seconds drops the days part
+            except asyncio.CancelledError:
+                logger.info(f"{info = } death task got cancelled")  # falls through: expiry is still re-checked below
+
+            if not info.is_expired:
+                logger.info(f"{info = } death task finished, but info is fresh, so restarting")
+                info.death_task = asyncio.create_task(_task())
+                return
+
+            logger.info(f"{info = } is expired")
+            await info.die()
+            del self.stacks[info.id]
+            logger.info(f"{info = } is cleaned")
+
+        info.death_task = asyncio.create_task(_task())
+
+    async def close(self) -> None:
+        logger.info(f"{len(self.stacks) = } cleaning...")
+
+        await self.root_stack.aclose()
+
+        logger.info(f"{len(self.stacks) = } cleaned")
diff --git a/dynamic_tasks_app/controllers/ports_controller.py b/dynamic_tasks_app/controllers/ports_controller.py
new file mode 100644
index 0000000..23e51f6
--- /dev/null
+++ b/dynamic_tasks_app/controllers/ports_controller.py
@@ -0,0 +1,42 @@
+import random
+from collections import defaultdict
+from collections.abc import Iterator
+from dataclasses import dataclass
+from itertools import cycle
+
+from loguru import logger
+
+from ..config import settings
+
+
+@dataclass
+class HostPortPair:
+    host: str
+    port: int
+
+
+class PortsController:
+    occupied_ports: dict[str, list[int]]
+    external_ips: Iterator[str]
+
+    def __init__(self) -> None:
+        self.occupied_ports = defaultdict(list)
+        self.external_ips = cycle(settings.EXTERNAL_IPS)
+
+    def get_host_and_port(self) -> HostPortPair:
+        host = next(self.external_ips)
+        occupied_ports = self.occupied_ports[host]
+
+        while (port := random.randint(settings.PORT_START, settings.PORT_END)) in occupied_ports:  # noqa: S311
+            logger.info(f"Found occupied port: {port} ;( ")
+
+        occupied_ports.append(port)
+        logger.info(f"Found free {host}:{port}")
+        return HostPortPair(host, port)
+
+    def free_port(self, pair: HostPortPair) -> None:
+        occupied_ports = self.occupied_ports[pair.host]
+        occupied_ports.remove(pair.port)
+
+    async def close(self) -> None:
+        pass
diff --git a/dynamic_tasks_app/utils/__init__.py b/dynamic_tasks_app/utils/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/dynamic_tasks_app/utils/asc.py b/dynamic_tasks_app/utils/asc.py
new file mode 100644
index 0000000..9ba0987 --- /dev/null +++ b/dynamic_tasks_app/utils/asc.py @@ -0,0 +1,14 @@ +from collections.abc import AsyncIterable +from typing import TypeVar + +_T = TypeVar("_T") + + +async def async_to_list(source: AsyncIterable[_T]) -> list[_T]: + return [t async for t in source] + + +async def async_first(source: AsyncIterable[_T]) -> _T: + async for t in source: + return t + raise Exception("not found") diff --git a/dynamic_tasks_app/web.py b/dynamic_tasks_app/web.py new file mode 100644 index 0000000..8688ecd --- /dev/null +++ b/dynamic_tasks_app/web.py @@ -0,0 +1,81 @@ +from contextlib import asynccontextmanager +from uuid import UUID + +from fastapi import APIRouter, FastAPI, HTTPException, Request, status + +from dynamic_tasks_app.connectors import ExternalDynamicTaskInfo + +from .config import settings +from .connectors import DynamicTaskInfo, GenericConnectorError +from .connectors.kub import KubeConnector + +# WTF: tmp for dev +connector = KubeConnector() + + +@asynccontextmanager +async def lifespan(app: FastAPI): + await connector.init() + try: + yield + finally: + await connector.close() + + +app = FastAPI( + lifespan=lifespan, + # middleware=[process_exception], +) + + +# TODO: token check +router = APIRouter( + prefix="/api", + tags=["api"], +) + + +@asynccontextmanager +async def execption_handler(): + try: + yield + except GenericConnectorError as ex: + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail={ + "error": f"{ex!r}", + }, + ) from ex + + +@router.post("/start") +async def api_start(task_info: DynamicTaskInfo) -> ExternalDynamicTaskInfo: + async with execption_handler(): + return await connector.start(task_info) + + +@router.post("/stop") +async def api_stop(task_info: DynamicTaskInfo): + async with execption_handler(): + return await connector.stop(task_info) + + +@router.post("/restart") +async def api_restart(task_info: DynamicTaskInfo): + async with execption_handler(): + return await connector.restart(task_info) + + +@router.post("/extend") +async def api_extend(task_info: DynamicTaskInfo): + async with execption_handler(): + return await connector.extend(task_info) + + +@router.post("/info") +async def api_info(task_info: DynamicTaskInfo) -> ExternalDynamicTaskInfo: + async with execption_handler(): + return await connector.info_task(task_info) + + +app.include_router(router) From 3e707fe469d9013de9afb446e8bc1331d1c60c4f Mon Sep 17 00:00:00 2001 From: Rubikoid Date: Wed, 7 Aug 2024 19:10:27 +0300 Subject: [PATCH 18/42] A little refactor api_tasks --- app/api/api_tasks.py | 30 +++++++++++++++++++----------- 1 file changed, 19 insertions(+), 11 deletions(-) diff --git a/app/api/api_tasks.py b/app/api/api_tasks.py index 31e534d..64a2d86 100644 --- a/app/api/api_tasks.py +++ b/app/api/api_tasks.py @@ -1,5 +1,6 @@ import uuid from datetime import UTC, datetime +from typing import Annotated from fastapi import APIRouter, Depends, HTTPException, status @@ -16,6 +17,19 @@ ) +async def get_task(task_id: uuid.UUID, user: auth.CURR_USER_SAFE) -> TaskDB: + task = await TaskDB.find_by_task_uuid(task_id) + if not task or not task.visible_for_user(user): + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="No task", + ) + return task + + +CURRENT_TASK = Annotated[TaskDB, Depends(get_task)] + + @router.get("/") async def api_tasks_get(user: auth.CURR_USER_SAFE) -> list[schema.Task.public_model]: tasks = await TaskDB.get_all() @@ -31,6 +45,11 @@ class BRMessage(schema.EBaseModel): is_fb: bool 
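# An aside on the pattern this refactor introduces: `get_task` becomes a FastAPI
# dependency, and `CURRENT_TASK = Annotated[TaskDB, Depends(get_task)]` names it
# once, so every route that declares `task: CURRENT_TASK` shares the same lookup
# and 404 handling. A minimal self-contained sketch; `Item` and `fake_items` are
# illustrative stand-ins, not part of this codebase:

import uuid
from typing import Annotated

from fastapi import Depends, FastAPI, HTTPException, status
from pydantic import BaseModel

app = FastAPI()


class Item(BaseModel):
    id: uuid.UUID
    name: str


fake_items: dict[uuid.UUID, Item] = {}


async def get_item(item_id: uuid.UUID) -> Item:
    # FastAPI pulls `item_id` from the path of whatever route uses this dependency.
    item = fake_items.get(item_id)
    if not item:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="No item")
    return item


CURRENT_ITEM = Annotated[Item, Depends(get_item)]


@app.get("/items/{item_id}")
async def read_item(item: CURRENT_ITEM) -> Item:
    # Lookup and 404 handling already happened in the dependency.
    return item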
+@router.get("/{task_id}") +async def api_task_get(task: CURRENT_TASK) -> schema.Task.public_model: + return task + + @router.post("/submit_flag") async def api_task_submit_flag(flag: schema.FlagForm, user: auth.CURR_USER) -> uuid.UUID: if datetime.now(tz=UTC) < settings.EVENT_START_TIME: @@ -103,14 +122,3 @@ async def api_task_submit_flag(flag: schema.FlagForm, user: auth.CURR_USER) -> u logger.error(f"tg_exception exception='{ex}'") return ret - - -@router.get("/{task_id}") -async def api_task_get(task_id: uuid.UUID, user: auth.CURR_USER_SAFE) -> schema.Task.public_model: - task = await TaskDB.find_by_task_uuid(task_id) - if not task or not task.visible_for_user(user): - raise HTTPException( - status_code=status.HTTP_404_NOT_FOUND, - detail="No task", - ) - return task From 855955c691cc93c305f564c3b55a9d7002175a02 Mon Sep 17 00:00:00 2001 From: Rubikoid Date: Wed, 7 Aug 2024 19:10:35 +0300 Subject: [PATCH 19/42] Prepare api_dynamic_tasks --- app/api/api_dynamic_tasks.py | 107 +++++++++++++++++++++++++++++++++++ 1 file changed, 107 insertions(+) create mode 100644 app/api/api_dynamic_tasks.py diff --git a/app/api/api_dynamic_tasks.py b/app/api/api_dynamic_tasks.py new file mode 100644 index 0000000..2a3f967 --- /dev/null +++ b/app/api/api_dynamic_tasks.py @@ -0,0 +1,107 @@ +from collections.abc import Callable +from typing import Annotated, Literal, Self, cast +from uuid import UUID + +from fastapi import APIRouter, Depends, HTTPException, Request, Response, status +from httpx import AsyncClient +from pydantic import BaseModel + +from .. import auth, db, schema +from ..config import settings +from ..db.beanie import TaskDB, UserDB +from ..utils import metrics +from ..utils.log_helper import get_logger +from .api_tasks import CURRENT_TASK + +logger = get_logger("api.dynamic_tasks") + +router = APIRouter( + prefix="/dynamic", + tags=["dynamic_tasks"], +) + + +class DynamicTaskInfo(BaseModel): + name: str + descriptor: UUID + + type: schema.task.DynamicTaskType + + user_id: str + + @classmethod + def build(cls, task: schema.Task, user: schema.User) -> Self: + if not task.dynamic_task_type: + raise Exception("impossible") + + return cls( + name=f"{task.task_id}", + descriptor=task.task_id, + type=task.dynamic_task_type, + user_id=f"{user.user_id}", + ) + + +class DynamicTasksClient(AsyncClient): + def __init__(self) -> None: + if not settings.DYNAMIC_TASKS_CONTROLLER_TOKEN: + return + + self.headers["X-Token"] = settings.DYNAMIC_TASKS_CONTROLLER_TOKEN + + async def start(self, task_info: DynamicTaskInfo): + pass + + async def stop(self, task_info: DynamicTaskInfo): + pass + + async def restart(self, task_info: DynamicTaskInfo): + pass + + async def info(self, task_info: DynamicTaskInfo): + pass + + +__client: DynamicTasksClient = DynamicTasksClient() + + +async def __get_client() -> DynamicTasksClient: + if not settings.DYNAMIC_TASKS_CONTROLLER or not settings.DYNAMIC_TASKS_CONTROLLER_TOKEN: + raise HTTPException( + status_code=status.HTTP_503_SERVICE_UNAVAILABLE, + detail="dynamic tasks not enabled", + ) + return __client + + +async def get_dynamic_task(task: CURRENT_TASK) -> TaskDB: + if not task.dynamic_task_handle: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail="Bad task", + ) + return task + + +CLIENT = Annotated[DynamicTasksClient, Depends(__get_client)] +CURRENT_DYNAMIC_TASK = Annotated[TaskDB, Depends(get_dynamic_task)] + + +@router.post("/start/{task_id}") +async def api_dynamic_task_start(user: auth.CURR_USER, task: CURRENT_DYNAMIC_TASK, client: CLIENT): 
+ return await client.start(DynamicTaskInfo.build(task=task, user=user)) + + +@router.post("/stop/{task_id}") +async def api_dynamic_task_stop(user: auth.CURR_USER, task: CURRENT_DYNAMIC_TASK, client: CLIENT): + return await client.stop(DynamicTaskInfo.build(task=task, user=user)) + + +@router.post("/restart/{task_id}") +async def api_dynamic_task_restart(user: auth.CURR_USER, task: CURRENT_DYNAMIC_TASK, client: CLIENT): + return await client.restart(DynamicTaskInfo.build(task=task, user=user)) + + +@router.post("/info/{task_id}") +async def api_dynamic_task_info(user: auth.CURR_USER, task: CURRENT_DYNAMIC_TASK, client: CLIENT): + return await client.info(DynamicTaskInfo.build(task=task, user=user)) From 3ad9c23e36a96984d0b8d3a437abb90c2c420b76 Mon Sep 17 00:00:00 2001 From: Rubikoid Date: Sun, 11 Aug 2024 03:11:36 +0300 Subject: [PATCH 20/42] Some fixes in dynamic tasks svc --- dynamic_tasks_app/config.py | 10 ++++++++++ dynamic_tasks_app/connectors/__init__.py | 6 ++++++ dynamic_tasks_app/connectors/kub/__init__.py | 2 +- dynamic_tasks_app/controllers/expiration_controller.py | 2 +- 4 files changed, 18 insertions(+), 2 deletions(-) diff --git a/dynamic_tasks_app/config.py b/dynamic_tasks_app/config.py index f77739a..c0fb595 100644 --- a/dynamic_tasks_app/config.py +++ b/dynamic_tasks_app/config.py @@ -1,11 +1,15 @@ +import datetime from pathlib import Path from typing import Self +from uuid import UUID from pydantic import model_validator from pydantic_settings import BaseSettings, SettingsConfigDict _DEFAULT_TOKEN = "default_token_CHANGE_ME" # noqa: S105 # intended +DEFAULT_TTL = datetime.timedelta(hours=1) + class DefaultTokenError(ValueError): pass @@ -26,6 +30,12 @@ class Settings(BaseSettings): S3_ACCESS: str S3_SECRET: str + EXTERNAL_IPS: list[str] + PORT_START: int = 20000 + PORT_END: int = 40000 + + UUID_TO_PATH_MAPPING: dict[UUID, Path] = {} + @property def kube_config_path(self) -> str | None: if not self.KUBE_CONFIG_PATH: diff --git a/dynamic_tasks_app/connectors/__init__.py b/dynamic_tasks_app/connectors/__init__.py index 85d1751..fb467ce 100644 --- a/dynamic_tasks_app/connectors/__init__.py +++ b/dynamic_tasks_app/connectors/__init__.py @@ -154,6 +154,11 @@ def get_ltask_info(self, task_info: DynamicTaskInfo) -> LocalTaskInfo: return self.tasks_index[k] + def free_ltask_info(self, task_info: DynamicTaskInfo) -> None: + k = (task_info.descriptor, task_info.user_id) + + del self.tasks_index[k] + async def start(self, task_info: DynamicTaskInfo) -> ExternalDynamicTaskInfo: ltask_info = self.init_ltask_info(task_info) ltask_info.hp = self.ports_controller.get_host_and_port() @@ -162,6 +167,7 @@ async def start(self, task_info: DynamicTaskInfo) -> ExternalDynamicTaskInfo: stack = await self._start(ltask_info) stack.callback(lambda: self.ports_controller.free_port(ltask_info.hp_ok)) + stack.callback(lambda: self.free_ltask_info(task_info)) info = await self.expiration_controller.push_stack(stack) logger.info(f"Pushed {info = }") diff --git a/dynamic_tasks_app/connectors/kub/__init__.py b/dynamic_tasks_app/connectors/kub/__init__.py index 8e2f654..a22707e 100644 --- a/dynamic_tasks_app/connectors/kub/__init__.py +++ b/dynamic_tasks_app/connectors/kub/__init__.py @@ -47,7 +47,7 @@ async def _start(self, task_info: LocalTaskInfo) -> AsyncExitStack: flag="crab{test}", host=task_info.hp_ok.host, port=task_info.hp_ok.port, - skip_build=True, + skip_build=False, ) async def _stop(self, task_info: LocalTaskInfo) -> None: diff --git a/dynamic_tasks_app/controllers/expiration_controller.py 
b/dynamic_tasks_app/controllers/expiration_controller.py index bf6e71c..69ca6ce 100644 --- a/dynamic_tasks_app/controllers/expiration_controller.py +++ b/dynamic_tasks_app/controllers/expiration_controller.py @@ -7,7 +7,7 @@ from loguru import logger -DEFAULT_TTL = datetime.timedelta(minutes=1) +from ..config import DEFAULT_TTL @dataclass From 27e81c8535f08bfa67b2e07c368f713d15b29d7f Mon Sep 17 00:00:00 2001 From: Rubikoid Date: Sun, 11 Aug 2024 03:12:26 +0300 Subject: [PATCH 21/42] Make PoC for using dynamic tasks from frontend --- app/api/__init__.py | 2 + app/api/api_dynamic_tasks.py | 107 +++++++++++++++++++++++++------- app/view/templates/base.jhtml | 2 +- app/view/templates/macro.jhtml | 110 ++++++++++++++++++++++++++++++++- app/view/templates/task.jhtml | 1 + app/view/templates/tasks.jhtml | 1 + 6 files changed, 198 insertions(+), 25 deletions(-) diff --git a/app/api/__init__.py b/app/api/__init__.py index e14520d..281d4b0 100644 --- a/app/api/__init__.py +++ b/app/api/__init__.py @@ -13,8 +13,10 @@ from . import api_auth # noqa from . import api_tasks # noqa from . import api_users # noqa +from . import api_dynamic_tasks # noqa api_users.router.include_router(api_auth.router) router.include_router(api_users.router) router.include_router(api_tasks.router) +router.include_router(api_dynamic_tasks.router) router.include_router(admin.router) diff --git a/app/api/api_dynamic_tasks.py b/app/api/api_dynamic_tasks.py index 2a3f967..e0933b2 100644 --- a/app/api/api_dynamic_tasks.py +++ b/app/api/api_dynamic_tasks.py @@ -1,10 +1,14 @@ +import datetime from collections.abc import Callable -from typing import Annotated, Literal, Self, cast +from typing import Annotated, Literal, Self, TypeAlias, cast from uuid import UUID +import httpx +import humanize from fastapi import APIRouter, Depends, HTTPException, Request, Response, status +from fastapi.responses import HTMLResponse from httpx import AsyncClient -from pydantic import BaseModel +from pydantic import BaseModel, TypeAdapter from .. 
import auth, db, schema
from ..config import settings
@@ -31,26 +35,78 @@ class DynamicTaskInfo(BaseModel):
     @classmethod
     def build(cls, task: schema.Task, user: schema.User) -> Self:
-        if not task.dynamic_task_type:
+        if not task.dynamic_task_info:
             raise Exception("impossible")

         return cls(
             name=f"{task.task_id}",
             descriptor=task.task_id,
-            type=task.dynamic_task_type,
+            type=task.dynamic_task_info.dynamic_task_type,
             user_id=f"{user.user_id}",
         )

+class ExternalDynamicTaskInfo(BaseModel):
+    class HostPortPair(BaseModel):
+        host: str
+        port: int
+
+    id: UUID
+
+    task_descriptor: UUID
+
+    user_id: str
+
+    hp: HostPortPair
+
+    least_time: datetime.timedelta
+
+
+class ExternalDynamicTaskError(BaseModel):
+    class Detail(BaseModel):
+        error: str
+
+    detail: Detail
+
+
+_TT: TypeAlias = ExternalDynamicTaskInfo | ExternalDynamicTaskError
+ExternalDynamicTaskResp = TypeAdapter[_TT](_TT)
+
+
 class DynamicTasksClient(AsyncClient):
     def __init__(self) -> None:
-        if not settings.DYNAMIC_TASKS_CONTROLLER_TOKEN:
+        if not settings.DYNAMIC_TASKS_CONTROLLER_TOKEN or not settings.DYNAMIC_TASKS_CONTROLLER:
             return

-        self.headers["X-Token"] = settings.DYNAMIC_TASKS_CONTROLLER_TOKEN
+        super().__init__(
+            base_url=settings.DYNAMIC_TASKS_CONTROLLER,
+            headers={
+                "X-Token": settings.DYNAMIC_TASKS_CONTROLLER_TOKEN,
+            },
+            timeout=httpx.Timeout(connect=5.0, read=120.0, write=5.0, pool=5.0),
+        )
+
+    def format_resp(self, resp: httpx.Response) -> str:
+        info = ExternalDynamicTaskResp.validate_json(resp.text)
+        return self.format_info(info)

-    async def start(self, task_info: DynamicTaskInfo):
-        pass
+    def format_info(self, info: _TT) -> str:
+        if not isinstance(info, ExternalDynamicTaskInfo):
+            return f"Status: {info.detail}"
+
+        ret = ""
+        ret += "Status: Running<br/>"
+
+        link = f"http://{info.hp.host}:{info.hp.port}/"
+        ret += f"{link}<br/>"
+
+        ret += f"Will die after {humanize.precisedelta(info.least_time)}"
+
+        return ret
+
+    async def start(self, task_info: DynamicTaskInfo) -> str:
+        resp = await self.post("/api/start", json=task_info.model_dump(mode="json"))
+        return self.format_resp(resp)

     async def stop(self, task_info: DynamicTaskInfo):
         pass
@@ -58,8 +114,9 @@ async def stop(self, task_info: DynamicTaskInfo):
     async def restart(self, task_info: DynamicTaskInfo):
         pass

-    async def info(self, task_info: DynamicTaskInfo):
-        pass
+    async def info(self, task_info: DynamicTaskInfo) -> str:
+        resp = await self.post("/api/info", json=task_info.model_dump(mode="json"))
+        return self.format_resp(resp)

 __client: DynamicTasksClient = DynamicTasksClient()
@@ -75,7 +132,7 @@ async def __get_client() -> DynamicTasksClient:

 async def get_dynamic_task(task: CURRENT_TASK) -> TaskDB:
-    if not task.dynamic_task_handle:
+    if not task.dynamic_task_info:
         raise HTTPException(
             status_code=status.HTTP_400_BAD_REQUEST,
             detail="Bad task",
@@ -87,21 +144,25 @@ async def get_dynamic_task(task: CURRENT_TASK) -> TaskDB:
 CURRENT_DYNAMIC_TASK = Annotated[TaskDB, Depends(get_dynamic_task)]

-@router.post("/start/{task_id}")
-async def api_dynamic_task_start(user: auth.CURR_USER, task: CURRENT_DYNAMIC_TASK, client: CLIENT):
-    return await client.start(DynamicTaskInfo.build(task=task, user=user))
+@router.get("/start/{task_id}")
+async def api_dynamic_task_start(user: auth.CURR_USER, task: CURRENT_DYNAMIC_TASK, client: CLIENT) -> HTMLResponse:
+    info = await client.start(DynamicTaskInfo.build(task=task, user=user))
+    return HTMLResponse(info)

-@router.post("/stop/{task_id}")
-async def api_dynamic_task_stop(user: auth.CURR_USER, task: CURRENT_DYNAMIC_TASK, client: CLIENT):
-    return await client.stop(DynamicTaskInfo.build(task=task, user=user))
+@router.get("/stop/{task_id}")
+async def api_dynamic_task_stop(user: auth.CURR_USER, task: CURRENT_DYNAMIC_TASK, client: CLIENT) -> HTMLResponse:
+    info = await client.stop(DynamicTaskInfo.build(task=task, user=user))
+    return HTMLResponse(info)

-@router.post("/restart/{task_id}")
-async def api_dynamic_task_restart(user: auth.CURR_USER, task: CURRENT_DYNAMIC_TASK, client: CLIENT):
-    return await client.restart(DynamicTaskInfo.build(task=task, user=user))
+@router.get("/restart/{task_id}")
+async def api_dynamic_task_restart(user: auth.CURR_USER, task: CURRENT_DYNAMIC_TASK, client: CLIENT) -> HTMLResponse:
+    info = await client.restart(DynamicTaskInfo.build(task=task, user=user))
+    return HTMLResponse(info)

-@router.post("/info/{task_id}")
-async def api_dynamic_task_info(user: auth.CURR_USER, task: CURRENT_DYNAMIC_TASK, client: CLIENT):
-    return await client.info(DynamicTaskInfo.build(task=task, user=user))
+@router.get("/info/{task_id}")
+async def api_dynamic_task_info(user: auth.CURR_USER, task: CURRENT_DYNAMIC_TASK, client: CLIENT) -> HTMLResponse:
+    info = await client.info(DynamicTaskInfo.build(task=task, user=user))
+    return HTMLResponse(info)
diff --git a/app/view/templates/base.jhtml b/app/view/templates/base.jhtml
index e7eeb6d..4a26b97 100644
--- a/app/view/templates/base.jhtml
+++ b/app/view/templates/base.jhtml
@@ -98,7 +98,7 @@
 {% block scripts %}
-    <script src="…"></script>
+    <script src="…"></script>
     {# <script src="…"></script> #}
diff --git a/app/view/templates/macro.jhtml b/app/view/templates/macro.jhtml
index b132424..b91de9c 100644
--- a/app/view/templates/macro.jhtml
+++ b/app/view/templates/macro.jhtml
@@ -72,6 +72,24 @@

        {{ desc }}
    {% endif %} {% endfor %} #}
+
+    {% if task.dynamic_task_info is not none %}
+        {# dynamic-task status box: a "No info ;(" placeholder plus start/stop/info controls; the HTML markup was stripped from this transcript #}
+        {% if user and user.admin_checker() %}
+            {# admin-only controls; markup stripped #}
+        {% endif %}
+    {% endif %}
 {% if with_flag_box %}
-    {{ flag_box(user) }}
+        {{ flag_box(user) }}
 {% endif %}
 {% if False and solved_list %} {# TODO: finish writing this shit! #}
@@ -110,3 +128,93 @@
 {% endif %}
{% endmacro %}
+
+{% macro dynamic_task_script(user=None) %}
+    {# the ~90-line <script> body that drives the dynamic-task buttons was stripped from this transcript #}
+{% endmacro %}
diff --git a/app/view/templates/task.jhtml b/app/view/templates/task.jhtml
index 32cf687..70e0452 100644
--- a/app/view/templates/task.jhtml
+++ b/app/view/templates/task.jhtml
@@ -19,4 +19,5 @@
 {% block footer %}
     {{ super() }}
+    {{ macro.dynamic_task_script(curr_user) }}
 {% endblock %}
diff --git a/app/view/templates/tasks.jhtml b/app/view/templates/tasks.jhtml
index 7aad870..670381e 100644
--- a/app/view/templates/tasks.jhtml
+++ b/app/view/templates/tasks.jhtml
@@ -73,6 +73,7 @@
 {% block scripts %}
     {{ super() }}
+    {{ macro.dynamic_task_script(curr_user) }}
 {% endblock %}
 {% block right_header_buttons %}
     {{ super() }}
@@ -59,7 +55,7 @@
 {% block content %}
-    <div class="…">
+    <div class="…">
+    <div id="macy-container">  {# id assumed from the Macy init below; other markup stripped from this transcript #}
     {% for task in tasks|sort(attribute='scoring.points') %}
     {{ macro.task_show(task, curr_user) }}
     {% endfor %}
@@ -109,14 +105,17 @@
         function redrawVisible () {
             for (var k of Object.keys(categorySelector.categories)) {
                 if (categorySelector.categories[k] == true) {
-                    $("." + k).show()
+                    $("." + k).show();
                     if (categorySelector.showSolved == false) {
                         $(".solved").hide();
                     }
                 } else {
-                    $("." + k).hide()
+                    $("." + k).hide();
                 }
             }
+            if (macy) {
+                macy.recalculate(true);
+            }
         }

         function setCategory (target, status) {
@@ -221,5 +220,20 @@
             }
             event.stopPropagation(); //Always stop propagation
         });
+
+        var macy = Macy({
+            container: '#macy-container',
+            trueOrder: false,
+            waitForImages: false,
+            margin: 4,
+            columns: 5,
+            breakAt: {
+                1400: 4,
+                1200: 3,
+                992: 2,
+                576: 1,
+            }
+        });
+
 {% endblock %}

From 2beb993774bcf17427d64c8a432149f5f7b24510 Mon Sep 17 00:00:00 2001
From: Rubikoid
Date: Thu, 15 Aug 2024 04:43:17 +0300
Subject: [PATCH 29/42] Migrate from tons of req.txt to one pyproject.toml

---
 pyproject.toml           | 43 ++++++++++++++++++++++++++++++++++++++--
 requirements-cli.txt     |  4 ----
 requirements-dev.txt     |  5 -----
 requirements-dynamic.txt | 17 ----------------
 requirements.txt         | 23 ---------------------
 5 files changed, 41 insertions(+), 51 deletions(-)
 delete mode 100644 requirements-cli.txt
 delete mode 100644 requirements-dev.txt
 delete mode 100644 requirements-dynamic.txt
 delete mode 100644 requirements.txt

diff --git a/pyproject.toml b/pyproject.toml
index 37674b9..a295b3a 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -9,7 +9,7 @@ authors = [
     { name = "Maxim Anfinogenov", email = "anfinogenov@kksctf.ru" },
 ]

-license = "Apache-2.0"
+license = { file = "LICENSE" }

 readme = "README.md"
@@ -19,6 +19,46 @@ keywords = ["ctf", "jeopardy", "ctf-platform", "fastapi"]

 classifiers = ["Topic :: Software Development"]

+dependencies = [
+    "pydantic~=2.8.2",
+    "pydantic-settings~=2.3.4",
+    "fastapi~=0.112.0",
+    "aiohttp~=3.10.3",
+    "python-jose[cryptography]~=3.3.0",
+    "Jinja2~=3.1.2",
+    "markdown~=3.3.7",
+    "uvicorn~=0.30.6",
+    "humanize~=4.10.0",
+    "prometheus-fastapi-instrumentator~=5.6.0",
+    "beanie~=1.26.0",
+    "websockets~=12.0",
+    "fastui~=0.6.0",
+    "httpx~=0.27.0",
+    "formgen@git+https://github.com/kksctf/formgen.git@master",
+]
+
+
+[project.optional-dependencies]
+cli = ["typer~=0.9.0", "rich~=13.6.0", "pydantic-yaml~=1.2.0"]
+dev = [
+    "pytest~=8.3.2",
+    "pytest-cov~=5.0.0",
+    "pytest-env~=1.1.3",
+    "ruff~=0.5.4",
+]
+dynamic = [
+    "lightkube~=0.15.3",
+    "miniopy-async~=1.20.1",
+    "loguru~=0.7.2",
+    "ruamel.yaml~=0.18.6",
+    "cyclopts~=2.9.3",
+    "docker-registry-client-async~=0.2.11",
+]
+
+[tool.setuptools.packages.find]
+where = ["./"]
+include = [""]
+
 [project.urls]
 homepage = "https://github.com/kksctf/yatb"
 repository = "https://github.com/kksctf/yatb"
@@ -61,7 +101,6 @@
 task-tags = ["TODO", "FIXME", "WTF", "XXX"]

 # rules...
 select = ["ALL"]
 ignore = [
-    "ANN101", # | ruff? | Missing type annotation for `self` in method # non sense
     "B008", # | ruff? | Do not perform function call `...` in argument defaults # fastapi DI...
     "D100", # | pydocstyle | Missing docstring in public module # meh
     "D101", # | pydocstyle | Missing docstring in public class # meh
diff --git a/requirements-cli.txt b/requirements-cli.txt
deleted file mode 100644
index d34c646..0000000
--- a/requirements-cli.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-typer==0.9.0
-rich==13.5.2
-httpx==0.22.0
-pydantic-yaml==1.2.0
diff --git a/requirements-dev.txt b/requirements-dev.txt
deleted file mode 100644
index 264c231..0000000
--- a/requirements-dev.txt
+++ /dev/null
@@ -1,5 +0,0 @@
-pytest==7.1.2
-pytest-cov==2.10.1
-pytest-env==0.6.2
-httpx==0.22.0
-ruff==0.5.1
diff --git a/requirements-dynamic.txt b/requirements-dynamic.txt
deleted file mode 100644
index 09eb9ab..0000000
--- a/requirements-dynamic.txt
+++ /dev/null
@@ -1,17 +0,0 @@
-# kr8s==0.17.0
-lightkube==0.15.3
-
-miniopy-async==1.20.1
-
-pydantic-settings==2.3.4
-pydantic==2.8.2
-fastapi==0.101.1
-
-uvicorn==0.17.6
-
-loguru==0.7.2
-
-ruamel.yaml==0.18.6
-cyclopts==2.9.3
-
-docker-registry-client-async==0.2.11
diff --git a/requirements.txt b/requirements.txt
deleted file mode 100644
index 7e2c402..0000000
--- a/requirements.txt
+++ /dev/null
@@ -1,23 +0,0 @@
-pydantic-settings==2.3.4
-pydantic==2.8.2
-fastapi==0.101.1
-
-aiofiles==0.8.0
-aiohttp==3.8.3
-python-jose[cryptography]==3.3.0
-Jinja2==3.1.2
-markdown==3.3.7
-uvicorn==0.17.6
-humanize==4.1.0
-prometheus-fastapi-instrumentator==5.6.0
-markupsafe==2.0.1
-websockets==10.4
-
-beanie==1.26.0
-
-fastui==0.6.0
-
-# beanie>=1.23.6,<2
-# git+https://github.com/Rubikoid/beanie.git@encoder-fix
-
-git+https://github.com/kksctf/formgen.git@master

From 0889d56093af42c605700904ac23c3a17895db21 Mon Sep 17 00:00:00 2001
From: Rubikoid
Date: Thu, 15 Aug 2024 05:14:22 +0300
Subject: [PATCH 30/42] Make service source hashing stable (focking gzip mtime)

Now caching fully works!
---
 dynamic_tasks_app/connectors/kub/api.py | 12 +++++++++++-
 1 file changed, 11 insertions(+), 1 deletion(-)

diff --git a/dynamic_tasks_app/connectors/kub/api.py b/dynamic_tasks_app/connectors/kub/api.py
index f358b23..414f001 100644
--- a/dynamic_tasks_app/connectors/kub/api.py
+++ b/dynamic_tasks_app/connectors/kub/api.py
@@ -7,7 +7,9 @@
 import tarfile
 from collections.abc import AsyncGenerator
 from contextlib import AsyncExitStack, asynccontextmanager
+from gzip import GzipFile
 from pathlib import Path, PurePosixPath
+from typing import IO, cast

 from aiohttp.client_exceptions import ClientResponseError
 from docker_registry_client_async import DockerRegistryClientAsync, ImageName
@@ -166,7 +168,15 @@ async def build(
             return destination

         with io.BytesIO() as buff:
-            with tarfile.open(fileobj=buff, mode="w:gz") as tar:
+            with (
+                # have to separately create gzip, because we need to set mtime=0
+                GzipFile(fileobj=buff, mode="wb", mtime=0) as gzip,
+                tarfile.open(
+                    # https://stackoverflow.com/a/58407810
+                    fileobj=cast(IO[bytes], gzip),  # IDK WHY, but for some reason gzip is not IO[bytes]...
+                    mode="w|",
+                ) as tar,
+            ):
                 for file in source.iterdir():
                     tar.add(file, arcname=file.relative_to(source))  # strip the absolute long path
                 buff.seek(0)  # reset to 0. because... you know.
From d50c0762812cfbad8e352e9a880715e9c07e7cab Mon Sep 17 00:00:00 2001
From: Rubikoid
Date: Sat, 17 Aug 2024 01:28:39 +0300
Subject: [PATCH 31/42] Add lock to BaseConnector

Add and properly handle InstanceNotFoundError
---
 dynamic_tasks_app/connectors/__init__.py | 66 +++++++++++++++---------
 dynamic_tasks_app/connectors/errors.py   |  4 ++
 dynamic_tasks_app/web.py                 | 10 +++-
 3 files changed, 55 insertions(+), 25 deletions(-)

diff --git a/dynamic_tasks_app/connectors/__init__.py b/dynamic_tasks_app/connectors/__init__.py
index fd5629d..13f9dd6 100644
--- a/dynamic_tasks_app/connectors/__init__.py
+++ b/dynamic_tasks_app/connectors/__init__.py
@@ -1,3 +1,4 @@
+import asyncio
 import datetime
 from abc import ABC, abstractmethod
 from contextlib import AsyncExitStack
@@ -12,7 +13,7 @@
 from ..controllers import ExpirationController, PortsController
 from ..controllers.ports_controller import HostPortPair
-from .errors import GenericConnectorError
+from .errors import GenericConnectorError, InstanceNotFoundError

 class DynamicTaskType(Enum):
@@ -85,6 +86,7 @@ class ExternalDynamicTaskInfo(BaseModel):

 class BaseConnector(ABC):
     tasks_index: dict[tuple[UUID, str], LocalTaskInfo]
+    tasks_lock: asyncio.Lock

     expiration_controller: ExpirationController
     ports_controller: PortsController
@@ -93,6 +95,7 @@ def __init__(self, expiration_controller: ExpirationController, ports_controller
         super().__init__()

         self.tasks_index = {}
+        self.tasks_lock = asyncio.Lock()

         self.expiration_controller = expiration_controller
         self.ports_controller = ports_controller
@@ -118,19 +121,29 @@ async def close(self) -> None:
         await self.expiration_controller.close()

     @abstractmethod
-    async def _start(self, task_info: LocalTaskInfo) -> AsyncExitStack:
+    async def _start(self, ltask_info: LocalTaskInfo) -> AsyncExitStack:
         raise NotImplementedError

-    @abstractmethod
-    async def _stop(self, task_info: LocalTaskInfo) -> None:
-        raise NotImplementedError
+    async def _stop(self, ltask_info: LocalTaskInfo) -> None:
+        if not ltask_info.expiration_id:
+            raise GenericConnectorError("Task is not initialized yet")
+
+        expiration_stack_info = await self.expiration_controller.get(ltask_info.expiration_id)
+
+        if task := expiration_stack_info.death_task:
+            task.cancel()
+
+        await self.expiration_controller.kill(expiration_stack_info)

     @abstractmethod
-    async def _restart(self, task_info: LocalTaskInfo) -> None:
+    async def _restart(self, ltask_info: LocalTaskInfo) -> None:
         raise NotImplementedError

     async def _info(self, ltask_info: LocalTaskInfo) -> ExternalDynamicTaskInfo:
-        expiration_info = self.expiration_controller.get(ltask_info.expiration_id_ok)
+        if not ltask_info.expiration_id:
+            raise GenericConnectorError("Task is not initialized yet")
+
+        expiration_info = await self.expiration_controller.get(ltask_info.expiration_id)

         return ExternalDynamicTaskInfo(
             id=ltask_info.id,
@@ -140,30 +153,35 @@
             least_time=expiration_info.time_left,
         )

-    def init_ltask_info(self, task_info: DynamicTaskInfo) -> LocalTaskInfo:
-        k = (task_info.descriptor, task_info.user_id)
+    async def init_ltask_info(self, task_info: DynamicTaskInfo) -> LocalTaskInfo:
+        async with self.tasks_lock:
+            k = (task_info.descriptor, task_info.user_id)

-        if k in self.tasks_index:
-            raise GenericConnectorError("This task for your team already exists")
+            if k in self.tasks_index:
+                raise GenericConnectorError("This task for your team already exists")

-        self.tasks_index[k] = LocalTaskInfo.build(task_info)
-        return self.tasks_index[k]
+            self.tasks_index[k] = LocalTaskInfo.build(task_info)
+            return self.tasks_index[k]

-    def get_ltask_info(self, task_info: DynamicTaskInfo) -> LocalTaskInfo:
-        k = (task_info.descriptor, task_info.user_id)
+    async def get_ltask_info(self, task_info: DynamicTaskInfo) -> LocalTaskInfo:
+        async with self.tasks_lock:
+            k = (task_info.descriptor, task_info.user_id)

-        if k not in self.tasks_index:
-            raise GenericConnectorError("No task found")
+            if k not in self.tasks_index:
+                raise InstanceNotFoundError("No task found")

-        return self.tasks_index[k]
+            return self.tasks_index[k]

     def free_ltask_info(self, task_info: DynamicTaskInfo) -> None:
+        # since this is a sync method, we don't need to take the lock
+        assert not self.tasks_lock.locked()
+
         k = (task_info.descriptor, task_info.user_id)

         del self.tasks_index[k]

     async def start(self, task_info: DynamicTaskInfo) -> ExternalDynamicTaskInfo:
-        ltask_info = self.init_ltask_info(task_info)
+        ltask_info = await self.init_ltask_info(task_info)
         ltask_info.hp = self.ports_controller.get_host_and_port()

         logger.info(f"Got port {ltask_info.hp = }")
@@ -179,19 +197,19 @@
         return await self._info(ltask_info)

     async def stop(self, task_info: DynamicTaskInfo) -> None:
-        ltask_info = self.get_ltask_info(task_info)
+        ltask_info = await self.get_ltask_info(task_info)
         await self._stop(ltask_info)

     async def restart(self, task_info: DynamicTaskInfo) -> None:
-        ltask_info = self.get_ltask_info(task_info)
+        ltask_info = await self.get_ltask_info(task_info)
         await self._restart(ltask_info)

     async def extend(self, task_info: DynamicTaskInfo) -> None:
-        ltask_info = self.get_ltask_info(task_info)
-        self.expiration_controller.extend_life(ltask_info.expiration_id_ok, datetime.timedelta(minutes=1))
+        ltask_info = await self.get_ltask_info(task_info)
+        await self.expiration_controller.extend_life(ltask_info.expiration_id_ok, datetime.timedelta(minutes=1))

     async def info_task(self, task_info: DynamicTaskInfo) -> ExternalDynamicTaskInfo:
-        ltask_info = self.get_ltask_info(task_info)
+        ltask_info = await self.get_ltask_info(task_info)
         return await self._info(ltask_info)

     # async def info_id(self, dynamic_task_id: UUID) -> ExternalDynamicTaskInfo:
diff --git a/dynamic_tasks_app/connectors/errors.py b/dynamic_tasks_app/connectors/errors.py
index f1e396c..0bbdbb7 100644
--- a/dynamic_tasks_app/connectors/errors.py
+++ b/dynamic_tasks_app/connectors/errors.py
@@ -1,2 +1,6 @@
 class GenericConnectorError(Exception):
     pass
+
+
+class InstanceNotFoundError(GenericConnectorError):
+    pass
diff --git a/dynamic_tasks_app/web.py b/dynamic_tasks_app/web.py
index 8688ecd..3ef445e 100644
--- a/dynamic_tasks_app/web.py
+++ b/dynamic_tasks_app/web.py
@@ -6,7 +6,8 @@
 from dynamic_tasks_app.connectors import ExternalDynamicTaskInfo

 from .config import settings
-from .connectors import DynamicTaskInfo, GenericConnectorError
+from .connectors import DynamicTaskInfo
+from .connectors.errors import GenericConnectorError, InstanceNotFoundError
 from .connectors.kub import KubeConnector

 # WTF: tmp for dev
@@ -39,6 +40,13 @@ async def lifespan(app: FastAPI):
 async def execption_handler():
     try:
         yield
+    except InstanceNotFoundError as ex:
+        raise HTTPException(
+            status_code=status.HTTP_404_NOT_FOUND,
+            detail={
+                "error": f"{ex!r}",
+            },
+        ) from ex
     except GenericConnectorError as ex:
         raise HTTPException(
             status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,

From 764a3559e46df37d6770e1da2292e859624476d5 Mon Sep 17 00:00:00 2001
From:
Rubikoid
Date: Sat, 17 Aug 2024 01:30:06 +0300
Subject: [PATCH 32/42] Protect expiration controller with lock

---
 .../controllers/expiration_controller.py | 38 ++++++++++++-------
 1 file changed, 25 insertions(+), 13 deletions(-)

diff --git a/dynamic_tasks_app/controllers/expiration_controller.py b/dynamic_tasks_app/controllers/expiration_controller.py
index 69ca6ce..0fccda3 100644
--- a/dynamic_tasks_app/controllers/expiration_controller.py
+++ b/dynamic_tasks_app/controllers/expiration_controller.py
@@ -56,18 +56,23 @@
 class ExpirationController:
     root_stack: AsyncExitStack
     stacks: dict[UUID, StackInfo]
+    stacks_lock: asyncio.Lock

     def __init__(self) -> None:
         self.root_stack = AsyncExitStack()
         self.stacks = {}
+        self.stacks_lock = asyncio.Lock()

-    def get(self, id: UUID) -> StackInfo:
-        return self.stacks[id]
+    async def get(self, id: UUID) -> StackInfo:
+        async with self.stacks_lock:
+            return self.stacks[id]

     async def push_stack(self, stack: AsyncExitStack) -> StackInfo:
         stack = await self.root_stack.enter_async_context(stack)
-        info = StackInfo.build(stack)
-        self.stacks[info.id] = info
+
+        async with self.stacks_lock:
+            info = StackInfo.build(stack)
+            self.stacks[info.id] = info

         await self._create_death_task(info)

         return info

-    def extend_life(self, id: UUID, by: datetime.timedelta) -> StackInfo:
-        info = self.stacks[id]
+    async def extend_life(self, id: UUID, by: datetime.timedelta) -> StackInfo:
+        info = await self.get(id)
         info.extend_life(by)

         logger.info(f"Lifetime of {info = } extended")
@@ -85,22 +90,29 @@
         return info

+    async def kill(self, info: StackInfo) -> None:
+        await info.die()
+
+        async with self.stacks_lock:
+            del self.stacks[info.id]
+
+        logger.info(f"{info = } is cleaned")
+
     async def _create_death_task(self, info: StackInfo) -> None:
         async def _task() -> None:
             try:
                 await asyncio.sleep(info.time_left.seconds + 1)
+
+                if not info.is_expired:
+                    logger.info(f"{info = } death task finished, but info is fresh, so restarting")
+                    info.death_task = asyncio.create_task(_task())
+                    return
             except asyncio.CancelledError:
                 logger.info(f"{info = } death task got cancelled")
-
-            if not info.is_expired:
-                logger.info(f"{info = } death task finished, but info is fresh, so restarting")
-                info.death_task = asyncio.create_task(_task())
                 return

             logger.info(f"{info = } is expired")
-            await info.die()
-            del self.stacks[info.id]
-            logger.info(f"{info = } is cleaned")
+            await self.kill(info)

         info.death_task = asyncio.create_task(_task())

From 78ac9b022ad2a7e20213c67ace0574ca198e9aab Mon Sep 17 00:00:00 2001
From: Rubikoid
Date: Sat, 17 Aug 2024 01:30:27 +0300
Subject: [PATCH 33/42] Make stop method work

---
 app/api/api_dynamic_tasks.py | 12 +++++++++++-
 1 file changed, 11 insertions(+), 1 deletion(-)

diff --git a/app/api/api_dynamic_tasks.py b/app/api/api_dynamic_tasks.py
index 4360790..d4a5015 100644
--- a/app/api/api_dynamic_tasks.py
+++ b/app/api/api_dynamic_tasks.py
@@ -112,7 +112,17 @@ async def start(self, task_info: DynamicTaskInfo) -> str:
         return self.format_resp(resp)

     async def stop(self, task_info: DynamicTaskInfo):
-        pass
+        resp = await self.post("/api/stop", json=task_info.model_dump(mode="json"))
+
+        if resp.status_code == status.HTTP_200_OK:
+            return "ok"
+
+        try:
+            err = ExternalDynamicTaskError.model_validate_json(resp.text)
+        except Exception as ex:
+            return "error?"
+        else:
+            return f"Status: {err.detail}"

     async def restart(self, task_info: DynamicTaskInfo):
         pass

From 6062a6f11162a3ccf269c1317698d7c9a5fb8368 Mon Sep 17 00:00:00 2001
From: Rubikoid
Date: Sat, 17 Aug 2024 01:32:53 +0300
Subject: [PATCH 34/42] Support IPv6 links

---
 app/api/api_dynamic_tasks.py | 13 ++++++++++++-
 1 file changed, 12 insertions(+), 1 deletion(-)

diff --git a/app/api/api_dynamic_tasks.py b/app/api/api_dynamic_tasks.py
index d4a5015..08dcff0 100644
--- a/app/api/api_dynamic_tasks.py
+++ b/app/api/api_dynamic_tasks.py
@@ -100,7 +100,18 @@
     def format_info(self, info: _TT) -> str:
         ret = ""
         ret += "Status: Running<br/>"

-        link = f"http://{info.hp.host}:{info.hp.port}/"
+        try:
+            ip = ip_address(info.hp.host)  # stdlib ipaddress.ip_address; the import is not shown in this hunk
+
+            if ip.version == 4:
+                ip = f"{ip}"
+            elif ip.version == 6:
+                ip = f"[{ip}]"
+
+        except ValueError as ex:
+            ip = info.hp.host
+
+        link = f"http://{ip}:{info.hp.port}/"
         ret += f"{link}<br/>"

         ret += f"Will die after {humanize.precisedelta(info.least_time)}"

From 24c598e91b72fa5ddcc503a841744833e090fbf1 Mon Sep 17 00:00:00 2001
From: Rubikoid
Date: Sat, 17 Aug 2024 01:33:24 +0300
Subject: [PATCH 35/42] Implement _stop in base connector

---
 dynamic_tasks_app/connectors/kub/__init__.py | 3 ---
 1 file changed, 3 deletions(-)

diff --git a/dynamic_tasks_app/connectors/kub/__init__.py b/dynamic_tasks_app/connectors/kub/__init__.py
index e1f8afb..95ccd4d 100644
--- a/dynamic_tasks_app/connectors/kub/__init__.py
+++ b/dynamic_tasks_app/connectors/kub/__init__.py
@@ -50,8 +50,5 @@ async def _start(self, task_info: LocalTaskInfo) -> AsyncExitStack:
             skip_build=False,
         )

-    async def _stop(self, task_info: LocalTaskInfo) -> None:
-        raise NotImplementedError
-
     async def _restart(self, task_info: LocalTaskInfo) -> None:
         raise NotImplementedError

From cae0e8fec5b71ad03ac6a3cb5e89bc575bc829dd Mon Sep 17 00:00:00 2001
From: Rubikoid
Date: Sat, 17 Aug 2024 01:33:59 +0300
Subject: [PATCH 36/42] Add ipFamilyPolicy="PreferDualStack" to k8s service

---
 dynamic_tasks_app/connectors/kub/client.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/dynamic_tasks_app/connectors/kub/client.py b/dynamic_tasks_app/connectors/kub/client.py
index 30a3c14..34229c6 100644
--- a/dynamic_tasks_app/connectors/kub/client.py
+++ b/dynamic_tasks_app/connectors/kub/client.py
@@ -142,6 +142,7 @@ def simple_service(
             externalIPs=external_ips,
             selector={"app.kubernetes.io/name": name},
             ports=[ServicePort(port=external_port, targetPort=target_port)],
+            ipFamilyPolicy="PreferDualStack",
         ),
     )

From 1e58300c278a0f90c8d8b8884251ff08fa0aa730 Mon Sep 17 00:00:00 2001
From: Rubikoid
Date: Sat, 17 Aug 2024 01:34:16 +0300
Subject: [PATCH 37/42] Remove flag_sign_key

---
 dynamic_tasks_app/config.py | 6 ++----
 1 file changed, 2 insertions(+), 4 deletions(-)

diff --git a/dynamic_tasks_app/config.py b/dynamic_tasks_app/config.py
index c0fb595..bf93df9 100644
--- a/dynamic_tasks_app/config.py
+++ b/dynamic_tasks_app/config.py
@@ -19,9 +19,7 @@ class Settings(BaseSettings):
     DEBUG: bool = False
     TESTING: bool = False

-    FLAG_SIGN_KEY: str = _DEFAULT_TOKEN
-
-    DYNAMIC_TASKS_CONTROLLER_TOKEN: str | None = None
+    DYNAMIC_TASKS_CONTROLLER_TOKEN: str = _DEFAULT_TOKEN

     KUBE_CONFIG_PATH: Path | None = None
@@ -52,7 +50,7 @@ def check_non_default_tokens(self) -> Self:
         if self.DEBUG or self.TESTING:
             return self

-        token_check_list = ["FLAG_SIGN_KEY"]
+        token_check_list = ["DYNAMIC_TASKS_CONTROLLER_TOKEN"]
         for token_name in token_check_list:
             if getattr(self, token_name) == _DEFAULT_TOKEN:
                 raise DefaultTokenError(f"Field '{token_name}' has default token value")

From f647c882fe2b8c377556e201da572a9679a3cfc6 Mon Sep 17 00:00:00 2001
From: Rubikoid
Date: Sat, 17 Aug 2024 01:35:08 +0300
Subject: [PATCH 38/42] upd release notes

---
 docs/release-notes.md | 7 +++++++
 1 file changed, 7 insertions(+)

diff --git a/docs/release-notes.md b/docs/release-notes.md
index f525d73..8073e9f 100644
--- a/docs/release-notes.md
+++ b/docs/release-notes.md
@@ -2,6 +2,13 @@

 ## Latest version

+- Added:
+    - FastUI PoC
+    - Dynamic Tasks microservice and its integration into yatb
+- Changed:
+    - Migrated from my beanie fork back to upstream.
+- Fixed:
+    - Many tests. They now run on a DEBUG=False build of the code, because running tests in debug mode is questionable.
## 0.6.3a0 - Added: From 2bde268b444d164bc333d98901eb08b4a4b35936 Mon Sep 17 00:00:00 2001 From: Rubikoid Date: Sat, 17 Aug 2024 01:46:31 +0300 Subject: [PATCH 39/42] Fix authors without @ --- app/schema/task.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/app/schema/task.py b/app/schema/task.py index 24ed56d..c341747 100644 --- a/app/schema/task.py +++ b/app/schema/task.py @@ -191,6 +191,10 @@ class TaskForm(EBaseModel): dynamic_task_info: DynamicTaskInfo | None = None def to_task(self, cls: type[_T], author: User) -> _T: + str_author = self.author if self.author != "" else f"@{author.username}" + if not str_author.startswith("@"): + str_author = f"@{str_author}" + task = cls( task_name=self.task_name, category=self.category, @@ -198,7 +202,7 @@ def to_task(self, cls: type[_T], author: User) -> _T: description=self.description, description_html=Task.regenerate_md(self.description), flag=self.flag, - author=(self.author if self.author != "" else f"@{author.username}"), + author=str_author, dynamic_task_info=self.dynamic_task_info, ) return task From 3d51e1e107d1be1739f713ebe61066d8e719e0d4 Mon Sep 17 00:00:00 2001 From: Rubikoid Date: Sun, 18 Aug 2024 20:50:55 +0300 Subject: [PATCH 40/42] Add pdm lock --- pdm.lock | 971 +++++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 971 insertions(+) create mode 100644 pdm.lock diff --git a/pdm.lock b/pdm.lock new file mode 100644 index 0000000..9eebfd0 --- /dev/null +++ b/pdm.lock @@ -0,0 +1,971 @@ +# This file is @generated by PDM. +# It is not intended for manual editing. + +[metadata] +groups = ["default"] +strategy = ["inherit_metadata"] +lock_version = "4.5.0" +content_hash = "sha256:af6ad0fff4ff8843e5438f99e631cafd7ff0480c54bcbf5f1a94fcfc325ff65c" + +[[metadata.targets]] +requires_python = ">=3.11" + +[[package]] +name = "aiohappyeyeballs" +version = "2.3.7" +requires_python = ">=3.8" +summary = "Happy Eyeballs for asyncio" +groups = ["default"] +files = [ + {file = "aiohappyeyeballs-2.3.7-py3-none-any.whl", hash = "sha256:337ce4dc0e99eb697c3c5a77d6cb3c52925824d9a67ac0dea7c55b8a2d60b222"}, + {file = "aiohappyeyeballs-2.3.7.tar.gz", hash = "sha256:e794cd29ba6a14078092984e43688212a19081de3a73b6796c2fdeb3706dd6ce"}, +] + +[[package]] +name = "aiohttp" +version = "3.10.4" +requires_python = ">=3.8" +summary = "Async http client/server framework (asyncio)" +groups = ["default"] +dependencies = [ + "aiohappyeyeballs>=2.3.0", + "aiosignal>=1.1.2", + "async-timeout<5.0,>=4.0; python_version < \"3.11\"", + "attrs>=17.3.0", + "frozenlist>=1.1.1", + "multidict<7.0,>=4.5", + "yarl<2.0,>=1.0", +] +files = [ + {file = "aiohttp-3.10.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:dfe48f477e02ef5ab247c6ac431a6109c69b5c24cb3ccbcd3e27c4fb39691fe4"}, + {file = "aiohttp-3.10.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f6fe78b51852e25d4e20be51ef88c2a0bf31432b9f2223bdbd61c01a0f9253a7"}, + {file = "aiohttp-3.10.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5cc75ff5efbd92301e63a157fddb18a6964a3f40e31c77d57e97dbb9bb3373b4"}, + {file = "aiohttp-3.10.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dca39391f45fbb28daa6412f98c625265bf6b512cc41382df61672d1b242f8f4"}, + {file = "aiohttp-3.10.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8616dd5ed8b3b4029021b560305041c62e080bb28f238c27c2e150abe3539587"}, + {file = "aiohttp-3.10.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:9d7958ba22854b3f00a7bbb66cde1dc759760ce8a3e6dfe9ea53f06bccaa9aa2"}, + {file = "aiohttp-3.10.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a24ac7164a824ef2e8e4e9a9f6debb1f43c44ad7ad04efc6018a6610555666d"}, + {file = "aiohttp-3.10.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:660ad010b8fd0b26e8edb8ae5c036db5b16baac4278198ad238b11956d920b3d"}, + {file = "aiohttp-3.10.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:93ee83008d3e505db9846a5a1f48a002676d8dcc90ee431a9462541c9b81393c"}, + {file = "aiohttp-3.10.4-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:77071795efd6ba87f409001141fb05c94ee962b9fca6c8fa1f735c2718512de4"}, + {file = "aiohttp-3.10.4-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:ff371ae72a1816c3eeba5c9cff42cb739aaa293fec7d78f180d1c7ee342285b6"}, + {file = "aiohttp-3.10.4-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:c253e81f12da97f85d45441e8c6da0d9c12e07db4a7136b0a955df6fc5e4bf51"}, + {file = "aiohttp-3.10.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:2ce101c447cf7ba4b6e5ab07bfa2c0da21cbab66922f78a601f0b84fd7710d72"}, + {file = "aiohttp-3.10.4-cp311-cp311-win32.whl", hash = "sha256:705c311ecf2d30fbcf3570d1a037c657be99095694223488140c47dee4ef2460"}, + {file = "aiohttp-3.10.4-cp311-cp311-win_amd64.whl", hash = "sha256:ebddbfea8a8d6b97f717658fa85a96681a28990072710d3de3a4eba5d6804a37"}, + {file = "aiohttp-3.10.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:fe4d63f42d9c604521b208b754abfafe01218af4a8f6332b43196ee8fe88bbd5"}, + {file = "aiohttp-3.10.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fef7b7bd3a6911b4d148332136d34d3c2aee3d54d354373b1da6d96bc08089a5"}, + {file = "aiohttp-3.10.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fff8606149098935188fe1e135f7e7991e6a36d6fe394fd15939fc57d0aff889"}, + {file = "aiohttp-3.10.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9eb3df1aa83602be9a5e572c834d74c3c8e382208b59a873aabfe4c493c45ed0"}, + {file = "aiohttp-3.10.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5c4a71d4a5e0cbfd4bfadd13cb84fe2bc76c64d550dc4f22c22008c9354cffb3"}, + {file = "aiohttp-3.10.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bf61884a604c399458c4a42c8caea000fbcc44255ed89577ff50cb688a0fe8e2"}, + {file = "aiohttp-3.10.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2015e4b40bd5dedc8155c2b2d24a2b07963ae02b5772373d0b599a68e38a316b"}, + {file = "aiohttp-3.10.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b06e1a66bf0a1a2d0f12aef25843dfd2093df080d6c1acbc43914bb9c8f36ed3"}, + {file = "aiohttp-3.10.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:eb898c9ad5a1228a669ebe2e2ba3d76aebe1f7c10b78f09a36000254f049fc2b"}, + {file = "aiohttp-3.10.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:2d64a5a7539320c3cecb4bca093ea825fcc906f8461cf8b42a7bf3c706ce1932"}, + {file = "aiohttp-3.10.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:438c6e1492d060b21285f4b6675b941cf96dd9ef3dfdd59940561029b82e3e1f"}, + {file = "aiohttp-3.10.4-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:e99bf118afb2584848dba169a685fe092b338a4fe52ae08c7243d7bc4cc204fe"}, + {file = "aiohttp-3.10.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:9dc26781fb95225c6170619dece8b5c6ca7cfb1b0be97b7ee719915773d0c2a9"}, + {file = 
"aiohttp-3.10.4-cp312-cp312-win32.whl", hash = "sha256:45bb655cb8b3a61e19977183a4e0962051ae90f6d46588ed4addb8232128141c"}, + {file = "aiohttp-3.10.4-cp312-cp312-win_amd64.whl", hash = "sha256:347bbdc48411badc24fe3a13565820bc742db3aa2f9127cd5f48c256caf87e29"}, + {file = "aiohttp-3.10.4.tar.gz", hash = "sha256:23a5f97e7dd22e181967fb6cb6c3b11653b0fdbbc4bb7739d9b6052890ccab96"}, +] + +[[package]] +name = "aiosignal" +version = "1.3.1" +requires_python = ">=3.7" +summary = "aiosignal: a list of registered asynchronous callbacks" +groups = ["default"] +dependencies = [ + "frozenlist>=1.1.0", +] +files = [ + {file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"}, + {file = "aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"}, +] + +[[package]] +name = "annotated-types" +version = "0.7.0" +requires_python = ">=3.8" +summary = "Reusable constraint types to use with typing.Annotated" +groups = ["default"] +dependencies = [ + "typing-extensions>=4.0.0; python_version < \"3.9\"", +] +files = [ + {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, + {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, +] + +[[package]] +name = "anyio" +version = "4.4.0" +requires_python = ">=3.8" +summary = "High level compatibility layer for multiple asynchronous event loop implementations" +groups = ["default"] +dependencies = [ + "exceptiongroup>=1.0.2; python_version < \"3.11\"", + "idna>=2.8", + "sniffio>=1.1", + "typing-extensions>=4.1; python_version < \"3.11\"", +] +files = [ + {file = "anyio-4.4.0-py3-none-any.whl", hash = "sha256:c1b2d8f46a8a812513012e1107cb0e68c17159a7a594208005a57dc776e1bdc7"}, + {file = "anyio-4.4.0.tar.gz", hash = "sha256:5aadc6a1bbb7cdb0bede386cac5e2940f5e2ff3aa20277e991cf028e0585ce94"}, +] + +[[package]] +name = "attrs" +version = "24.2.0" +requires_python = ">=3.7" +summary = "Classes Without Boilerplate" +groups = ["default"] +dependencies = [ + "importlib-metadata; python_version < \"3.8\"", +] +files = [ + {file = "attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2"}, + {file = "attrs-24.2.0.tar.gz", hash = "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346"}, +] + +[[package]] +name = "beanie" +version = "1.26.0" +requires_python = "<4.0,>=3.7" +summary = "Asynchronous Python ODM for MongoDB" +groups = ["default"] +dependencies = [ + "click>=7", + "lazy-model==0.2.0", + "motor<4.0.0,>=2.5.0", + "pydantic<3.0,>=1.10", + "toml", + "typing-extensions>=4.7; python_version < \"3.11\"", +] +files = [ + {file = "beanie-1.26.0-py3-none-any.whl", hash = "sha256:b45926c01d4a899c519c665c2a5f230990717e99f7fd68172a389ca33e7693b9"}, + {file = "beanie-1.26.0.tar.gz", hash = "sha256:54016f4ec71ed0ea6ce0c7946a395090c45687f254dbbe1cf06eec608383f790"}, +] + +[[package]] +name = "certifi" +version = "2024.7.4" +requires_python = ">=3.6" +summary = "Python package for providing Mozilla's CA Bundle." 
+groups = ["default"] +files = [ + {file = "certifi-2024.7.4-py3-none-any.whl", hash = "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90"}, + {file = "certifi-2024.7.4.tar.gz", hash = "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b"}, +] + +[[package]] +name = "cffi" +version = "1.17.0" +requires_python = ">=3.8" +summary = "Foreign Function Interface for Python calling C code." +groups = ["default"] +marker = "platform_python_implementation != \"PyPy\"" +dependencies = [ + "pycparser", +] +files = [ + {file = "cffi-1.17.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c5d97162c196ce54af6700949ddf9409e9833ef1003b4741c2b39ef46f1d9720"}, + {file = "cffi-1.17.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5ba5c243f4004c750836f81606a9fcb7841f8874ad8f3bf204ff5e56332b72b9"}, + {file = "cffi-1.17.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bb9333f58fc3a2296fb1d54576138d4cf5d496a2cc118422bd77835e6ae0b9cb"}, + {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:435a22d00ec7d7ea533db494da8581b05977f9c37338c80bc86314bec2619424"}, + {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d1df34588123fcc88c872f5acb6f74ae59e9d182a2707097f9e28275ec26a12d"}, + {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:df8bb0010fdd0a743b7542589223a2816bdde4d94bb5ad67884348fa2c1c67e8"}, + {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8b5b9712783415695663bd463990e2f00c6750562e6ad1d28e072a611c5f2a6"}, + {file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ffef8fd58a36fb5f1196919638f73dd3ae0db1a878982b27a9a5a176ede4ba91"}, + {file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4e67d26532bfd8b7f7c05d5a766d6f437b362c1bf203a3a5ce3593a645e870b8"}, + {file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:45f7cd36186db767d803b1473b3c659d57a23b5fa491ad83c6d40f2af58e4dbb"}, + {file = "cffi-1.17.0-cp311-cp311-win32.whl", hash = "sha256:a9015f5b8af1bb6837a3fcb0cdf3b874fe3385ff6274e8b7925d81ccaec3c5c9"}, + {file = "cffi-1.17.0-cp311-cp311-win_amd64.whl", hash = "sha256:b50aaac7d05c2c26dfd50c3321199f019ba76bb650e346a6ef3616306eed67b0"}, + {file = "cffi-1.17.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aec510255ce690d240f7cb23d7114f6b351c733a74c279a84def763660a2c3bc"}, + {file = "cffi-1.17.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2770bb0d5e3cc0e31e7318db06efcbcdb7b31bcb1a70086d3177692a02256f59"}, + {file = "cffi-1.17.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:db9a30ec064129d605d0f1aedc93e00894b9334ec74ba9c6bdd08147434b33eb"}, + {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a47eef975d2b8b721775a0fa286f50eab535b9d56c70a6e62842134cf7841195"}, + {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f3e0992f23bbb0be00a921eae5363329253c3b86287db27092461c887b791e5e"}, + {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6107e445faf057c118d5050560695e46d272e5301feffda3c41849641222a828"}, + {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:eb862356ee9391dc5a0b3cbc00f416b48c1b9a52d252d898e5b7696a5f9fe150"}, + {file = "cffi-1.17.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c1c13185b90bbd3f8b5963cd8ce7ad4ff441924c31e23c975cb150e27c2bf67a"}, + {file = "cffi-1.17.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:17c6d6d3260c7f2d94f657e6872591fe8733872a86ed1345bda872cfc8c74885"}, + {file = "cffi-1.17.0-cp312-cp312-win32.whl", hash = "sha256:c3b8bd3133cd50f6b637bb4322822c94c5ce4bf0d724ed5ae70afce62187c492"}, + {file = "cffi-1.17.0-cp312-cp312-win_amd64.whl", hash = "sha256:dca802c8db0720ce1c49cce1149ff7b06e91ba15fa84b1d59144fef1a1bc7ac2"}, + {file = "cffi-1.17.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:6ce01337d23884b21c03869d2f68c5523d43174d4fc405490eb0091057943118"}, + {file = "cffi-1.17.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cab2eba3830bf4f6d91e2d6718e0e1c14a2f5ad1af68a89d24ace0c6b17cced7"}, + {file = "cffi-1.17.0-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:14b9cbc8f7ac98a739558eb86fabc283d4d564dafed50216e7f7ee62d0d25377"}, + {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b00e7bcd71caa0282cbe3c90966f738e2db91e64092a877c3ff7f19a1628fdcb"}, + {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:41f4915e09218744d8bae14759f983e466ab69b178de38066f7579892ff2a555"}, + {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4760a68cab57bfaa628938e9c2971137e05ce48e762a9cb53b76c9b569f1204"}, + {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:011aff3524d578a9412c8b3cfaa50f2c0bd78e03eb7af7aa5e0df59b158efb2f"}, + {file = "cffi-1.17.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:a003ac9edc22d99ae1286b0875c460351f4e101f8c9d9d2576e78d7e048f64e0"}, + {file = "cffi-1.17.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ef9528915df81b8f4c7612b19b8628214c65c9b7f74db2e34a646a0a2a0da2d4"}, + {file = "cffi-1.17.0-cp313-cp313-win32.whl", hash = "sha256:70d2aa9fb00cf52034feac4b913181a6e10356019b18ef89bc7c12a283bf5f5a"}, + {file = "cffi-1.17.0-cp313-cp313-win_amd64.whl", hash = "sha256:b7b6ea9e36d32582cda3465f54c4b454f62f23cb083ebc7a94e2ca6ef011c3a7"}, + {file = "cffi-1.17.0.tar.gz", hash = "sha256:f3157624b7558b914cb039fd1af735e5e8049a87c817cc215109ad1c8779df76"}, +] + +[[package]] +name = "click" +version = "8.1.7" +requires_python = ">=3.7" +summary = "Composable command line interface toolkit" +groups = ["default"] +dependencies = [ + "colorama; platform_system == \"Windows\"", + "importlib-metadata; python_version < \"3.8\"", +] +files = [ + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, +] + +[[package]] +name = "colorama" +version = "0.4.6" +requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +summary = "Cross-platform colored terminal text." 
+groups = ["default"] +marker = "platform_system == \"Windows\"" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "cryptography" +version = "43.0.0" +requires_python = ">=3.7" +summary = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." +groups = ["default"] +dependencies = [ + "cffi>=1.12; platform_python_implementation != \"PyPy\"", +] +files = [ + {file = "cryptography-43.0.0-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:64c3f16e2a4fc51c0d06af28441881f98c5d91009b8caaff40cf3548089e9c74"}, + {file = "cryptography-43.0.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3dcdedae5c7710b9f97ac6bba7e1052b95c7083c9d0e9df96e02a1932e777895"}, + {file = "cryptography-43.0.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d9a1eca329405219b605fac09ecfc09ac09e595d6def650a437523fcd08dd22"}, + {file = "cryptography-43.0.0-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ea9e57f8ea880eeea38ab5abf9fbe39f923544d7884228ec67d666abd60f5a47"}, + {file = "cryptography-43.0.0-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:9a8d6802e0825767476f62aafed40532bd435e8a5f7d23bd8b4f5fd04cc80ecf"}, + {file = "cryptography-43.0.0-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:cc70b4b581f28d0a254d006f26949245e3657d40d8857066c2ae22a61222ef55"}, + {file = "cryptography-43.0.0-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:4a997df8c1c2aae1e1e5ac49c2e4f610ad037fc5a3aadc7b64e39dea42249431"}, + {file = "cryptography-43.0.0-cp37-abi3-win32.whl", hash = "sha256:6e2b11c55d260d03a8cf29ac9b5e0608d35f08077d8c087be96287f43af3ccdc"}, + {file = "cryptography-43.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:31e44a986ceccec3d0498e16f3d27b2ee5fdf69ce2ab89b52eaad1d2f33d8778"}, + {file = "cryptography-43.0.0-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:7b3f5fe74a5ca32d4d0f302ffe6680fcc5c28f8ef0dc0ae8f40c0f3a1b4fca66"}, + {file = "cryptography-43.0.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac1955ce000cb29ab40def14fd1bbfa7af2017cca696ee696925615cafd0dce5"}, + {file = "cryptography-43.0.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:299d3da8e00b7e2b54bb02ef58d73cd5f55fb31f33ebbf33bd00d9aa6807df7e"}, + {file = "cryptography-43.0.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ee0c405832ade84d4de74b9029bedb7b31200600fa524d218fc29bfa371e97f5"}, + {file = "cryptography-43.0.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:cb013933d4c127349b3948aa8aaf2f12c0353ad0eccd715ca789c8a0f671646f"}, + {file = "cryptography-43.0.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:fdcb265de28585de5b859ae13e3846a8e805268a823a12a4da2597f1f5afc9f0"}, + {file = "cryptography-43.0.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:2905ccf93a8a2a416f3ec01b1a7911c3fe4073ef35640e7ee5296754e30b762b"}, + {file = "cryptography-43.0.0-cp39-abi3-win32.whl", hash = "sha256:47ca71115e545954e6c1d207dd13461ab81f4eccfcb1345eac874828b5e3eaaf"}, + {file = "cryptography-43.0.0-cp39-abi3-win_amd64.whl", hash = "sha256:0663585d02f76929792470451a5ba64424acc3cd5227b03921dab0e2f27b1709"}, + {file = "cryptography-43.0.0.tar.gz", hash = "sha256:b88075ada2d51aa9f18283532c9f60e72170041bba88d7f37e49cbb10275299e"}, +] + +[[package]] 
+name = "dnspython" +version = "2.6.1" +requires_python = ">=3.8" +summary = "DNS toolkit" +groups = ["default"] +files = [ + {file = "dnspython-2.6.1-py3-none-any.whl", hash = "sha256:5ef3b9680161f6fa89daf8ad451b5f1a33b18ae8a1c6778cdf4b43f08c0a6e50"}, + {file = "dnspython-2.6.1.tar.gz", hash = "sha256:e8f0f9c23a7b7cb99ded64e6c3a6f3e701d78f50c55e002b839dea7225cff7cc"}, +] + +[[package]] +name = "ecdsa" +version = "0.19.0" +requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.6" +summary = "ECDSA cryptographic signature library (pure python)" +groups = ["default"] +dependencies = [ + "six>=1.9.0", +] +files = [ + {file = "ecdsa-0.19.0-py2.py3-none-any.whl", hash = "sha256:2cea9b88407fdac7bbeca0833b189e4c9c53f2ef1e1eaa29f6224dbc809b707a"}, + {file = "ecdsa-0.19.0.tar.gz", hash = "sha256:60eaad1199659900dd0af521ed462b793bbdf867432b3948e87416ae4caf6bf8"}, +] + +[[package]] +name = "email-validator" +version = "2.2.0" +requires_python = ">=3.8" +summary = "A robust email address syntax and deliverability validation library." +groups = ["default"] +dependencies = [ + "dnspython>=2.0.0", + "idna>=2.0.0", +] +files = [ + {file = "email_validator-2.2.0-py3-none-any.whl", hash = "sha256:561977c2d73ce3611850a06fa56b414621e0c8faa9d66f2611407d87465da631"}, + {file = "email_validator-2.2.0.tar.gz", hash = "sha256:cb690f344c617a714f22e66ae771445a1ceb46821152df8e165c5f9a364582b7"}, +] + +[[package]] +name = "fastapi" +version = "0.112.1" +requires_python = ">=3.8" +summary = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" +groups = ["default"] +dependencies = [ + "pydantic!=1.8,!=1.8.1,!=2.0.0,!=2.0.1,!=2.1.0,<3.0.0,>=1.7.4", + "starlette<0.39.0,>=0.37.2", + "typing-extensions>=4.8.0", +] +files = [ + {file = "fastapi-0.112.1-py3-none-any.whl", hash = "sha256:bcbd45817fc2a1cd5da09af66815b84ec0d3d634eb173d1ab468ae3103e183e4"}, + {file = "fastapi-0.112.1.tar.gz", hash = "sha256:b2537146f8c23389a7faa8b03d0bd38d4986e6983874557d95eed2acc46448ef"}, +] + +[[package]] +name = "fastui" +version = "0.6.0" +requires_python = ">=3.8" +summary = "Build better UIs faster." 
+groups = ["default"] +dependencies = [ + "pydantic[email]>=2.5.2", +] +files = [ + {file = "fastui-0.6.0-py3-none-any.whl", hash = "sha256:f5720ed88a6e135e91876d424a2e7a916e208d630d85d87db754881fcacd5f0e"}, + {file = "fastui-0.6.0.tar.gz", hash = "sha256:9ad5749aa0e0b0aa59a4f8109a632532f56dce2b46c0239df661c3a6cbec9084"}, +] + +[[package]] +name = "formgen" +version = "0.0.1" +requires_python = ">=3.11" +git = "https://github.com/kksctf/formgen.git" +ref = "master" +revision = "71e275e37d86f196cc223ed928c24b0c88fa1cfe" +summary = "generate HTML forms from pydantic models" +groups = ["default"] +dependencies = [ + "pydantic~=2.1", +] + +[[package]] +name = "frozenlist" +version = "1.4.1" +requires_python = ">=3.8" +summary = "A list-like structure which implements collections.abc.MutableSequence" +groups = ["default"] +files = [ + {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a0cb6f11204443f27a1628b0e460f37fb30f624be6051d490fa7d7e26d4af3d0"}, + {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b46c8ae3a8f1f41a0d2ef350c0b6e65822d80772fe46b653ab6b6274f61d4a49"}, + {file = "frozenlist-1.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fde5bd59ab5357e3853313127f4d3565fc7dad314a74d7b5d43c22c6a5ed2ced"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:722e1124aec435320ae01ee3ac7bec11a5d47f25d0ed6328f2273d287bc3abb0"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2471c201b70d58a0f0c1f91261542a03d9a5e088ed3dc6c160d614c01649c106"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c757a9dd70d72b076d6f68efdbb9bc943665ae954dad2801b874c8c69e185068"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f146e0911cb2f1da549fc58fc7bcd2b836a44b79ef871980d605ec392ff6b0d2"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9c515e7914626b2a2e1e311794b4c35720a0be87af52b79ff8e1429fc25f19"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c302220494f5c1ebeb0912ea782bcd5e2f8308037b3c7553fad0e48ebad6ad82"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:442acde1e068288a4ba7acfe05f5f343e19fac87bfc96d89eb886b0363e977ec"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:1b280e6507ea8a4fa0c0a7150b4e526a8d113989e28eaaef946cc77ffd7efc0a"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:fe1a06da377e3a1062ae5fe0926e12b84eceb8a50b350ddca72dc85015873f74"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:db9e724bebd621d9beca794f2a4ff1d26eed5965b004a97f1f1685a173b869c2"}, + {file = "frozenlist-1.4.1-cp311-cp311-win32.whl", hash = "sha256:e774d53b1a477a67838a904131c4b0eef6b3d8a651f8b138b04f748fccfefe17"}, + {file = "frozenlist-1.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:fb3c2db03683b5767dedb5769b8a40ebb47d6f7f45b1b3e3b4b51ec8ad9d9825"}, + {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:1979bc0aeb89b33b588c51c54ab0161791149f2461ea7c7c946d95d5f93b56ae"}, + {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cc7b01b3754ea68a62bd77ce6020afaffb44a590c2289089289363472d13aedb"}, + {file = 
"frozenlist-1.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c9c92be9fd329ac801cc420e08452b70e7aeab94ea4233a4804f0915c14eba9b"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c3894db91f5a489fc8fa6a9991820f368f0b3cbdb9cd8849547ccfab3392d86"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ba60bb19387e13597fb059f32cd4d59445d7b18b69a745b8f8e5db0346f33480"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8aefbba5f69d42246543407ed2461db31006b0f76c4e32dfd6f42215a2c41d09"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:780d3a35680ced9ce682fbcf4cb9c2bad3136eeff760ab33707b71db84664e3a"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9acbb16f06fe7f52f441bb6f413ebae6c37baa6ef9edd49cdd567216da8600cd"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:23b701e65c7b36e4bf15546a89279bd4d8675faabc287d06bbcfac7d3c33e1e6"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3e0153a805a98f5ada7e09826255ba99fb4f7524bb81bf6b47fb702666484ae1"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:dd9b1baec094d91bf36ec729445f7769d0d0cf6b64d04d86e45baf89e2b9059b"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:1a4471094e146b6790f61b98616ab8e44f72661879cc63fa1049d13ef711e71e"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5667ed53d68d91920defdf4035d1cdaa3c3121dc0b113255124bcfada1cfa1b8"}, + {file = "frozenlist-1.4.1-cp312-cp312-win32.whl", hash = "sha256:beee944ae828747fd7cb216a70f120767fc9f4f00bacae8543c14a6831673f89"}, + {file = "frozenlist-1.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:64536573d0a2cb6e625cf309984e2d873979709f2cf22839bf2d61790b448ad5"}, + {file = "frozenlist-1.4.1-py3-none-any.whl", hash = "sha256:04ced3e6a46b4cfffe20f9ae482818e34eba9b5fb0ce4056e4cc9b6e212d09b7"}, + {file = "frozenlist-1.4.1.tar.gz", hash = "sha256:c037a86e8513059a2613aaba4d817bb90b9d9b6b69aace3ce9c877e8c8ed402b"}, +] + +[[package]] +name = "h11" +version = "0.14.0" +requires_python = ">=3.7" +summary = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +groups = ["default"] +dependencies = [ + "typing-extensions; python_version < \"3.8\"", +] +files = [ + {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, + {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, +] + +[[package]] +name = "httpcore" +version = "1.0.5" +requires_python = ">=3.8" +summary = "A minimal low-level HTTP client." +groups = ["default"] +dependencies = [ + "certifi", + "h11<0.15,>=0.13", +] +files = [ + {file = "httpcore-1.0.5-py3-none-any.whl", hash = "sha256:421f18bac248b25d310f3cacd198d55b8e6125c107797b609ff9b7a6ba7991b5"}, + {file = "httpcore-1.0.5.tar.gz", hash = "sha256:34a38e2f9291467ee3b44e89dd52615370e152954ba21721378a87b2960f7a61"}, +] + +[[package]] +name = "httpx" +version = "0.27.0" +requires_python = ">=3.8" +summary = "The next generation HTTP client." 
+groups = ["default"] +dependencies = [ + "anyio", + "certifi", + "httpcore==1.*", + "idna", + "sniffio", +] +files = [ + {file = "httpx-0.27.0-py3-none-any.whl", hash = "sha256:71d5465162c13681bff01ad59b2cc68dd838ea1f10e51574bac27103f00c91a5"}, + {file = "httpx-0.27.0.tar.gz", hash = "sha256:a0cb88a46f32dc874e04ee956e4c2764aba2aa228f650b06788ba6bda2962ab5"}, +] + +[[package]] +name = "humanize" +version = "4.10.0" +requires_python = ">=3.8" +summary = "Python humanize utilities" +groups = ["default"] +files = [ + {file = "humanize-4.10.0-py3-none-any.whl", hash = "sha256:39e7ccb96923e732b5c2e27aeaa3b10a8dfeeba3eb965ba7b74a3eb0e30040a6"}, + {file = "humanize-4.10.0.tar.gz", hash = "sha256:06b6eb0293e4b85e8d385397c5868926820db32b9b654b932f57fa41c23c9978"}, +] + +[[package]] +name = "idna" +version = "3.7" +requires_python = ">=3.5" +summary = "Internationalized Domain Names in Applications (IDNA)" +groups = ["default"] +files = [ + {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, + {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, +] + +[[package]] +name = "jinja2" +version = "3.1.4" +requires_python = ">=3.7" +summary = "A very fast and expressive template engine." +groups = ["default"] +dependencies = [ + "MarkupSafe>=2.0", +] +files = [ + {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"}, + {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"}, +] + +[[package]] +name = "lazy-model" +version = "0.2.0" +requires_python = ">=3.7,<4.0" +summary = "" +groups = ["default"] +dependencies = [ + "pydantic>=1.9.0", +] +files = [ + {file = "lazy-model-0.2.0.tar.gz", hash = "sha256:57c0e91e171530c4fca7aebc3ac05a163a85cddd941bf7527cc46c0ddafca47c"}, + {file = "lazy_model-0.2.0-py3-none-any.whl", hash = "sha256:5a3241775c253e36d9069d236be8378288a93d4fc53805211fd152e04cc9c342"}, +] + +[[package]] +name = "markdown" +version = "3.3.7" +requires_python = ">=3.6" +summary = "Python implementation of Markdown." +groups = ["default"] +dependencies = [ + "importlib-metadata>=4.4; python_version < \"3.10\"", +] +files = [ + {file = "Markdown-3.3.7-py3-none-any.whl", hash = "sha256:f5da449a6e1c989a4cea2631aa8ee67caa5a2ef855d551c88f9e309f4634c621"}, + {file = "Markdown-3.3.7.tar.gz", hash = "sha256:cbb516f16218e643d8e0a95b309f77eb118cb138d39a4f27851e6a63581db874"}, +] + +[[package]] +name = "markupsafe" +version = "2.1.5" +requires_python = ">=3.7" +summary = "Safely add untrusted strings to HTML/XML markup." 
+groups = ["default"] +files = [ + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, + {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, +] + +[[package]] +name = "motor" +version = "3.5.1" +requires_python = ">=3.8" +summary = "Non-blocking MongoDB driver for Tornado or asyncio" +groups = ["default"] +dependencies = [ + "pymongo<5,>=4.5", +] +files = [ + {file = 
"motor-3.5.1-py3-none-any.whl", hash = "sha256:f95a9ea0f011464235e0bd72910baa291db3a6009e617ac27b82f57885abafb8"}, + {file = "motor-3.5.1.tar.gz", hash = "sha256:1622bd7b39c3e6375607c14736f6e1d498128eadf6f5f93f8786cf17d37062ac"}, +] + +[[package]] +name = "multidict" +version = "6.0.5" +requires_python = ">=3.7" +summary = "multidict implementation" +groups = ["default"] +files = [ + {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f285e862d2f153a70586579c15c44656f888806ed0e5b56b64489afe4a2dbfba"}, + {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:53689bb4e102200a4fafa9de9c7c3c212ab40a7ab2c8e474491914d2305f187e"}, + {file = "multidict-6.0.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:612d1156111ae11d14afaf3a0669ebf6c170dbb735e510a7438ffe2369a847fd"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7be7047bd08accdb7487737631d25735c9a04327911de89ff1b26b81745bd4e3"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de170c7b4fe6859beb8926e84f7d7d6c693dfe8e27372ce3b76f01c46e489fcf"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04bde7a7b3de05732a4eb39c94574db1ec99abb56162d6c520ad26f83267de29"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85f67aed7bb647f93e7520633d8f51d3cbc6ab96957c71272b286b2f30dc70ed"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425bf820055005bfc8aa9a0b99ccb52cc2f4070153e34b701acc98d201693733"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d3eb1ceec286eba8220c26f3b0096cf189aea7057b6e7b7a2e60ed36b373b77f"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7901c05ead4b3fb75113fb1dd33eb1253c6d3ee37ce93305acd9d38e0b5f21a4"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e0e79d91e71b9867c73323a3444724d496c037e578a0e1755ae159ba14f4f3d1"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:29bfeb0dff5cb5fdab2023a7a9947b3b4af63e9c47cae2a10ad58394b517fddc"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e030047e85cbcedbfc073f71836d62dd5dadfbe7531cae27789ff66bc551bd5e"}, + {file = "multidict-6.0.5-cp311-cp311-win32.whl", hash = "sha256:2f4848aa3baa109e6ab81fe2006c77ed4d3cd1e0ac2c1fbddb7b1277c168788c"}, + {file = "multidict-6.0.5-cp311-cp311-win_amd64.whl", hash = "sha256:2faa5ae9376faba05f630d7e5e6be05be22913782b927b19d12b8145968a85ea"}, + {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:51d035609b86722963404f711db441cf7134f1889107fb171a970c9701f92e1e"}, + {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cbebcd5bcaf1eaf302617c114aa67569dd3f090dd0ce8ba9e35e9985b41ac35b"}, + {file = "multidict-6.0.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2ffc42c922dbfddb4a4c3b438eb056828719f07608af27d163191cb3e3aa6cc5"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ceb3b7e6a0135e092de86110c5a74e46bda4bd4fbfeeb3a3bcec79c0f861e450"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:79660376075cfd4b2c80f295528aa6beb2058fd289f4c9252f986751a4cd0496"}, + {file = 
"multidict-6.0.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4428b29611e989719874670fd152b6625500ad6c686d464e99f5aaeeaca175a"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d84a5c3a5f7ce6db1f999fb9438f686bc2e09d38143f2d93d8406ed2dd6b9226"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76c0de87358b192de7ea9649beb392f107dcad9ad27276324c24c91774ca5271"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:79a6d2ba910adb2cbafc95dad936f8b9386e77c84c35bc0add315b856d7c3abb"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:92d16a3e275e38293623ebf639c471d3e03bb20b8ebb845237e0d3664914caef"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:fb616be3538599e797a2017cccca78e354c767165e8858ab5116813146041a24"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:14c2976aa9038c2629efa2c148022ed5eb4cb939e15ec7aace7ca932f48f9ba6"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:435a0984199d81ca178b9ae2c26ec3d49692d20ee29bc4c11a2a8d4514c67eda"}, + {file = "multidict-6.0.5-cp312-cp312-win32.whl", hash = "sha256:9fe7b0653ba3d9d65cbe7698cca585bf0f8c83dbbcc710db9c90f478e175f2d5"}, + {file = "multidict-6.0.5-cp312-cp312-win_amd64.whl", hash = "sha256:01265f5e40f5a17f8241d52656ed27192be03bfa8764d88e8220141d1e4b3556"}, + {file = "multidict-6.0.5-py3-none-any.whl", hash = "sha256:0d63c74e3d7ab26de115c49bffc92cc77ed23395303d496eae515d4204a625e7"}, + {file = "multidict-6.0.5.tar.gz", hash = "sha256:f7e301075edaf50500f0b341543c41194d8df3ae5caf4702f2095f3ca73dd8da"}, +] + +[[package]] +name = "prometheus-client" +version = "0.8.0" +summary = "Python client for the Prometheus monitoring system." 
+groups = ["default"] +files = [ + {file = "prometheus_client-0.8.0-py2.py3-none-any.whl", hash = "sha256:983c7ac4b47478720db338f1491ef67a100b474e3bc7dafcbaefb7d0b8f9b01c"}, + {file = "prometheus_client-0.8.0.tar.gz", hash = "sha256:c6e6b706833a6bd1fd51711299edee907857be10ece535126a158f911ee80915"}, +] + +[[package]] +name = "prometheus-fastapi-instrumentator" +version = "5.6.0" +requires_python = ">=3.6,<4.0" +summary = "Instrument your FastAPI with Prometheus metrics" +groups = ["default"] +dependencies = [ + "fastapi<=1.0.0,>=0.38.1", + "prometheus-client<0.9.0,>=0.8.0", +] +files = [ + {file = "prometheus-fastapi-instrumentator-5.6.0.tar.gz", hash = "sha256:7b6d3ca4c1feb9b4282de36b87ffd8cc3e81a71cf102b8d5fd925953dd8c122d"}, + {file = "prometheus_fastapi_instrumentator-5.6.0-py3-none-any.whl", hash = "sha256:6402497cdd59f9880e49ffbd4709429c4e431f4976db9be40ad085cb031ffe8c"}, +] + +[[package]] +name = "pyasn1" +version = "0.6.0" +requires_python = ">=3.8" +summary = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" +groups = ["default"] +files = [ + {file = "pyasn1-0.6.0-py2.py3-none-any.whl", hash = "sha256:cca4bb0f2df5504f02f6f8a775b6e416ff9b0b3b16f7ee80b5a3153d9b804473"}, + {file = "pyasn1-0.6.0.tar.gz", hash = "sha256:3a35ab2c4b5ef98e17dfdec8ab074046fbda76e281c5a706ccd82328cfc8f64c"}, +] + +[[package]] +name = "pycparser" +version = "2.22" +requires_python = ">=3.8" +summary = "C parser in Python" +groups = ["default"] +marker = "platform_python_implementation != \"PyPy\"" +files = [ + {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, + {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, +] + +[[package]] +name = "pydantic" +version = "2.8.2" +requires_python = ">=3.8" +summary = "Data validation using Python type hints" +groups = ["default"] +dependencies = [ + "annotated-types>=0.4.0", + "pydantic-core==2.20.1", + "typing-extensions>=4.12.2; python_version >= \"3.13\"", + "typing-extensions>=4.6.1; python_version < \"3.13\"", +] +files = [ + {file = "pydantic-2.8.2-py3-none-any.whl", hash = "sha256:73ee9fddd406dc318b885c7a2eab8a6472b68b8fb5ba8150949fc3db939f23c8"}, + {file = "pydantic-2.8.2.tar.gz", hash = "sha256:6f62c13d067b0755ad1c21a34bdd06c0c12625a22b0fc09c6b149816604f7c2a"}, +] + +[[package]] +name = "pydantic-core" +version = "2.20.1" +requires_python = ">=3.8" +summary = "Core functionality for Pydantic validation and serialization" +groups = ["default"] +dependencies = [ + "typing-extensions!=4.7.0,>=4.6.0", +] +files = [ + {file = "pydantic_core-2.20.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d2a8fa9d6d6f891f3deec72f5cc668e6f66b188ab14bb1ab52422fe8e644f312"}, + {file = "pydantic_core-2.20.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:175873691124f3d0da55aeea1d90660a6ea7a3cfea137c38afa0a5ffabe37b88"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:37eee5b638f0e0dcd18d21f59b679686bbd18917b87db0193ae36f9c23c355fc"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:25e9185e2d06c16ee438ed39bf62935ec436474a6ac4f9358524220f1b236e43"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:150906b40ff188a3260cbee25380e7494ee85048584998c1e66df0c7a11c17a6"}, + {file = 
"pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ad4aeb3e9a97286573c03df758fc7627aecdd02f1da04516a86dc159bf70121"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3f3ed29cd9f978c604708511a1f9c2fdcb6c38b9aae36a51905b8811ee5cbf1"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b0dae11d8f5ded51699c74d9548dcc5938e0804cc8298ec0aa0da95c21fff57b"}, + {file = "pydantic_core-2.20.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:faa6b09ee09433b87992fb5a2859efd1c264ddc37280d2dd5db502126d0e7f27"}, + {file = "pydantic_core-2.20.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9dc1b507c12eb0481d071f3c1808f0529ad41dc415d0ca11f7ebfc666e66a18b"}, + {file = "pydantic_core-2.20.1-cp311-none-win32.whl", hash = "sha256:fa2fddcb7107e0d1808086ca306dcade7df60a13a6c347a7acf1ec139aa6789a"}, + {file = "pydantic_core-2.20.1-cp311-none-win_amd64.whl", hash = "sha256:40a783fb7ee353c50bd3853e626f15677ea527ae556429453685ae32280c19c2"}, + {file = "pydantic_core-2.20.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:595ba5be69b35777474fa07f80fc260ea71255656191adb22a8c53aba4479231"}, + {file = "pydantic_core-2.20.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a4f55095ad087474999ee28d3398bae183a66be4823f753cd7d67dd0153427c9"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9aa05d09ecf4c75157197f27cdc9cfaeb7c5f15021c6373932bf3e124af029f"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e97fdf088d4b31ff4ba35db26d9cc472ac7ef4a2ff2badeabf8d727b3377fc52"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bc633a9fe1eb87e250b5c57d389cf28998e4292336926b0b6cdaee353f89a237"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d573faf8eb7e6b1cbbcb4f5b247c60ca8be39fe2c674495df0eb4318303137fe"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26dc97754b57d2fd00ac2b24dfa341abffc380b823211994c4efac7f13b9e90e"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:33499e85e739a4b60c9dac710c20a08dc73cb3240c9a0e22325e671b27b70d24"}, + {file = "pydantic_core-2.20.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bebb4d6715c814597f85297c332297c6ce81e29436125ca59d1159b07f423eb1"}, + {file = "pydantic_core-2.20.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:516d9227919612425c8ef1c9b869bbbee249bc91912c8aaffb66116c0b447ebd"}, + {file = "pydantic_core-2.20.1-cp312-none-win32.whl", hash = "sha256:469f29f9093c9d834432034d33f5fe45699e664f12a13bf38c04967ce233d688"}, + {file = "pydantic_core-2.20.1-cp312-none-win_amd64.whl", hash = "sha256:035ede2e16da7281041f0e626459bcae33ed998cca6a0a007a5ebb73414ac72d"}, + {file = "pydantic_core-2.20.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:0827505a5c87e8aa285dc31e9ec7f4a17c81a813d45f70b1d9164e03a813a686"}, + {file = "pydantic_core-2.20.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:19c0fa39fa154e7e0b7f82f88ef85faa2a4c23cc65aae2f5aea625e3c13c735a"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa223cd1e36b642092c326d694d8bf59b71ddddc94cdb752bbbb1c5c91d833b"}, + {file = 
"pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c336a6d235522a62fef872c6295a42ecb0c4e1d0f1a3e500fe949415761b8a19"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7eb6a0587eded33aeefea9f916899d42b1799b7b14b8f8ff2753c0ac1741edac"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:70c8daf4faca8da5a6d655f9af86faf6ec2e1768f4b8b9d0226c02f3d6209703"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9fa4c9bf273ca41f940bceb86922a7667cd5bf90e95dbb157cbb8441008482c"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:11b71d67b4725e7e2a9f6e9c0ac1239bbc0c48cce3dc59f98635efc57d6dac83"}, + {file = "pydantic_core-2.20.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:270755f15174fb983890c49881e93f8f1b80f0b5e3a3cc1394a255706cabd203"}, + {file = "pydantic_core-2.20.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c81131869240e3e568916ef4c307f8b99583efaa60a8112ef27a366eefba8ef0"}, + {file = "pydantic_core-2.20.1-cp313-none-win32.whl", hash = "sha256:b91ced227c41aa29c672814f50dbb05ec93536abf8f43cd14ec9521ea09afe4e"}, + {file = "pydantic_core-2.20.1-cp313-none-win_amd64.whl", hash = "sha256:65db0f2eefcaad1a3950f498aabb4875c8890438bc80b19362cf633b87a8ab20"}, + {file = "pydantic_core-2.20.1.tar.gz", hash = "sha256:26ca695eeee5f9f1aeeb211ffc12f10bcb6f71e2989988fda61dabd65db878d4"}, +] + +[[package]] +name = "pydantic-settings" +version = "2.3.4" +requires_python = ">=3.8" +summary = "Settings management using Pydantic" +groups = ["default"] +dependencies = [ + "pydantic>=2.7.0", + "python-dotenv>=0.21.0", +] +files = [ + {file = "pydantic_settings-2.3.4-py3-none-any.whl", hash = "sha256:11ad8bacb68a045f00e4f862c7a718c8a9ec766aa8fd4c32e39a0594b207b53a"}, + {file = "pydantic_settings-2.3.4.tar.gz", hash = "sha256:c5802e3d62b78e82522319bbc9b8f8ffb28ad1c988a99311d04f2a6051fca0a7"}, +] + +[[package]] +name = "pydantic" +version = "2.8.2" +extras = ["email"] +requires_python = ">=3.8" +summary = "Data validation using Python type hints" +groups = ["default"] +dependencies = [ + "email-validator>=2.0.0", + "pydantic==2.8.2", +] +files = [ + {file = "pydantic-2.8.2-py3-none-any.whl", hash = "sha256:73ee9fddd406dc318b885c7a2eab8a6472b68b8fb5ba8150949fc3db939f23c8"}, + {file = "pydantic-2.8.2.tar.gz", hash = "sha256:6f62c13d067b0755ad1c21a34bdd06c0c12625a22b0fc09c6b149816604f7c2a"}, +] + +[[package]] +name = "pymongo" +version = "4.8.0" +requires_python = ">=3.8" +summary = "Python driver for MongoDB " +groups = ["default"] +dependencies = [ + "dnspython<3.0.0,>=1.16.0", +] +files = [ + {file = "pymongo-4.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6b50040d9767197b77ed420ada29b3bf18a638f9552d80f2da817b7c4a4c9c68"}, + {file = "pymongo-4.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:417369ce39af2b7c2a9c7152c1ed2393edfd1cbaf2a356ba31eb8bcbd5c98dd7"}, + {file = "pymongo-4.8.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf821bd3befb993a6db17229a2c60c1550e957de02a6ff4dd0af9476637b2e4d"}, + {file = "pymongo-4.8.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9365166aa801c63dff1a3cb96e650be270da06e3464ab106727223123405510f"}, + {file = "pymongo-4.8.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:cc8b8582f4209c2459b04b049ac03c72c618e011d3caa5391ff86d1bda0cc486"}, + {file = "pymongo-4.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16e5019f75f6827bb5354b6fef8dfc9d6c7446894a27346e03134d290eb9e758"}, + {file = "pymongo-4.8.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3b5802151fc2b51cd45492c80ed22b441d20090fb76d1fd53cd7760b340ff554"}, + {file = "pymongo-4.8.0-cp311-cp311-win32.whl", hash = "sha256:4bf58e6825b93da63e499d1a58de7de563c31e575908d4e24876234ccb910eba"}, + {file = "pymongo-4.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:b747c0e257b9d3e6495a018309b9e0c93b7f0d65271d1d62e572747f4ffafc88"}, + {file = "pymongo-4.8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e6a720a3d22b54183352dc65f08cd1547204d263e0651b213a0a2e577e838526"}, + {file = "pymongo-4.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:31e4d21201bdf15064cf47ce7b74722d3e1aea2597c6785882244a3bb58c7eab"}, + {file = "pymongo-4.8.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6b804bb4f2d9dc389cc9e827d579fa327272cdb0629a99bfe5b83cb3e269ebf"}, + {file = "pymongo-4.8.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f2fbdb87fe5075c8beb17a5c16348a1ea3c8b282a5cb72d173330be2fecf22f5"}, + {file = "pymongo-4.8.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd39455b7ee70aabee46f7399b32ab38b86b236c069ae559e22be6b46b2bbfc4"}, + {file = "pymongo-4.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:940d456774b17814bac5ea7fc28188c7a1338d4a233efbb6ba01de957bded2e8"}, + {file = "pymongo-4.8.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:236bbd7d0aef62e64caf4b24ca200f8c8670d1a6f5ea828c39eccdae423bc2b2"}, + {file = "pymongo-4.8.0-cp312-cp312-win32.whl", hash = "sha256:47ec8c3f0a7b2212dbc9be08d3bf17bc89abd211901093e3ef3f2adea7de7a69"}, + {file = "pymongo-4.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:e84bc7707492f06fbc37a9f215374d2977d21b72e10a67f1b31893ec5a140ad8"}, + {file = "pymongo-4.8.0.tar.gz", hash = "sha256:454f2295875744dc70f1881e4b2eb99cdad008a33574bc8aaf120530f66c0cde"}, +] + +[[package]] +name = "python-dotenv" +version = "1.0.1" +requires_python = ">=3.8" +summary = "Read key-value pairs from a .env file and set them as environment variables" +groups = ["default"] +files = [ + {file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"}, + {file = "python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"}, +] + +[[package]] +name = "python-jose" +version = "3.3.0" +summary = "JOSE implementation in Python" +groups = ["default"] +dependencies = [ + "ecdsa!=0.15", + "pyasn1", + "rsa", +] +files = [ + {file = "python-jose-3.3.0.tar.gz", hash = "sha256:55779b5e6ad599c6336191246e95eb2293a9ddebd555f796a65f838f07e5d78a"}, + {file = "python_jose-3.3.0-py2.py3-none-any.whl", hash = "sha256:9b1376b023f8b298536eedd47ae1089bcdb848f1535ab30555cd92002d78923a"}, +] + +[[package]] +name = "python-jose" +version = "3.3.0" +extras = ["cryptography"] +summary = "JOSE implementation in Python" +groups = ["default"] +dependencies = [ + "cryptography>=3.4.0", + "python-jose==3.3.0", +] +files = [ + {file = "python-jose-3.3.0.tar.gz", hash = "sha256:55779b5e6ad599c6336191246e95eb2293a9ddebd555f796a65f838f07e5d78a"}, + {file = 
"python_jose-3.3.0-py2.py3-none-any.whl", hash = "sha256:9b1376b023f8b298536eedd47ae1089bcdb848f1535ab30555cd92002d78923a"}, +] + +[[package]] +name = "rsa" +version = "4.9" +requires_python = ">=3.6,<4" +summary = "Pure-Python RSA implementation" +groups = ["default"] +dependencies = [ + "pyasn1>=0.1.3", +] +files = [ + {file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"}, + {file = "rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"}, +] + +[[package]] +name = "six" +version = "1.16.0" +requires_python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +summary = "Python 2 and 3 compatibility utilities" +groups = ["default"] +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "sniffio" +version = "1.3.1" +requires_python = ">=3.7" +summary = "Sniff out which async library your code is running under" +groups = ["default"] +files = [ + {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, + {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, +] + +[[package]] +name = "starlette" +version = "0.38.2" +requires_python = ">=3.8" +summary = "The little ASGI library that shines." +groups = ["default"] +dependencies = [ + "anyio<5,>=3.4.0", + "typing-extensions>=3.10.0; python_version < \"3.10\"", +] +files = [ + {file = "starlette-0.38.2-py3-none-any.whl", hash = "sha256:4ec6a59df6bbafdab5f567754481657f7ed90dc9d69b0c9ff017907dd54faeff"}, + {file = "starlette-0.38.2.tar.gz", hash = "sha256:c7c0441065252160993a1a37cf2a73bb64d271b17303e0b0c1eb7191cfb12d75"}, +] + +[[package]] +name = "toml" +version = "0.10.2" +requires_python = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +summary = "Python Library for Tom's Obvious, Minimal Language" +groups = ["default"] +files = [ + {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, + {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, +] + +[[package]] +name = "typing-extensions" +version = "4.12.2" +requires_python = ">=3.8" +summary = "Backported and Experimental Type Hints for Python 3.8+" +groups = ["default"] +files = [ + {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, + {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, +] + +[[package]] +name = "uvicorn" +version = "0.30.6" +requires_python = ">=3.8" +summary = "The lightning-fast ASGI server." 
+groups = ["default"] +dependencies = [ + "click>=7.0", + "h11>=0.8", + "typing-extensions>=4.0; python_version < \"3.11\"", +] +files = [ + {file = "uvicorn-0.30.6-py3-none-any.whl", hash = "sha256:65fd46fe3fda5bdc1b03b94eb634923ff18cd35b2f084813ea79d1f103f711b5"}, + {file = "uvicorn-0.30.6.tar.gz", hash = "sha256:4b15decdda1e72be08209e860a1e10e92439ad5b97cf44cc945fcbee66fc5788"}, +] + +[[package]] +name = "websockets" +version = "12.0" +requires_python = ">=3.8" +summary = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)" +groups = ["default"] +files = [ + {file = "websockets-12.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5d873c7de42dea355d73f170be0f23788cf3fa9f7bed718fd2830eefedce01b4"}, + {file = "websockets-12.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3f61726cae9f65b872502ff3c1496abc93ffbe31b278455c418492016e2afc8f"}, + {file = "websockets-12.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ed2fcf7a07334c77fc8a230755c2209223a7cc44fc27597729b8ef5425aa61a3"}, + {file = "websockets-12.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e332c210b14b57904869ca9f9bf4ca32f5427a03eeb625da9b616c85a3a506c"}, + {file = "websockets-12.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5693ef74233122f8ebab026817b1b37fe25c411ecfca084b29bc7d6efc548f45"}, + {file = "websockets-12.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e9e7db18b4539a29cc5ad8c8b252738a30e2b13f033c2d6e9d0549b45841c04"}, + {file = "websockets-12.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6e2df67b8014767d0f785baa98393725739287684b9f8d8a1001eb2839031447"}, + {file = "websockets-12.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bea88d71630c5900690fcb03161ab18f8f244805c59e2e0dc4ffadae0a7ee0ca"}, + {file = "websockets-12.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dff6cdf35e31d1315790149fee351f9e52978130cef6c87c4b6c9b3baf78bc53"}, + {file = "websockets-12.0-cp311-cp311-win32.whl", hash = "sha256:3e3aa8c468af01d70332a382350ee95f6986db479ce7af14d5e81ec52aa2b402"}, + {file = "websockets-12.0-cp311-cp311-win_amd64.whl", hash = "sha256:25eb766c8ad27da0f79420b2af4b85d29914ba0edf69f547cc4f06ca6f1d403b"}, + {file = "websockets-12.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0e6e2711d5a8e6e482cacb927a49a3d432345dfe7dea8ace7b5790df5932e4df"}, + {file = "websockets-12.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:dbcf72a37f0b3316e993e13ecf32f10c0e1259c28ffd0a85cee26e8549595fbc"}, + {file = "websockets-12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:12743ab88ab2af1d17dd4acb4645677cb7063ef4db93abffbf164218a5d54c6b"}, + {file = "websockets-12.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b645f491f3c48d3f8a00d1fce07445fab7347fec54a3e65f0725d730d5b99cb"}, + {file = "websockets-12.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9893d1aa45a7f8b3bc4510f6ccf8db8c3b62120917af15e3de247f0780294b92"}, + {file = "websockets-12.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f38a7b376117ef7aff996e737583172bdf535932c9ca021746573bce40165ed"}, + {file = "websockets-12.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:f764ba54e33daf20e167915edc443b6f88956f37fb606449b4a5b10ba42235a5"}, + {file = 
"websockets-12.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:1e4b3f8ea6a9cfa8be8484c9221ec0257508e3a1ec43c36acdefb2a9c3b00aa2"}, + {file = "websockets-12.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9fdf06fd06c32205a07e47328ab49c40fc1407cdec801d698a7c41167ea45113"}, + {file = "websockets-12.0-cp312-cp312-win32.whl", hash = "sha256:baa386875b70cbd81798fa9f71be689c1bf484f65fd6fb08d051a0ee4e79924d"}, + {file = "websockets-12.0-cp312-cp312-win_amd64.whl", hash = "sha256:ae0a5da8f35a5be197f328d4727dbcfafa53d1824fac3d96cdd3a642fe09394f"}, + {file = "websockets-12.0-py3-none-any.whl", hash = "sha256:dc284bbc8d7c78a6c69e0c7325ab46ee5e40bb4d50e494d8131a07ef47500e9e"}, + {file = "websockets-12.0.tar.gz", hash = "sha256:81df9cbcbb6c260de1e007e58c011bfebe2dafc8435107b0537f393dd38c8b1b"}, +] + +[[package]] +name = "yarl" +version = "1.9.4" +requires_python = ">=3.7" +summary = "Yet another URL library" +groups = ["default"] +dependencies = [ + "idna>=2.0", + "multidict>=4.0", + "typing-extensions>=3.7.4; python_version < \"3.8\"", +] +files = [ + {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:35a2b9396879ce32754bd457d31a51ff0a9d426fd9e0e3c33394bf4b9036b099"}, + {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c7d56b293cc071e82532f70adcbd8b61909eec973ae9d2d1f9b233f3d943f2c"}, + {file = "yarl-1.9.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d8a1c6c0be645c745a081c192e747c5de06e944a0d21245f4cf7c05e457c36e0"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b3c1ffe10069f655ea2d731808e76e0f452fc6c749bea04781daf18e6039525"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:549d19c84c55d11687ddbd47eeb348a89df9cb30e1993f1b128f4685cd0ebbf8"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7409f968456111140c1c95301cadf071bd30a81cbd7ab829169fb9e3d72eae9"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e23a6d84d9d1738dbc6e38167776107e63307dfc8ad108e580548d1f2c587f42"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d8b889777de69897406c9fb0b76cdf2fd0f31267861ae7501d93003d55f54fbe"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:03caa9507d3d3c83bca08650678e25364e1843b484f19986a527630ca376ecce"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4e9035df8d0880b2f1c7f5031f33f69e071dfe72ee9310cfc76f7b605958ceb9"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:c0ec0ed476f77db9fb29bca17f0a8fcc7bc97ad4c6c1d8959c507decb22e8572"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:ee04010f26d5102399bd17f8df8bc38dc7ccd7701dc77f4a68c5b8d733406958"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:49a180c2e0743d5d6e0b4d1a9e5f633c62eca3f8a86ba5dd3c471060e352ca98"}, + {file = "yarl-1.9.4-cp311-cp311-win32.whl", hash = "sha256:81eb57278deb6098a5b62e88ad8281b2ba09f2f1147c4767522353eaa6260b31"}, + {file = "yarl-1.9.4-cp311-cp311-win_amd64.whl", hash = "sha256:d1d2532b340b692880261c15aee4dc94dd22ca5d61b9db9a8a361953d36410b1"}, + {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0d2454f0aef65ea81037759be5ca9947539667eecebca092733b2eb43c965a81"}, + {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = 
"sha256:44d8ffbb9c06e5a7f529f38f53eda23e50d1ed33c6c869e01481d3fafa6b8142"}, + {file = "yarl-1.9.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:aaaea1e536f98754a6e5c56091baa1b6ce2f2700cc4a00b0d49eca8dea471074"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3777ce5536d17989c91696db1d459574e9a9bd37660ea7ee4d3344579bb6f129"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fc5fc1eeb029757349ad26bbc5880557389a03fa6ada41703db5e068881e5f2"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ea65804b5dc88dacd4a40279af0cdadcfe74b3e5b4c897aa0d81cf86927fee78"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa102d6d280a5455ad6a0f9e6d769989638718e938a6a0a2ff3f4a7ff8c62cc4"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09efe4615ada057ba2d30df871d2f668af661e971dfeedf0c159927d48bbeff0"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:008d3e808d03ef28542372d01057fd09168419cdc8f848efe2804f894ae03e51"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6f5cb257bc2ec58f437da2b37a8cd48f666db96d47b8a3115c29f316313654ff"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:992f18e0ea248ee03b5a6e8b3b4738850ae7dbb172cc41c966462801cbf62cf7"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:0e9d124c191d5b881060a9e5060627694c3bdd1fe24c5eecc8d5d7d0eb6faabc"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3986b6f41ad22988e53d5778f91855dc0399b043fc8946d4f2e68af22ee9ff10"}, + {file = "yarl-1.9.4-cp312-cp312-win32.whl", hash = "sha256:4b21516d181cd77ebd06ce160ef8cc2a5e9ad35fb1c5930882baff5ac865eee7"}, + {file = "yarl-1.9.4-cp312-cp312-win_amd64.whl", hash = "sha256:a9bd00dc3bc395a662900f33f74feb3e757429e545d831eef5bb280252631984"}, + {file = "yarl-1.9.4-py3-none-any.whl", hash = "sha256:928cecb0ef9d5a7946eb6ff58417ad2fe9375762382f1bf5c55e61645f2c43ad"}, + {file = "yarl-1.9.4.tar.gz", hash = "sha256:566db86717cf8080b99b58b083b773a908ae40f06681e87e589a976faf8246bf"}, +] From a6d443b41375a77d2a9fdb0605e362c6d38f807d Mon Sep 17 00:00:00 2001 From: Rubikoid Date: Mon, 19 Aug 2024 01:39:12 +0300 Subject: [PATCH 41/42] Update test --- app/api/api_dynamic_tasks.py | 45 +++++++++++++++++++++++++++++++----- 1 file changed, 39 insertions(+), 6 deletions(-) diff --git a/app/api/api_dynamic_tasks.py b/app/api/api_dynamic_tasks.py index 08dcff0..42170ae 100644 --- a/app/api/api_dynamic_tasks.py +++ b/app/api/api_dynamic_tasks.py @@ -1,5 +1,6 @@ import datetime from collections.abc import Callable +from ipaddress import ip_address from typing import Annotated, Literal, Self, TypeAlias, cast from uuid import UUID @@ -15,7 +16,8 @@ from ..db.beanie import TaskDB, UserDB from ..utils import metrics from ..utils.log_helper import get_logger -from .api_tasks import CURRENT_TASK +from .api_tasks import CURRENT_TASK, get_task +import contextlib logger = get_logger("api.dynamic_tasks") @@ -72,22 +74,33 @@ class Detail(BaseModel): detail: Detail +class MultipleInfoRequest(BaseModel): + tasks: list[UUID] + + +class MultipleInfoResponse(BaseModel): + data: dict[UUID, str] + + _TT: TypeAlias = ExternalDynamicTaskInfo | ExternalDynamicTaskError ExternalDynamicTaskResp = TypeAdapter[_TT](_TT) class 
DynamicTasksClient(AsyncClient):
     def __init__(self) -> None:
-        if not settings.DYNAMIC_TASKS_CONTROLLER_TOKEN or not settings.DYNAMIC_TASKS_CONTROLLER:
-            return
+        logger.info("Trying to start dynamic tasks client")
+
+        base_url = settings.DYNAMIC_TASKS_CONTROLLER or ""
+        x_token = settings.DYNAMIC_TASKS_CONTROLLER_TOKEN or ""
 
         super().__init__(
-            base_url=settings.DYNAMIC_TASKS_CONTROLLER,
+            base_url=base_url,
             headers={
-                "X-Token": settings.DYNAMIC_TASKS_CONTROLLER_TOKEN,
+                "X-Token": x_token,
             },
             timeout=httpx.Timeout(connect=5.0, read=120.0, write=5.0, pool=5.0),
         )
+        logger.info(f"DTC started with {base_url = } {x_token = }")
 
     def format_resp(self, resp: httpx.Response) -> str:
         info = ExternalDynamicTaskResp.validate_json(resp.text)
@@ -136,7 +149,8 @@ async def stop(self, task_info: DynamicTaskInfo):
         return f"Status: {err.detail}"
 
     async def restart(self, task_info: DynamicTaskInfo):
-        pass
+        resp = await self.post("/api/restart", json=task_info.model_dump(mode="json"))
+        return self.format_resp(resp)
 
     async def info(self, task_info: DynamicTaskInfo) -> str:
         resp = await self.post("/api/info", json=task_info.model_dump(mode="json"))
@@ -186,6 +200,25 @@ async def api_dynamic_task_restart(user: auth.CURR_USER, task: CURRENT_DYNAMIC_T
     return HTMLResponse(info)
 
 
+@router.post("/infos")
+async def api_dynamic_task_infos(
+    user: auth.CURR_USER,
+    client: CLIENT,
+    req: MultipleInfoRequest,
+) -> MultipleInfoResponse:
+    resp = MultipleInfoResponse(data={})
+
+    for task_id in req.tasks:
+        try:
+            task = await get_dynamic_task(await get_task(task_id=task_id, user=user))
+        except HTTPException:
+            continue
+
+        resp.data[task_id] = await client.info(DynamicTaskInfo.build(task=task, user=user))
+
+    return resp
+
+
 @router.get("/info/{task_id}")
 async def api_dynamic_task_info(user: auth.CURR_USER, task: CURRENT_DYNAMIC_TASK, client: CLIENT) -> HTMLResponse:
     info = await client.info(DynamicTaskInfo.build(task=task, user=user))

From 6e9310ddda6a5580320734f441fe133e2cad3537 Mon Sep 17 00:00:00 2001
From: Rubikoid
Date: Sat, 24 Aug 2024 19:26:08 +0300
Subject: [PATCH 42/42] Many fixes

---
 .gitignore                                   |   6 +
 app/api/api_dynamic_tasks.py                 |   1 -
 app/cli/base.py                              |  14 +-
 app/cli/cmd/load.py                          |   6 +-
 app/view/static/dynamic_tasks.js             |  67 +++
 app/view/templates/macro.jhtml               | 104 ++---
 app/view/templates/task.jhtml                |   4 +
 app/view/templates/tasks.jhtml               |  10 +-
 dynamic_tasks_app/config.py                  |   3 +
 dynamic_tasks_app/connectors/compose.py      |  12 +
 dynamic_tasks_app/connectors/kub/__init__.py |   4 +-
 dynamic_tasks_app/connectors/kub/api.py      |  38 +-
 dynamic_tasks_app/web.py                     |   7 +
 pdm.lock                                     | 434 ++++++++++++++++++-
 pyproject.toml                               |  25 +-
 15 files changed, 613 insertions(+), 122 deletions(-)
 create mode 100644 app/view/static/dynamic_tasks.js

diff --git a/.gitignore b/.gitignore
index f07d444..3ad247f 100644
--- a/.gitignore
+++ b/.gitignore
@@ -107,6 +107,7 @@ celerybeat.pid
 # Environments
 .env
 .venv
+.linvenv
 env/
 venv/
 ENV/
@@ -147,5 +148,10 @@ cython_debug/
 
 logs/
 
+yatb_prod
+
 # env
 yatb.env
+k3s.yaml
+dynamic_tasks_app/wtf
+yatb_state.json
diff --git a/app/api/api_dynamic_tasks.py b/app/api/api_dynamic_tasks.py
index 42170ae..5b8dd5c 100644
--- a/app/api/api_dynamic_tasks.py
+++ b/app/api/api_dynamic_tasks.py
@@ -17,7 +17,6 @@
 from ..utils import metrics
 from ..utils.log_helper import get_logger
 from .api_tasks import CURRENT_TASK, get_task
-import contextlib
 
 logger = get_logger("api.dynamic_tasks")
 
diff --git a/app/cli/base.py b/app/cli/base.py
index 8a215ed..c3fdc8f 100644
--- a/app/cli/base.py
+++ b/app/cli/base.py
@@ -1,16 +1,14 @@
 import typer
-from pydantic_settings import BaseSettings
+from pydantic_settings import BaseSettings, SettingsConfigDict
 from rich.console import Console
 
 
 class Settings(BaseSettings):
-    files_url: str = "http://127.0.0.1:9999"
-    base_url: str = "http://127.0.0.1:8080"
-
-    tasks_ip: str = "127.0.0.1"
-
-    tasks_domain: str = "tasks.kksctf.ru"
-    flag_base: str = "kks"
+    model_config = SettingsConfigDict(
+        env_file="yatb.env",
+        env_file_encoding="utf-8",
+        extra="allow",
+    )
 
 
 settings = Settings()
diff --git a/app/cli/cmd/load.py b/app/cli/cmd/load.py
index ad7260e..394680f 100644
--- a/app/cli/cmd/load.py
+++ b/app/cli/cmd/load.py
@@ -72,7 +72,11 @@ async def _a():
         if not (task_src / "task.yaml").exists():
             continue
 
-        task_info = parse_yaml_raw_as(FileTask, (task_src / "task.yaml").read_text())
+        try:
+            task_info = parse_yaml_raw_as(FileTask, (task_src / "task.yaml").read_text())
+        except Exception as ex:
+            c.print(f"ERROR!!! {task_src = } has bad yaml: {ex!r}")
+            continue
 
         if task_src not in state.task_to_uuid:
             created_task = await y.create_task_full_form(task_to_raw(task_info))
diff --git a/app/view/static/dynamic_tasks.js b/app/view/static/dynamic_tasks.js
new file mode 100644
index 0000000..1e158ea
--- /dev/null
+++ b/app/view/static/dynamic_tasks.js
@@ -0,0 +1,67 @@
+function load_all_dynamic_info() {
+    const tasks = $(".dynamic_task_info_field").map((idx, info_field) => {
+        return info_field.dataset.id;
+    }).get();
+
+    req(api_list["api_dynamic_task_infos"], { data: { "tasks": tasks } })
+        .then(get_json)
+        .then((data) => { return data; }, nok_toast_generator("Get dynamic task info"))
+        .then((data) => {
+            Object.entries(data.json.data).forEach(([task_id, info]) => {
+                set_status(task_id, info);
+            });
+        });
+}
+
+function set_status(task_id, info) {
+    $("#data-" + task_id).html(info);
+    updateMacy();
+}
+
+$(".dynamic_task_info").click(function (event) {
+    event.preventDefault();
+    let task_id = this.dataset.id;
+    preq(api_list["api_dynamic_task_info"], { "task_id": task_id }, { method: 'GET' })
+        .then(get_text)
+        .then((data) => { return data; }, nok_toast_generator("Get dynamic task info"))
+        .then((data) => {
+            set_status(task_id, data.text);
+        });
+});
+
+$(".dynamic_task_start").click(function (event) {
+    event.preventDefault();
+    let task_id = this.dataset.id;
+
+
+    set_status(task_id, "Task building in progress");
+
+    preq(api_list["api_dynamic_task_start"], { "task_id": task_id }, { method: 'GET' })
+        .then(get_text)
+        .then(ok_toast_generator("Start dynamic task"), nok_toast_generator("Start dynamic task"))
+        .then((data) => {
+            set_status(task_id, data.text);
+        });
+});
+
+$(".dynamic_task_stop").click(function (event) {
+    event.preventDefault();
+    let task_id = this.dataset.id;
+    preq(api_list["api_dynamic_task_stop"], { "task_id": task_id }, { method: 'GET' })
+        .then(get_text)
+        .then(ok_toast_generator("Stop dynamic task"), nok_toast_generator("Stop dynamic task"))
+        .then((data) => {
+            set_status(task_id, data.text);
+        });
+});
+
+$(".dynamic_task_restart").click(function (event) {
+    event.preventDefault();
+    let task_id = this.dataset.id;
+    preq(api_list["api_dynamic_task_restart"], { "task_id": task_id }, { method: 'GET' })
+        .then(get_text)
+        .then(ok_toast_generator("Restart dynamic task"), nok_toast_generator("Restart dynamic task"))
+        .then((data) => {
+            set_status(task_id, data.text);
+        });
+});
diff --git a/app/view/templates/macro.jhtml b/app/view/templates/macro.jhtml
index ff0cf82..514f1bf 100644
--- a/app/view/templates/macro.jhtml
+++ 
b/app/view/templates/macro.jhtml @@ -81,7 +81,12 @@ - + {% if user and user.admin_checker() %} @@ -130,52 +135,22 @@ {% endmacro %} {% macro dynamic_task_script(user=None) %} - + {% if user %} + + {% endif %} + + {% if user and user.admin_checker() %} + + }); + + {% endif %} {% endmacro %} diff --git a/app/view/templates/task.jhtml b/app/view/templates/task.jhtml index 70e0452..1e214e2 100644 --- a/app/view/templates/task.jhtml +++ b/app/view/templates/task.jhtml @@ -19,5 +19,9 @@ {% block footer %} {{ super() }} +{% endblock %} + +{% block scripts %} + {{ super() }} {{ macro.dynamic_task_script(curr_user) }} {% endblock %} diff --git a/app/view/templates/tasks.jhtml b/app/view/templates/tasks.jhtml index 6fcf614..e36122c 100644 --- a/app/view/templates/tasks.jhtml +++ b/app/view/templates/tasks.jhtml @@ -71,6 +71,12 @@ {{ super() }} {{ macro.dynamic_task_script(curr_user) }}