Skip to content

Commit

Permalink
fix linter errors
Browse files Browse the repository at this point in the history
  • Loading branch information
mki-c2c committed Feb 6, 2025
1 parent 7f2ee82 commit 4f838dd
Show file tree
Hide file tree
Showing 6 changed files with 104 additions and 59 deletions.
35 changes: 21 additions & 14 deletions backend/maelstro/core/clone.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
from functools import cache, cached_property
from io import BytesIO
from typing import Any

from geonetwork import GnApi
from geoservercloud.services import RestService  # type: ignore

from maelstro.metadata import Meta
from maelstro.config import app_config as config
Expand All @@ -28,23 +29,22 @@ def __init__(self, src_name: str, dst_name: str, uuid: str):
self.geo_hnd: GeorchestraHandler
self.checked_workspaces: set[str] = set()
self.checked_datastores: set[str] = set()
self.services = {}

@cached_property
def gn_src(self) -> GnApi:
    """GeoNetwork API client for the source instance.

    Uses ``functools.cached_property`` so the client is created once and
    stored on the instance. The previous ``@property`` + ``@cache`` stack
    keyed the cache on ``self``, keeping every instance alive for the
    lifetime of the class-level cache (ruff/pylint B019) — hence the
    ``# pylint: disable`` workaround, which is no longer needed.
    """
    return self.geo_hnd.get_gn_service(self.src_name, is_source=True)

@cached_property
def gn_dst(self) -> GnApi:
    """GeoNetwork API client for the destination instance.

    ``cached_property`` caches on the instance, avoiding the B019
    instance-leak of ``@property`` + ``@cache`` that the previous
    version suppressed with a pylint disable comment.
    """
    return self.geo_hnd.get_gn_service(self.dst_name, is_source=False)

# gs_src cannot be a fixed property since there may be several source Geoservers

@cached_property
def gs_dst(self) -> RestService:
    """GeoServer REST client for the destination instance.

    ``cached_property`` caches on the instance, avoiding the B019
    instance-leak of ``@property`` + ``@cache`` that the previous
    version suppressed with a pylint disable comment.
    """
    return self.geo_hnd.get_gs_service(self.dst_name, is_source=False)

def clone_dataset(
Expand Down Expand Up @@ -146,7 +146,7 @@ def clone_layers(self) -> None:

# styles must be available when cloning layers
if self.copy_layers:
for layer_data in layers.values():
for layer_name, layer_data in layers.items():
self.clone_layer(gs_src, layer_name, layer_data)

def get_styles_from_layer(self, layer_data: dict[str, Any]) -> dict[str, Any]:
Expand All @@ -157,8 +157,7 @@ def get_styles_from_layer(self, layer_data: dict[str, Any]) -> dict[str, Any]:
# as a dict, it must be converted to a list of dicts
additional_styles = [additional_styles]
all_styles = {
style["name"]: style
for style in [default_style] + additional_styles
style["name"]: style for style in [default_style] + additional_styles
}
return all_styles

Expand All @@ -168,7 +167,9 @@ def get_workspaces_from_style(self, style: dict[str, Any]) -> dict[str, Any]:
style["workspace"]: None
}

def get_stores_from_layers(self, gs_src: RestService, layers: dict[str, Any]) -> dict[str, Any]:
def get_stores_from_layers(
self, gs_src: RestService, layers: dict[GsLayer, Any]
) -> dict[str, Any]:
stores = {}
resources = {
layer_data["layer"]["resource"]["name"]: layer_data["layer"]["resource"]
Expand All @@ -184,7 +185,9 @@ def get_stores_from_layers(self, gs_src: RestService, layers: dict[str, Any]) ->
stores[store["name"]] = store
return stores

def get_workspaces_from_store(self, gs_src: RestService, store: dict[str, Any]) -> dict[str, Any]:
def get_workspaces_from_store(
self, gs_src: RestService, store: dict[str, Any]
) -> dict[str, Any]:
store_class = store["@class"]
store_route = store["href"].replace(gs_src.url, "")
store_resp = gs_src.rest_client.get(store_route)
Expand Down Expand Up @@ -279,7 +282,9 @@ def clone_layer(
)
raise_for_status(resp)

resp = self.gs_dst.rest_client.put(f"/rest/layers/{layer_name}", json=layer_data)
resp = self.gs_dst.rest_client.put(
f"/rest/layers/{layer_name}", json=layer_data
)
raise_for_status(resp)

def clone_style(
Expand All @@ -306,7 +311,9 @@ def clone_style(
"/styles",
style_route,
)
dst_style = self.gs_dst.rest_client.post(style_post_route, json=style_info)
dst_style = self.gs_dst.rest_client.post(
style_post_route, json=style_info
)
# raise_for_status(dst_style)

dst_style_def = self.gs_dst.rest_client.put(
Expand Down
5 changes: 2 additions & 3 deletions backend/maelstro/core/georchestra.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,15 +7,14 @@
from requests.exceptions import HTTPError
from maelstro.config import ConfigError, app_config as config
from .operations import (
LogCollectionHandler,
log_handler,
gs_logger,
)
from .exceptions import ParamError, MaelstroDetail, AuthError


class GeorchestraHandler:
def __init__(self, log_handler: LogCollectionHandler):
def __init__(self) -> None:
    """Bind the module-level ``log_handler`` (imported from .operations)."""
    # NOTE(review): this handler is a shared module-level object, so all
    # GeorchestraHandler instances log through the same collector.
    self.log_handler = log_handler

def get_gn_service(self, instance_name: str, is_source: bool) -> GnApi:
Expand Down Expand Up @@ -85,5 +84,5 @@ def get_georchestra_handler() -> Iterator[GeorchestraHandler]:
log_handler.responses.clear()
log_handler.properties["start_time"] = datetime.now()
log_handler.valid = True
yield GeorchestraHandler(log_handler)
yield GeorchestraHandler()
log_handler.valid = False
11 changes: 8 additions & 3 deletions backend/maelstro/core/operations.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,8 +13,13 @@

def setup_exception_handlers(app: FastAPI) -> None:
@app.exception_handler(HTTPException)
async def handle_fastapi_exception(request: Request, err: GnException) -> None:
log_request_to_db(err.status_code, request, log_handler)
async def handle_fastapi_exception(request: Request, err: HTTPException) -> Any:
log_request_to_db(
err.status_code,
request,
log_handler.properties,
log_handler.get_json_responses(),
)
return await http_exception_handler(request, err)

@app.exception_handler(GnException)
Expand Down Expand Up @@ -67,7 +72,7 @@ def __init__(self) -> None:
super().__init__()
self.responses: list[Response | None | dict[str, Any]] = []
self.valid = False
self.properties = {}
self.properties: dict[str, Any] = {}

def emit(self, record: logging.LogRecord) -> None:
try:
Expand Down
80 changes: 52 additions & 28 deletions backend/maelstro/logging/psql_logger.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,17 @@
from datetime import datetime
from sqlalchemy import Column, Table, Integer, String, Boolean, DateTime, create_engine, MetaData
from typing import Any
from fastapi import Request
from sqlalchemy import (
Engine,
Column,
Table,
Integer,
String,
Boolean,
DateTime,
create_engine,
MetaData,
)
from sqlalchemy.dialects.postgresql import JSONB
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import Session
Expand All @@ -11,26 +23,26 @@


DB_CONFIG = {
"host": "database",
"port": 5432,
"login": "georchestra",
"password": "georchestra",
"database": "georchestra",
"schema": "maelstro",
"table": "logs",
"host": "database",
"port": 5432,
"login": "georchestra",
"password": "georchestra",
"database": "georchestra",
"schema": "maelstro",
"table": "logs",
}
DB_CONFIG.update(config.config.get("db_logging", {}))

SCHEMA = DB_CONFIG["schema"]
SCHEMA = str(DB_CONFIG.get("schema"))
DB_URL = (
f"postgresql://{DB_CONFIG['login']}:{DB_CONFIG['password']}@{DB_CONFIG['host']}:"
f"{DB_CONFIG['port']}/{DB_CONFIG['database']}"
)


class Log(Base):
__tablename__ = 'logs'
__table_args__ = {'schema': SCHEMA}
class Log(Base): # type: ignore
__tablename__ = "logs"
__table_args__ = {"schema": SCHEMA}

id = Column(Integer, primary_key=True, autoincrement=True)
start_time = Column(DateTime, nullable=False, default=datetime.now())
Expand All @@ -50,43 +62,51 @@ class Log(Base):
copy_styles = Column(Boolean, nullable=False, default=False)
details = Column(JSONB, nullable=True)

def to_dict(self, get_details=False):
def to_dict(self, get_details: bool = False) -> dict[str, Any]:
    """Serialize this row to a mapping of column name -> value.

    The (potentially large) ``details`` JSONB column is omitted unless
    ``get_details`` is True. Annotating ``get_details`` makes the
    blanket ``# type: ignore`` on the signature unnecessary.
    """
    return {
        field.name: getattr(self, field.name)
        for field in self.__table__.c
        if get_details or field.name != "details"
    }


def to_bool(param: str) -> bool:
def to_bool(param: str | None) -> bool:
    """Coerce a query-parameter string ("true", "1", "on", ...) to bool
    using pydantic's lax validation rules."""
    # NOTE(review): a bool ``TypeAdapter`` rejects None (missing query
    # param) with a ValidationError — presumably callers always supply
    # the parameter; confirm before relying on this for optional params.
    return TypeAdapter(bool).validate_python(param)


def log_request_to_db(status_code, request, log_handler):
def log_request_to_db(
    status_code: int,
    request: Request,
    properties: dict[str, Any],
    operations: list[dict[str, Any]],
) -> None:
    """Persist the outcome of one copy request as a row in the log table.

    Combines request headers and query parameters with the collected
    ``properties`` (timestamps, titles) and the per-operation ``operations``
    list, then inserts the assembled record.
    """
    headers = request.headers
    params = request.query_params
    record: dict[str, Any] = {
        "start_time": properties.get("start_time"),
        "end_time": datetime.now(),
        "first_name": headers.get("sec-firstname"),
        "last_name": headers.get("sec-lastname"),
        "status_code": status_code,
        "dataset_uuid": params.get("metadataUuid"),
        "src_name": params.get("src_name"),
        "dst_name": params.get("dst_name"),
        "src_title": properties.get("src_title"),
        "dst_title": properties.get("dst_title"),
        "copy_meta": to_bool(params.get("copy_meta")),
        "copy_layers": to_bool(params.get("copy_layers")),
        "copy_styles": to_bool(params.get("copy_styles")),
        "details": operations,
    }
    log_to_db(record)


def get_logs(size, offset, get_details=False):
def get_logs(size: int, offset: int, get_details: bool = False) -> list[dict[str, Any]]:
    """Return up to ``size`` log rows as dicts, newest first, skipping ``offset``."""
    with Session(get_engine()) as session:
        query = (
            session.query(Log)
            .order_by(Log.id.desc())
            .offset(offset)
            .limit(size)
        )
        return [row.to_dict(get_details) for row in query]


Expand All @@ -101,34 +121,38 @@ def format_log(row: Log) -> str:
source = f'{row.src_name}:{row.dataset_uuid} - "{row.src_title}"'
destination = (
f'{row.dst_name} - "{row.dst_title}" ({", ".join(operations)})'
if operations else "n/a (copy_meta=false, copy_layers=false, copy_styles=false)"
if operations
else "n/a (copy_meta=false, copy_layers=false, copy_styles=false)"
)
return f"[{row.start_time}]: {status} {user} copie {source} vers {destination}"


def format_logs(size, offset):
def format_logs(size: int, offset: int) -> list[str]:
    """Return up to ``size`` log rows as human-readable lines, newest first."""
    with Session(get_engine()) as session:
        rows = (
            session.query(Log)
            .order_by(Log.id.desc())
            .offset(offset)
            .limit(size)
        )
        return [format_log(row) for row in rows]


def log_to_db(record):
def log_to_db(record: dict[str, Any]) -> None:
    """Insert one log record (mapping of column name -> value) into the table."""
    with Session(get_engine()) as session:
        session.add(Log(**record))
        session.commit()


def get_engine():
def get_engine() -> Engine:
    """Create a SQLAlchemy engine for the configured log database."""
    # NOTE(review): a fresh engine (and pool) is created on every call;
    # consider caching one engine per process if this is on a hot path.
    return create_engine(DB_URL)


def read_db_table(name="logs"):
def read_db_table(name: str = "logs") -> Table:
    """Reflect table ``name`` from the configured schema.

    Bug fix: the previous version ignored ``name`` and always reflected
    the hard-coded "logs" table.
    """
    engine = get_engine()
    return Table(name, MetaData(schema=SCHEMA), autoload_with=engine)


def create_db_table():
def create_db_table() -> None:
    """Create all tables mapped on ``Base`` (the log table) if they don't exist."""
    Base.metadata.create_all(get_engine())
23 changes: 15 additions & 8 deletions backend/maelstro/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,12 @@
from maelstro.metadata import Meta
from maelstro.core import CloneDataset
from maelstro.core.operations import log_handler, setup_exception_handlers
from maelstro.logging.psql_logger import create_db_table, log_request_to_db, get_logs, format_logs
from maelstro.logging.psql_logger import (
create_db_table,
log_request_to_db,
get_logs,
format_logs,
)
from maelstro.common.models import SearchQuery


Expand Down Expand Up @@ -159,7 +164,9 @@ def put_dataset_copy(
)
clone_ds = CloneDataset(src_name, dst_name, metadataUuid)
operations = clone_ds.clone_dataset(copy_meta, copy_layers, copy_styles, accept)
log_request_to_db(200, request, log_handler)
log_request_to_db(
200, request, log_handler.properties, log_handler.get_json_responses()
)
if accept == "application/json":
return operations
return PlainTextResponse("\n".join(operations))
Expand All @@ -169,14 +176,14 @@ def put_dataset_copy(
"/logs",
responses={
200: {"content": {"text/plain": {}, "application/json": {}}},
}
},
)
def get_user_logs(
    size: int = 5,
    offset: int = 0,
    get_details: bool = False,
    accept: Annotated[str, Header(include_in_schema=False)] = "text/plain",
) -> Any:
    """Serve recent copy-operation logs.

    Returns JSON rows when the Accept header is ``application/json``,
    otherwise one formatted text line per log entry.
    """
    if accept != "application/json":
        return PlainTextResponse("\n".join(format_logs(size, offset)))
    return get_logs(size, offset, get_details)
Expand Down
9 changes: 6 additions & 3 deletions backend/maelstro/metadata/meta.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,11 +28,14 @@ def __init__(self, xml_bytes: bytes, schema: str = "iso19139"):

def get_title(self) -> str:
    """Extract the dataset title from the ISO metadata, or "" when absent.

    Looks up the CI_Citation title node under MD_DataIdentification;
    returns an empty string when the node is missing or has no text.
    """
    xml_root = etree.parse(BytesIO(self.xml_bytes))
    title_path = (
        f".//{self.prefix}:MD_DataIdentification/{self.prefix}:citation"
        f"/{self.prefix}:CI_Citation/{self.prefix}:title/"
    )
    node = xml_root.find(title_path, self.namespaces)
    if node is None:
        return ""
    return node.text or ""

def get_ogc_geoserver_layers(self) -> list[dict[str, str]]:
xml_root = etree.parse(BytesIO(self.xml_bytes))
Expand Down

0 comments on commit 4f838dd

Please sign in to comment.