
add reporting and action endpoints
kheina committed Jul 23, 2024
1 parent f0c4eeb commit cbcd955
Showing 35 changed files with 1,667 additions and 151 deletions.
9 changes: 6 additions & 3 deletions authenticator/authenticator.py
@@ -85,7 +85,6 @@
"""


KVS: KeyValueStore = KeyValueStore('kheina', 'token')
BotLoginSerializer: AvroSerializer = AvroSerializer(BotLogin)
BotLoginDeserializer: AvroDeserializer = AvroDeserializer(BotLogin)

@@ -127,6 +126,7 @@ async def get_id(self: Self, key: BotType) -> int :
class Authenticator(SqlInterface, Hashable) :

EmailRegex = re_compile(r'^(?P<user>[A-Z0-9._%+-]+)@(?P<domain>[A-Z0-9.-]+\.[A-Z]{2,})$', flags=IGNORECASE)
KVS: KeyValueStore

def __init__(self) :
Hashable.__init__(self)
@@ -147,6 +147,9 @@ def __init__(self) :
'id': 0,
}

if not getattr(Authenticator, 'KVS', None) :
Authenticator.KVS = KeyValueStore('kheina', 'token')


def _validateEmail(self, email: str) -> Dict[str, str] :
e = Authenticator.EmailRegex.search(email)
@@ -248,7 +251,7 @@ async def generate_token(self, user_id: int, token_data: dict) -> TokenResponse
algorithm=self._token_algorithm,
fingerprint=token_data.get('fp', '').encode(),
)
await KVS.put_async(guid.bytes, token_info, self._token_expires_interval)
await Authenticator.KVS.put_async(guid.bytes, token_info, self._token_expires_interval)

version = self._token_version.encode()
content = b64encode(version) + b'.' + b64encode(load)
@@ -268,7 +271,7 @@ async def generate_token(self, user_id: int, token_data: dict) -> TokenResponse
async def logout(self, guid: RefId) :
# since this endpoint is behind user.authenticated, we already know that the
# token exists and all the information is correct. we just need to delete it.
await KVS.remove_async(guid)
await Authenticator.KVS.remove_async(guid)


async def fetchPublicKey(self, key_id, algorithm: Optional[AuthAlgorithm] = None) -> PublicKeyResponse :
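
The authenticator change above swaps the module-level KVS for a lazily created class attribute, presumably so that importing authenticator.authenticator no longer constructs a KeyValueStore connection as a side effect. A minimal sketch of that pattern under the same assumption, with a stand-in class in place of the real KeyValueStore:

class FakeKVS :
    """stand-in for shared.caching.key_value_store.KeyValueStore; illustrative only"""
    def __init__(self, namespace: str, set_name: str) -> None :
        self.namespace, self.set = namespace, set_name

class Service :
    KVS: FakeKVS  # annotated but unassigned, so nothing connects at import time

    def __init__(self) -> None :
        # the first instance constructs the shared store; later instances reuse it
        if not getattr(Service, 'KVS', None) :
            Service.KVS = FakeKVS('kheina', 'token')

a, b = Service(), Service()
assert a.KVS is b.KVS  # one shared store per process
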
3 changes: 1 addition & 2 deletions avro_schema_repository/schema_repository.py
@@ -3,13 +3,12 @@
import ujson
from avrofastapi.schema import AvroSchema

from shared.base64 import b64decode, b64encode
from shared.base64 import b64encode
from shared.caching import AerospikeCache
from shared.caching.key_value_store import KeyValueStore
from shared.crc import CRC
from shared.exceptions.http_error import HttpErrorHandler, NotFound
from shared.sql import SqlInterface
from shared.utilities import getFullyQualifiedClassName


KVS: KeyValueStore = KeyValueStore('kheina', 'avro_schemas', local_TTL=60)
2 changes: 1 addition & 1 deletion configs/configs.py
@@ -74,7 +74,7 @@ def getFunding(self) -> int :
@HttpErrorHandler('retrieving config')
@AerospikeCache('kheina', 'configs', '{config}', _kvs=KVS)
async def getConfig[T: BaseModel](self, config: ConfigType, _: Type[T]) -> T :
data: List[bytes] = await self.query_async("""
data: tuple[memoryview] = await self.query_async("""
SELECT bytes
FROM kheina.public.configs
WHERE key = %s;
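
The annotation change from List[bytes] to tuple[memoryview] matches what the database driver hands back for a single bytea column: one row tuple whose buffer is a memoryview, not bytes. A standalone illustration (no real query involved) of why callers copy the value out before deserializing:

row: tuple = (memoryview(b'\x01\x02\x03'),)  # shape of a fetched bytea row, illustrative only
raw = row[0]
assert isinstance(raw, memoryview) and not isinstance(raw, bytes)
payload: bytes = bytes(raw)  # explicit copy before handing the value to a deserializer
assert payload == b'\x01\x02\x03'
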
142 changes: 142 additions & 0 deletions db/2/00-mod-queue.sql
@@ -0,0 +1,142 @@
create table public.reports (
report_id bigint generated always as identity primary key,
report_type smallint not null,
created timestamptz default now() not null,
reporter bigint null
references public.users (user_id)
on update cascade
on delete set null,
assignee bigint null
references public.users (user_id)
on update cascade
on delete set null,
data bytea not null,
response text null
);

create table public.mod_queue (
queue_id bigint generated always as identity primary key,
assignee bigint null
references public.users (user_id)
on update cascade
on delete set null,
report_id bigint not null
references public.reports (report_id)
on update cascade
on delete cascade,
unique(report_id)
);

create function public.insert_into_queue_on_report() returns trigger
as $$
begin

insert into public.mod_queue
(report_id, assignee)
values
(new.report_id, new.assignee);

return new;

end;
$$ language plpgsql;

create trigger insert_into_queue_on_report after insert on public.reports
for each row execute procedure public.insert_into_queue_on_report();

create function public.update_assignee_in_report() returns trigger
as $$
begin

update public.reports
set assignee = new.assignee
where
report_id = new.report_id;

return new;

end;
$$ language plpgsql;

create trigger update_assignee_in_report after update on public.mod_queue
for each row execute procedure public.update_assignee_in_report();

create table public.mod_actions (
action_id bigint generated always as identity primary key,
report_id bigint not null
references public.reports (report_id)
on update cascade
on delete cascade,
post_id bigint null
references public.posts (post_id)
on update cascade
on delete set null,
user_id bigint null
references public.users (user_id)
on update cascade
on delete set null,
assignee bigint null
references public.users (user_id)
on update cascade
on delete set null,
created timestamptz default now() not null,
completed timestamptz null,
reason text not null,
action_type smallint not null,
action bytea not null
);

create index mod_actions_post_id_report_id on public.mod_actions (post_id, report_id);
create index mod_actions_post_id_action_id on public.mod_actions (post_id, action_id);
create index mod_actions_user_id_report_id on public.mod_actions (user_id, report_id);
create index mod_actions_user_id_action_id on public.mod_actions (user_id, action_id);
create index mod_actions_report_id on public.mod_actions (report_id);

create function public.delete_from_queue_on_action() returns trigger
as $$
begin

delete from public.mod_queue
where mod_queue.report_id = new.report_id;

return new;

end;
$$ language plpgsql;

create trigger delete_from_queue_on_action after insert on public.mod_actions
for each row execute procedure public.delete_from_queue_on_action();

create table public.bans (
ban_id bigint not null generated always as identity,
ban_type smallint not null,
action_id bigint not null
references public.mod_actions (action_id)
on update cascade
on delete cascade,
user_id bigint not null
references public.users (user_id)
on update cascade
on delete cascade,
created timestamptz default now() not null,
completed timestamptz not null,
reason text not null,
primary key (user_id, ban_id),
unique (user_id, completed),
unique (ban_id)
);

create table public.ip_bans (
ip_hash bytea not null,
ban_id bigint not null
references public.bans (ban_id)
on update cascade
on delete cascade,
primary key (ip_hash, ban_id)
);

alter table public.posts
add column locked boolean
default false not null;

create index posts_post_id_privacy_locked on public.posts (post_id, privacy, locked);
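
A usage sketch of the trigger chain defined above, using psycopg2; the driver choice, connection string, and seeded user_id 1 are assumptions for illustration, not values from the migration. Inserting a report should auto-queue it via insert_into_queue_on_report, and recording a mod action should dequeue it via delete_from_queue_on_action:

import psycopg2

conn = psycopg2.connect('dbname=kheina user=kheina')  # assumed local dev credentials
with conn, conn.cursor() as cur :
    cur.execute(
        'insert into public.reports (report_type, reporter, data) values (%s, %s, %s) returning report_id;',
        (0, 1, psycopg2.Binary(b'report payload')),  # reporter user_id 1 is assumed to exist
    )
    report_id = cur.fetchone()[0]

    # insert_into_queue_on_report fired: the new report now sits in the mod queue
    cur.execute('select count(*) from public.mod_queue where report_id = %s;', (report_id,))
    assert cur.fetchone()[0] == 1

    # recording a moderator action fires delete_from_queue_on_action and dequeues it
    cur.execute(
        'insert into public.mod_actions (report_id, reason, action_type, action) values (%s, %s, %s, %s);',
        (report_id, 'illustrative resolution', 0, psycopg2.Binary(b'')),
    )
    cur.execute('select count(*) from public.mod_queue where report_id = %s;', (report_id,))
    assert cur.fetchone()[0] == 0

Keeping the queue in sync through triggers means application code only ever writes to reports and mod_actions; mod_queue is derived state.
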
9 changes: 0 additions & 9 deletions db/test/02-remove-internal-bot-type.sql

This file was deleted.

1 change: 1 addition & 0 deletions deployment.md
@@ -1,3 +1,4 @@
configure docker to use the gcloud docker image repo
```sh
gcloud auth configure-docker <REGION>-docker.pkg.dev
docker push us-central1-docker.pkg.dev/kheinacom/fuzzly-repo/fuzzly-backend:$(git rev-parse --short HEAD)
1 change: 0 additions & 1 deletion docker-compose.yml
@@ -1,4 +1,3 @@
version: '3'
services:
db:
image: postgres:13-alpine
Binary file removed images/default-icon.png
2 changes: 1 addition & 1 deletion init.py
@@ -10,7 +10,6 @@
from cryptography.hazmat.primitives.asymmetric.ed25519 import Ed25519PrivateKey
from cryptography.hazmat.primitives.ciphers.aead import AESGCM

from authenticator.authenticator import Authenticator
from authenticator.models import LoginRequest
from shared.backblaze import B2Interface
from shared.base64 import b64encode
@@ -100,6 +99,7 @@ def uploadDefaultIcon() -> None :

@cli.command('admin')
async def createAdmin() -> LoginRequest :
"""
creates a default admin account on your fuzzly instance
"""
from authenticator.authenticator import Authenticator
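
Moving the Authenticator import inside the CLI command defers it until the command actually runs, which keeps init.py importable without pulling in the authenticator's database and cache setup. A generic sketch of the deferred-import pattern; json stands in for a heavier module and is not what the repository imports here:

def create_admin_sketch() -> None :
    # the import runs on first invocation, not when this module is loaded
    import json
    print(json.dumps({'admin': True}))

create_admin_sketch()  # only now is the deferred module imported (or found in sys.modules)
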
2 changes: 1 addition & 1 deletion k8s.yml
@@ -14,7 +14,7 @@ spec:
spec:
containers:
- name: fuzzly-backend
image: us-central1-docker.pkg.dev/kheinacom/fuzzly-repo/fuzzly-backend@sha256:3ca231d26c2050b8117f4ab724e99048308c3f9677713232bc7ef3ce822dc784
image: us-central1-docker.pkg.dev/kheinacom/fuzzly-repo/fuzzly-backend@sha256:8019b7428df8093bd92154270f51fefc8072a23ae62cff6787a5807fe8a5e24f
env:
- name: pod_ip
valueFrom:
