feat: use flyway for db migrations #1965

Draft · wants to merge 3 commits into base: main
2 changes: 1 addition & 1 deletion .env.tpl
@@ -27,7 +27,7 @@ DATABASE_URL=http://localhost:3000
DATABASE_TOKEN=eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJpc3MiOiJzdXBhYmFzZSIsImlhdCI6MTYwMzk2ODgzNCwiZXhwIjoyNTUwNjUzNjM0LCJyb2xlIjoic2VydmljZV9yb2xlIn0.necIJaiP7X2T2QjGeV-FhpkizcNTX8HjDDBAxpgQTEI

# Postgres Database
-DATABASE_CONNECTION=postgresql://postgres:postgres@localhost:5432/postgres
+DATABASE_CONNECTION=postgresql://localhost:5432/postgres?user=postgres&password=postgres

# Cluster
CLUSTER_BASIC_AUTH_TOKEN = dGVzdDp0ZXN0
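A note on the new format (inferred from packages/api/db/flyway-config.cjs below, which prefixes this value with "jdbc:"): the PostgreSQL JDBC driver does not accept inline user:password@host credentials, so they move into query parameters. The URL Flyway ultimately receives is:

jdbc:postgresql://localhost:5432/postgres?user=postgres&password=postgres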
2 changes: 1 addition & 1 deletion .github/workflows/api.yml
@@ -27,7 +27,7 @@ jobs:
env:
DATABASE_URL: http://localhost:3000
DATABASE_TOKEN: eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJpc3MiOiJzdXBhYmFzZSIsImlhdCI6MTYwMzk2ODgzNCwiZXhwIjoyNTUwNjUzNjM0LCJyb2xlIjoic2VydmljZV9yb2xlIn0.necIJaiP7X2T2QjGeV-FhpkizcNTX8HjDDBAxpgQTEI
-DATABASE_CONNECTION: postgresql://postgres:postgres@localhost:5432/postgres
+DATABASE_CONNECTION: postgresql://localhost:5432/postgres?user=postgres&password=postgres
deploy-dev:
name: Deploy Dev
if: github.event_name == 'pull_request' && github.ref != 'refs/heads/main'
51 changes: 51 additions & 0 deletions packages/api/db/flyway-config.cjs
@@ -0,0 +1,51 @@
const path = require('path')
const dotenv = require('dotenv')

dotenv.config({ path: path.join(__dirname, '../../../.env') })
const { env } = process

if (!env.DATABASE_CONNECTION) {
throw new Error('Required env variable DATABASE_CONNECTION missing')
}

let placeholders = {
NFT_STORAGE_USER: env.NFT_STORAGE_USER || 'CURRENT_USER',
NFT_STORAGE_STATS_USER: env.NFT_STORAGE_STATS_USER || 'CURRENT_USER',
}

if (
env.DAG_CARGO_HOST &&
env.DAG_CARGO_DATABASE &&
env.DAG_CARGO_USER &&
env.DAG_CARGO_PASSWORD
) {
placeholders.DAG_CARGO_TEST_MODE = 'false'
placeholders.DAG_CARGO_HOST = env.DAG_CARGO_HOST
placeholders.DAG_CARGO_DATABASE = env.DAG_CARGO_DATABASE
placeholders.DAG_CARGO_USER = env.DAG_CARGO_USER
placeholders.DAG_CARGO_PASSWORD = env.DAG_CARGO_PASSWORD
} else {
placeholders.DAG_CARGO_TEST_MODE = 'true'
placeholders.DAG_CARGO_HOST = 'test'
placeholders.DAG_CARGO_DATABASE = 'test'
placeholders.DAG_CARGO_USER = 'test'
placeholders.DAG_CARGO_PASSWORD = 'test'
}

// To get the correct command line args, we need to give node-flywaydb keys like
// `{ 'placeholders.VARIABLE_NAME': 'VALUE' }`, which translates to the cli arg
// `-placeholders.VARIABLE_NAME=VALUE`
placeholders = Object.fromEntries(
Object.entries(placeholders).map(([key, val]) => [`placeholders.${key}`, val])
)

module.exports = {
flywayArgs: {
url: `jdbc:${env.DATABASE_CONNECTION}`,
schemas: 'public',
locations: 'filesystem:db/migrations',
sqlMigrationSuffixes: '.sql',
baselineOnMigrate: true,
...placeholders,
},
}
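For context, an illustration (not part of this PR) of how node-flywaydb consumes the exported flywayArgs: each key/value pair becomes a -key=value flag on the Flyway CLI, as the placeholder comment above describes. A minimal sketch:

// Sketch only: shows the flag translation; assumes this config file's exports.
const { flywayArgs } = require('./flyway-config.cjs')
const cliArgs = Object.entries(flywayArgs).map(([key, val]) => `-${key}=${val}`)
// e.g. ['-url=jdbc:postgresql://localhost:5432/postgres?user=...', '-schemas=public',
//       '-placeholders.DAG_CARGO_TEST_MODE=true', ...]
console.log(['flyway', ...cliArgs, 'migrate'].join(' '))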

This file was deleted.

This file was deleted.

27 changes: 0 additions & 27 deletions packages/api/db/migrations/003-user_tag_proposals.sql

This file was deleted.

16 changes: 16 additions & 0 deletions packages/api/db/migrations/V0001__initial_config.sql
@@ -0,0 +1,16 @@
-- PG doesn't support ALTER DATABASE CURRENT, and the db name differs between local/staging/production,
-- so we build each ALTER DATABASE statement dynamically with current_database() and EXECUTE it
DO $$
BEGIN
EXECUTE 'ALTER DATABASE ' || current_database() || ' SET default_statistics_target = 1000';
EXECUTE 'ALTER DATABASE ' || current_database() || ' SET enable_partitionwise_aggregate = on';
EXECUTE 'ALTER DATABASE ' || current_database() || ' SET enable_partitionwise_join = on';
EXECUTE 'ALTER DATABASE ' || current_database() || ' SET max_parallel_workers_per_gather = 8';
EXECUTE 'ALTER DATABASE ' || current_database() || ' SET max_parallel_workers = 16';
EXECUTE 'ALTER DATABASE ' || current_database() || ' SET max_parallel_maintenance_workers = 8';
EXECUTE 'ALTER DATABASE ' || current_database() || ' SET jit = on';
EXECUTE 'ALTER DATABASE ' || current_database() || ' SET idle_in_transaction_session_timeout = ''1min''';
EXECUTE 'ALTER DATABASE ' || current_database() || ' SET lock_timeout = ''1min''';
EXECUTE 'ALTER DATABASE ' || current_database() || ' SET statement_timeout = ''3min''';
END
$$;
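As a sanity check (not part of the migration), the per-database settings written above can be read back from pg_db_role_setting; note that ALTER DATABASE ... SET only affects sessions opened afterwards:

SELECT unnest(setconfig) AS setting
FROM pg_db_role_setting
WHERE setdatabase = (SELECT oid FROM pg_database WHERE datname = current_database());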
243 changes: 243 additions & 0 deletions packages/api/db/migrations/V0002__initial_tables.sql
@@ -0,0 +1,243 @@
-- Auth key blocked status type describes the type of blocking applied to an API
-- key. These values are primarily used by the admin app.
CREATE TYPE auth_key_blocked_status_type AS ENUM (
-- The api key is blocked.
'Blocked',
-- The api key is unblocked.
'Unblocked'
);

-- User tags are associated with a user to grant or restrict capabilities
-- in the application.
CREATE TYPE user_tag_type AS ENUM
(
'HasAccountRestriction',
'HasDeleteRestriction',
'HasPsaAccess',
'HasSuperHotAccess',
'StorageLimitBytes'
);

CREATE TYPE user_tag_proposal_decision_type AS ENUM
(
'Approved',
'Declined'
);

-- Pin status type is a subset of IPFS Cluster "TrackerStatus".
-- https://github.com/ipfs/ipfs-cluster/blob/54c3608899754412861e69ee81ca8f676f7e294b/api/types.go#L52-L83
CREATE TYPE pin_status_type AS ENUM (
-- An error occurred pinning.
'PinError',
-- The item has been queued for pinning on the IPFS daemon.
'PinQueued',
-- The IPFS daemon has pinned the item.
'Pinned',
-- The IPFS daemon is currently pinning the item.
'Pinning'
);

-- Service type is the place/location/organisation that is pinning the content.
CREATE TYPE service_type AS ENUM (
-- The NFT.Storage cluster in Pinata.
'Pinata',
-- The original NFT.Storage cluster.
'IpfsCluster',
-- The current cluster, originally commissioned for niftysave.
'IpfsCluster2',
-- New cluster with flatfs and a better DHT.
'IpfsCluster3'
);

-- Upload type is the type of received upload data.
CREATE TYPE upload_type AS ENUM (
-- A CAR file upload.
'Car',
-- A raw blob upload in the request body.
'Blob',
-- A multi file upload using a multipart request.
'Multipart',
-- An item pinned using the pinning service API.
'Remote',
-- An "IPNFT" uploaded with the metadata store API.
'Nft'
);

-- A user of NFT.Storage
CREATE TABLE IF NOT EXISTS public.user
(
id BIGSERIAL PRIMARY KEY,
magic_link_id TEXT UNIQUE,
github_id TEXT NOT NULL UNIQUE,
name TEXT NOT NULL,
picture TEXT,
email TEXT NOT NULL,
-- Cryptographic public address of the user.
public_address TEXT UNIQUE,
did TEXT UNIQUE,
github jsonb,
inserted_at TIMESTAMP WITH TIME ZONE DEFAULT timezone('utc'::text, now()) NOT NULL,
updated_at TIMESTAMP WITH TIME ZONE DEFAULT timezone('utc'::text, now()) NOT NULL
);
CREATE INDEX IF NOT EXISTS user_updated_at_idx ON public.user (updated_at);

CREATE TABLE IF NOT EXISTS public.user_tag
(
id BIGSERIAL PRIMARY KEY,
user_id BIGINT NOT NULL REFERENCES public.user (id),
tag user_tag_type NOT NULL,
value TEXT NOT NULL,
reason TEXT NOT NULL,
inserted_at TIMESTAMP WITH TIME ZONE DEFAULT timezone('utc'::text, now()) NOT NULL,
deleted_at TIMESTAMP WITH TIME ZONE
);
CREATE UNIQUE INDEX IF NOT EXISTS user_tag_is_deleted_idx ON user_tag (user_id, tag, deleted_at)
WHERE deleted_at IS NOT NULL;
CREATE UNIQUE INDEX IF NOT EXISTS user_tag_is_not_deleted_idx ON user_tag (user_id, tag)
WHERE deleted_at IS NULL;
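-- Illustration (not part of the migration): together these partial indexes allow
-- any number of soft-deleted rows per (user_id, tag) but at most one active row.
-- Assuming a user with id 1 exists:
--   INSERT INTO user_tag (user_id, tag, value, reason)
--     VALUES (1, 'HasPsaAccess', 'true', 'requested');                   -- ok
--   INSERT INTO user_tag (user_id, tag, value, reason)
--     VALUES (1, 'HasPsaAccess', 'true', 'dup');                         -- fails: user_tag_is_not_deleted_idx
--   UPDATE user_tag SET deleted_at = timezone('utc', now())
--     WHERE user_id = 1 AND tag = 'HasPsaAccess' AND deleted_at IS NULL; -- soft delete
--   INSERT INTO user_tag (user_id, tag, value, reason)
--     VALUES (1, 'HasPsaAccess', 'false', 'revoked');                    -- ok again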

-- These are user_tag(s) that a user has requested. It is assumed that a user can
-- only request one type of user_tag at any given time, hence the index associated
-- with this table. The admin app will have to create an entry in the user_tag
-- table once a proposal has been approved. These proposals are visible to both
-- users and admins.
CREATE TABLE IF NOT EXISTS public.user_tag_proposal
(
id BIGSERIAL PRIMARY KEY,
user_id BIGINT NOT NULL REFERENCES public.user (id),
tag user_tag_type NOT NULL,
proposed_tag_value TEXT NOT NULL,
user_proposal_form jsonb NOT NULL,
admin_decision_message TEXT ,
admin_decision_type user_tag_proposal_decision_type,
inserted_at TIMESTAMP WITH TIME ZONE DEFAULT timezone('utc'::text, now()) NOT NULL,
deleted_at TIMESTAMP WITH TIME ZONE
);
-- Note: We index active user_tag_proposals with deleted_at IS NULL to enforce only 1 active
-- tag type proposal per user. We allow there to be multiple deleted user_tag_proposals of the same type per
-- user to handle the scenario where a user has been denied multiple times by admins.
-- If deleted_at is populated, it means the user_tag_proposal has been cancelled by
-- a user or a decision has been provided by an admin.
CREATE UNIQUE INDEX IF NOT EXISTS user_tag_proposal_is_not_deleted_idx ON user_tag_proposal (user_id, tag)
WHERE deleted_at IS NULL;

-- API authentication tokens.
CREATE TABLE IF NOT EXISTS auth_key
(
id BIGSERIAL PRIMARY KEY,
-- User assigned name.
name TEXT NOT NULL,
-- The JWT used by the user to access the API.
secret TEXT NOT NULL,
user_id BIGINT NOT NULL REFERENCES public.user (id),
inserted_at TIMESTAMP WITH TIME ZONE DEFAULT timezone('utc'::text, now()) NOT NULL,
updated_at TIMESTAMP WITH TIME ZONE DEFAULT timezone('utc'::text, now()) NOT NULL,
deleted_at TIMESTAMP WITH TIME ZONE
);

CREATE TABLE IF NOT EXISTS auth_key_history
(
id BIGSERIAL PRIMARY KEY,
status auth_key_blocked_status_type NOT NULL,
reason TEXT NOT NULL,
auth_key_id BIGINT NOT NULL REFERENCES auth_key (id),
inserted_at TIMESTAMP WITH TIME ZONE DEFAULT timezone('utc'::text, now()) NOT NULL,
deleted_at TIMESTAMP WITH TIME ZONE
);

-- Details of the root of a file/directory stored on NFT.Storage.
CREATE TABLE IF NOT EXISTS content
(
-- Normalized base32 v1.
cid TEXT PRIMARY KEY,
-- Size of the DAG in bytes. Set if known on upload, or, for partial uploads,
-- set once the content is fully pinned in at least one location.
dag_size BIGINT,
inserted_at TIMESTAMP WITH TIME ZONE DEFAULT timezone('utc'::text, now()) NOT NULL,
updated_at TIMESTAMP WITH TIME ZONE DEFAULT timezone('utc'::text, now()) NOT NULL
);

CREATE INDEX IF NOT EXISTS content_updated_at_idx ON content (updated_at);
CREATE INDEX IF NOT EXISTS content_inserted_at_idx ON content (inserted_at);
CREATE UNIQUE INDEX IF NOT EXISTS content_cid_with_size_idx ON content (cid) INCLUDE (dag_size);
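-- Note (illustration, not part of the migration): INCLUDE (dag_size) makes this a
-- covering index, so a lookup such as
--   SELECT dag_size FROM content WHERE cid = 'bafy...';
-- can be answered with an index-only scan, without touching the table heap.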


-- Information for a piece of content pinned in IPFS.
CREATE TABLE IF NOT EXISTS pin
(
id BIGSERIAL PRIMARY KEY,
-- Overall pinning status at this location (may be pinned on multiple nodes).
status pin_status_type NOT NULL,
-- The root CID of the pinned content, normalized as base32 v1.
content_cid text NOT NULL REFERENCES content (cid),
-- The place where this item is pinned.
service service_type NOT NULL,
inserted_at TIMESTAMP WITH TIME ZONE DEFAULT timezone('utc'::text, now()) NOT NULL,
updated_at TIMESTAMP WITH TIME ZONE DEFAULT timezone('utc'::text, now()) NOT NULL,
UNIQUE (content_cid, service)
);

CREATE INDEX IF NOT EXISTS pin_composite_service_and_status_idx ON pin (service, status);
CREATE INDEX IF NOT EXISTS pin_composite_updated_at_and_content_cid_idx ON pin (updated_at, content_cid);

-- An upload created by a user.
CREATE TABLE IF NOT EXISTS upload
(
id BIGSERIAL PRIMARY KEY,
user_id BIGINT NOT NULL REFERENCES public.user (id),
-- User authentication token that was used to upload this content.
-- Note: may be null when the user uploads through the website.
key_id BIGINT REFERENCES auth_key (id),
-- The root CID of the uploaded content, normalized as base32 v1.
content_cid TEXT NOT NULL REFERENCES content (cid),
-- The root CID of the uploaded content, as provided by the user.
source_cid TEXT NOT NULL,
-- MIME type of the upload data as sent in the request.
mime_type TEXT,
type upload_type NOT NULL,
-- User provided name for this upload.
name TEXT,
-- List of files in the upload if the type was Multipart or Nft.
files jsonb,
-- User provided multiaddrs of origins of this upload (used by the pinning
-- service API).
origins jsonb,
-- Custom metadata. Currently used in 2 places:
-- 1. Pinning Service API user provided `Record<string, string>`.
-- 2. Metaplex endpoint `/metaplex/upload` to store details of the Metaplex user.
meta jsonb,
backup_urls text[],
inserted_at TIMESTAMP WITH TIME ZONE DEFAULT timezone('utc'::text, now()) NOT NULL,
updated_at TIMESTAMP WITH TIME ZONE DEFAULT timezone('utc'::text, now()) NOT NULL,
deleted_at TIMESTAMP WITH TIME ZONE,
UNIQUE (user_id, source_cid)
);

CREATE INDEX IF NOT EXISTS upload_inserted_at_idx ON upload (inserted_at);
CREATE INDEX IF NOT EXISTS upload_content_cid_idx ON upload (content_cid);
CREATE INDEX IF NOT EXISTS upload_source_cid_idx ON upload (source_cid);
CREATE INDEX IF NOT EXISTS upload_updated_at_idx ON upload (updated_at);
CREATE INDEX IF NOT EXISTS upload_type_idx ON upload (type);

CREATE VIEW admin_search AS
SELECT
  u.id::text AS user_id,
  u.email AS email,
  ak.secret AS token,
  ak.id::text AS token_id,
  ak.deleted_at AS deleted_at,
  akh.inserted_at AS reason_inserted_at,
  akh.reason AS reason,
  akh.status AS status
FROM public.user u
FULL OUTER JOIN auth_key ak ON ak.user_id = u.id
FULL OUTER JOIN (SELECT * FROM auth_key_history WHERE deleted_at IS NULL) AS akh ON akh.auth_key_id = ak.id;
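-- Illustration (not part of the view definition): a typical admin lookup,
-- resolving an API token to its owner and block history:
--   SELECT user_id, email, status, reason FROM admin_search WHERE token = '<jwt>';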

-- Metric contains the current values of collected metrics.
CREATE TABLE IF NOT EXISTS metric
(
name TEXT PRIMARY KEY,
value BIGINT NOT NULL,
inserted_at TIMESTAMP WITH TIME ZONE DEFAULT timezone('utc'::text, now()) NOT NULL,
updated_at TIMESTAMP WITH TIME ZONE DEFAULT timezone('utc'::text, now()) NOT NULL
);
16 changes: 16 additions & 0 deletions packages/api/db/migrations/V0003__initial_dagcargo.sql
@@ -0,0 +1,16 @@
DO
$do$
BEGIN
IF '${DAG_CARGO_TEST_MODE}' != 'true' THEN

CREATE SCHEMA IF NOT EXISTS cargo;

-- Import dag cargo schema
IMPORT FOREIGN SCHEMA cargo
LIMIT TO (aggregate_entries, aggregates, deals, dags, metrics, metrics_log)
FROM SERVER dag_cargo_server
INTO cargo;

END IF;
END
$do$
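The IMPORT FOREIGN SCHEMA above presumes a foreign server named dag_cargo_server already exists; its creation is not shown in this diff. With postgres_fdw and the same placeholders, that setup would look roughly like this (a hypothetical sketch, not part of the migration):

CREATE EXTENSION IF NOT EXISTS postgres_fdw;
CREATE SERVER dag_cargo_server
  FOREIGN DATA WRAPPER postgres_fdw
  OPTIONS (host '${DAG_CARGO_HOST}', dbname '${DAG_CARGO_DATABASE}');
CREATE USER MAPPING FOR CURRENT_USER
  SERVER dag_cargo_server
  OPTIONS (user '${DAG_CARGO_USER}', password '${DAG_CARGO_PASSWORD}');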