diff --git a/.env.tpl b/.env.tpl
index 7ef0c0f9d5..8bbbbddbe0 100644
--- a/.env.tpl
+++ b/.env.tpl
@@ -27,7 +27,7 @@ DATABASE_URL=http://localhost:3000
 DATABASE_TOKEN=eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJpc3MiOiJzdXBhYmFzZSIsImlhdCI6MTYwMzk2ODgzNCwiZXhwIjoyNTUwNjUzNjM0LCJyb2xlIjoic2VydmljZV9yb2xlIn0.necIJaiP7X2T2QjGeV-FhpkizcNTX8HjDDBAxpgQTEI

 # Postgres Database
-DATABASE_CONNECTION=postgresql://postgres:postgres@localhost:5432/postgres
+DATABASE_CONNECTION=postgresql://localhost:5432/postgres?user=postgres&password=postgres

 # Cluster
 CLUSTER_BASIC_AUTH_TOKEN = dGVzdDp0ZXN0
diff --git a/.github/workflows/api.yml b/.github/workflows/api.yml
index 75952ae0a6..26978f3a5c 100644
--- a/.github/workflows/api.yml
+++ b/.github/workflows/api.yml
@@ -27,7 +27,7 @@ jobs:
       env:
         DATABASE_URL: http://localhost:3000
         DATABASE_TOKEN: eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJpc3MiOiJzdXBhYmFzZSIsImlhdCI6MTYwMzk2ODgzNCwiZXhwIjoyNTUwNjUzNjM0LCJyb2xlIjoic2VydmljZV9yb2xlIn0.necIJaiP7X2T2QjGeV-FhpkizcNTX8HjDDBAxpgQTEI
-        DATABASE_CONNECTION: postgresql://postgres:postgres@localhost:5432/postgres
+        DATABASE_CONNECTION: postgresql://localhost:5432/postgres?user=postgres&password=postgres
   deploy-dev:
     name: Deploy Dev
     if: github.event_name == 'pull_request' && github.ref != 'refs/heads/main'
diff --git a/packages/api/db/flyway-config.cjs b/packages/api/db/flyway-config.cjs
new file mode 100644
index 0000000000..f84e5a67e9
--- /dev/null
+++ b/packages/api/db/flyway-config.cjs
@@ -0,0 +1,51 @@
+const path = require('path')
+const dotenv = require('dotenv')
+
+dotenv.config({ path: path.join(__dirname, '../../../.env') })
+const { env } = process
+
+if (!env.DATABASE_CONNECTION) {
+  throw new Error('Required env variable DATABASE_CONNECTION missing')
+}
+
+let placeholders = {
+  NFT_STORAGE_USER: env.NFT_STORAGE_USER || 'CURRENT_USER',
+  NFT_STORAGE_STATS_USER: env.NFT_STORAGE_STATS_USER || 'CURRENT_USER',
+}
+
+if (
+  env.DAG_CARGO_HOST &&
+  env.DAG_CARGO_DATABASE &&
+  env.DAG_CARGO_USER &&
+  env.DAG_CARGO_PASSWORD
+) {
+  placeholders.DAG_CARGO_TEST_MODE = 'false'
+  placeholders.DAG_CARGO_HOST = env.DAG_CARGO_HOST
+  placeholders.DAG_CARGO_DATABASE = env.DAG_CARGO_DATABASE
+  placeholders.DAG_CARGO_USER = env.DAG_CARGO_USER
+  placeholders.DAG_CARGO_PASSWORD = env.DAG_CARGO_PASSWORD
+} else {
+  placeholders.DAG_CARGO_TEST_MODE = 'true'
+  placeholders.DAG_CARGO_HOST = 'test'
+  placeholders.DAG_CARGO_DATABASE = 'test'
+  placeholders.DAG_CARGO_USER = 'test'
+  placeholders.DAG_CARGO_PASSWORD = 'test'
+}
+
+// To get the correct command line args, we need to give node-flywaydb keys like
+// `{ 'placeholders.VARIABLE_NAME': 'VALUE' }`, which translates to the cli arg
+// `-placeholders.VARIABLE_NAME=VALUE`
+placeholders = Object.fromEntries(
+  Object.entries(placeholders).map(([key, val]) => [`placeholders.${key}`, val])
+)
+
+module.exports = {
+  flywayArgs: {
+    url: `jdbc:${env.DATABASE_CONNECTION}`,
+    schemas: 'public',
+    locations: 'filesystem:db/migrations',
+    sqlMigrationSuffixes: '.sql',
+    baselineOnMigrate: true,
+    ...placeholders,
+  },
+}
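For context on the placeholder plumbing above: node-flywaydb passes each `placeholders.NAME` key to the Flyway CLI, and Flyway substitutes `${NAME}` tokens in migration text before execution. A minimal sketch of the effect, assuming the test-mode defaults (`'true'`):

```sql
-- As written in a migration (before substitution):
DO $$
BEGIN
  IF '${DAG_CARGO_TEST_MODE}' != 'true' THEN
    RAISE NOTICE 'would set up the real dagcargo FDW here';
  END IF;
END
$$;
-- With -placeholders.DAG_CARGO_TEST_MODE=true the server receives
--   IF 'true' != 'true' THEN ...
-- so the guarded branch is skipped in local/test environments.
```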
diff --git a/packages/api/db/migrations/003-add-HasDeleteRestriction-type.sql b/packages/api/db/migrations/003-add-HasDeleteRestriction-type.sql
deleted file mode 100644
index f603ed4440..0000000000
--- a/packages/api/db/migrations/003-add-HasDeleteRestriction-type.sql
+++ /dev/null
@@ -1 +0,0 @@
-ALTER TYPE user_tag_type ADD VALUE 'HasDeleteRestriction';
diff --git a/packages/api/db/migrations/003-user_tag_proposals.sql b/packages/api/db/migrations/003-user_tag_proposals.sql
deleted file mode 100644
index afc7425720..0000000000
--- a/packages/api/db/migrations/003-user_tag_proposals.sql
+++ /dev/null
@@ -1,27 +0,0 @@
--- Migration to setup user_tag_proposal
-CREATE TYPE user_tag_proposal_decision_type AS ENUM
-(
-  'Approved',
-  'Declined'
-);
-
-CREATE TABLE IF NOT EXISTS public.user_tag_proposal
-(
-  id BIGSERIAL PRIMARY KEY,
-  user_id BIGINT NOT NULL REFERENCES public.user (id),
-  tag user_tag_type NOT NULL,
-  proposed_tag_value TEXT NOT NULL,
-  user_proposal_form jsonb NOT NULL,
-  admin_decision_message TEXT,
-  admin_decision_type user_tag_proposal_decision_type,
-  inserted_at TIMESTAMP WITH TIME ZONE DEFAULT timezone('utc'::text, now()) NOT NULL,
-  deleted_at TIMESTAMP WITH TIME ZONE
-);
--- Note: We index active user_tag_proposals with deleted_at IS NULL to enforce only 1 active
--- tag type proposal per user. We allow there to be multiple deleted user_tag_proposals of the same type per
--- user to handle the scenario where a user has been denied multiple times by admins.
--- If deleted_at is populated, it means the user_tag_proposal has been cancelled by
--- a user or a decision has been provided by an admin.
-CREATE UNIQUE INDEX IF NOT EXISTS user_tag_proposal_is_not_deleted_idx ON user_tag_proposal (user_id, tag)
-WHERE deleted_at IS NULL;
-
diff --git a/packages/api/db/migrations/V0001__initial_config.sql b/packages/api/db/migrations/V0001__initial_config.sql
new file mode 100644
index 0000000000..24d10118f6
--- /dev/null
+++ b/packages/api/db/migrations/V0001__initial_config.sql
@@ -0,0 +1,16 @@
+-- PG doesn't support ALTER DATABASE CURRENT, and the db name is different between local/staging/production
+-- So we have to execute using variable substitution
+DO $$
+BEGIN
+  EXECUTE 'ALTER DATABASE ' || current_database() || ' SET default_statistics_target = 1000';
+  EXECUTE 'ALTER DATABASE ' || current_database() || ' SET enable_partitionwise_aggregate = on';
+  EXECUTE 'ALTER DATABASE ' || current_database() || ' SET enable_partitionwise_join = on';
+  EXECUTE 'ALTER DATABASE ' || current_database() || ' SET max_parallel_workers_per_gather = 8';
+  EXECUTE 'ALTER DATABASE ' || current_database() || ' SET max_parallel_workers = 16';
+  EXECUTE 'ALTER DATABASE ' || current_database() || ' SET max_parallel_maintenance_workers = 8';
+  EXECUTE 'ALTER DATABASE ' || current_database() || ' SET jit = on';
+  EXECUTE 'ALTER DATABASE ' || current_database() || ' SET idle_in_transaction_session_timeout = ''1min''';
+  EXECUTE 'ALTER DATABASE ' || current_database() || ' SET lock_timeout = ''1min''';
+  EXECUTE 'ALTER DATABASE ' || current_database() || ' SET statement_timeout = ''3min''';
+END
+$$;
\ No newline at end of file
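These `ALTER DATABASE ... SET` overrides only apply to sessions opened after the migration runs. A quick sanity check (illustrative sketch, not part of the migration):

```sql
-- From a fresh connection:
SELECT current_setting('statement_timeout');  -- expect '3min'
-- Or inspect the stored per-database overrides directly:
SELECT setconfig
FROM pg_db_role_setting
WHERE setdatabase = (SELECT oid FROM pg_database WHERE datname = current_database());
```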
diff --git a/packages/api/db/migrations/V0002__initial_tables.sql b/packages/api/db/migrations/V0002__initial_tables.sql
new file mode 100644
index 0000000000..9055c19bc7
--- /dev/null
+++ b/packages/api/db/migrations/V0002__initial_tables.sql
@@ -0,0 +1,243 @@
+-- Auth key blocked status type is the type of blocking that has occurred on the api
+-- key. These are primarily used by the admin app.
+CREATE TYPE auth_key_blocked_status_type AS ENUM (
+  -- The api key is blocked.
+  'Blocked',
+  -- The api key is unblocked.
+  'Unblocked'
+);
+
+-- User tags are associated to a user for the purpose of granting/restricting them
+-- in the application.
+CREATE TYPE user_tag_type AS ENUM
+(
+  'HasAccountRestriction',
+  'HasDeleteRestriction',
+  'HasPsaAccess',
+  'HasSuperHotAccess',
+  'StorageLimitBytes'
+);
+
+CREATE TYPE user_tag_proposal_decision_type AS ENUM
+(
+  'Approved',
+  'Declined'
+);
+
+-- Pin status type is a subset of IPFS Cluster "TrackerStatus".
+-- https://github.com/ipfs/ipfs-cluster/blob/54c3608899754412861e69ee81ca8f676f7e294b/api/types.go#L52-L83
+CREATE TYPE pin_status_type AS ENUM (
+  -- An error occurred pinning.
+  'PinError',
+  -- The item has been queued for pinning on the IPFS daemon.
+  'PinQueued',
+  -- The IPFS daemon has pinned the item.
+  'Pinned',
+  -- The IPFS daemon is currently pinning the item.
+  'Pinning'
+);
+
+-- Service type is the place/location/organisation that is pinning the content.
+CREATE TYPE service_type AS ENUM (
+  -- The NFT.Storage cluster in Pinata.
+  'Pinata',
+  -- The original NFT.Storage cluster.
+  'IpfsCluster',
+  -- The current cluster, originally commissioned for niftysave.
+  'IpfsCluster2',
+  -- New cluster with flatfs and better DHT
+  'IpfsCluster3'
+);
+
+-- Upload type is the type of received upload data.
+CREATE TYPE upload_type AS ENUM (
+  -- A CAR file upload.
+  'Car',
+  -- A raw blob upload in the request body.
+  'Blob',
+  -- A multi file upload using a multipart request.
+  'Multipart',
+  -- An item pinned using the pinning service API.
+  'Remote',
+  -- An "IPNFT" uploaded with the metadata store API.
+  'Nft'
+);
+
+-- A user of NFT.Storage
+CREATE TABLE IF NOT EXISTS public.user
+(
+  id BIGSERIAL PRIMARY KEY,
+  magic_link_id TEXT UNIQUE,
+  github_id TEXT NOT NULL UNIQUE,
+  name TEXT NOT NULL,
+  picture TEXT,
+  email TEXT NOT NULL,
+  -- Cryptographic public address of the user.
+  public_address TEXT UNIQUE,
+  did TEXT UNIQUE,
+  github jsonb,
+  inserted_at TIMESTAMP WITH TIME ZONE DEFAULT timezone('utc'::text, now()) NOT NULL,
+  updated_at TIMESTAMP WITH TIME ZONE DEFAULT timezone('utc'::text, now()) NOT NULL
+);
+CREATE INDEX IF NOT EXISTS user_updated_at_idx ON public.user (updated_at);
+
+CREATE TABLE IF NOT EXISTS public.user_tag
+(
+  id BIGSERIAL PRIMARY KEY,
+  user_id BIGINT NOT NULL REFERENCES public.user (id),
+  tag user_tag_type NOT NULL,
+  value TEXT NOT NULL,
+  reason TEXT NOT NULL,
+  inserted_at TIMESTAMP WITH TIME ZONE DEFAULT timezone('utc'::text, now()) NOT NULL,
+  deleted_at TIMESTAMP WITH TIME ZONE
+);
+CREATE UNIQUE INDEX IF NOT EXISTS user_tag_is_deleted_idx ON user_tag (user_id, tag, deleted_at)
+WHERE deleted_at IS NOT NULL;
+CREATE UNIQUE INDEX IF NOT EXISTS user_tag_is_not_deleted_idx ON user_tag (user_id, tag)
+WHERE deleted_at IS NULL;
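The two partial indexes on `user_tag` give soft-delete semantics: at most one live row per `(user_id, tag)`, while any number of distinct tombstones can accumulate. An illustrative sketch with hypothetical values:

```sql
INSERT INTO user_tag (user_id, tag, value, reason)
VALUES (1, 'HasPsaAccess', 'true', 'approved');      -- ok: first active tag
INSERT INTO user_tag (user_id, tag, value, reason)
VALUES (1, 'HasPsaAccess', 'true', 'duplicate');     -- ERROR: violates user_tag_is_not_deleted_idx
UPDATE user_tag SET deleted_at = timezone('utc', now())
WHERE user_id = 1 AND tag = 'HasPsaAccess';          -- soft delete frees the slot
INSERT INTO user_tag (user_id, tag, value, reason)
VALUES (1, 'HasPsaAccess', 'true', 're-approved');   -- ok again
```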
+
+-- These are user_tag(s) that a user has requested. It is assumed that a user can
+-- only request one type of user_tag at any given time, hence the index associated
+-- with this table. The admin app will have to create an entry in the user_tag
+-- table once a proposal has been approved. These proposals are visible to both
+-- users and admins.
+CREATE TABLE IF NOT EXISTS public.user_tag_proposal
+(
+  id BIGSERIAL PRIMARY KEY,
+  user_id BIGINT NOT NULL REFERENCES public.user (id),
+  tag user_tag_type NOT NULL,
+  proposed_tag_value TEXT NOT NULL,
+  user_proposal_form jsonb NOT NULL,
+  admin_decision_message TEXT,
+  admin_decision_type user_tag_proposal_decision_type,
+  inserted_at TIMESTAMP WITH TIME ZONE DEFAULT timezone('utc'::text, now()) NOT NULL,
+  deleted_at TIMESTAMP WITH TIME ZONE
+);
+-- Note: We index active user_tag_proposals with deleted_at IS NULL to enforce only 1 active
+-- tag type proposal per user. We allow there to be multiple deleted user_tag_proposals of the same type per
+-- user to handle the scenario where a user has been denied multiple times by admins.
+-- If deleted_at is populated, it means the user_tag_proposal has been cancelled by
+-- a user or a decision has been provided by an admin.
+CREATE UNIQUE INDEX IF NOT EXISTS user_tag_proposal_is_not_deleted_idx ON user_tag_proposal (user_id, tag)
+WHERE deleted_at IS NULL;
+
+-- API authentication tokens.
+CREATE TABLE IF NOT EXISTS auth_key
+(
+  id BIGSERIAL PRIMARY KEY,
+  -- User assigned name.
+  name TEXT NOT NULL,
+  -- The JWT used by the user to access the API.
+  secret TEXT NOT NULL,
+  user_id BIGINT NOT NULL REFERENCES public.user (id),
+  inserted_at TIMESTAMP WITH TIME ZONE DEFAULT timezone('utc'::text, now()) NOT NULL,
+  updated_at TIMESTAMP WITH TIME ZONE DEFAULT timezone('utc'::text, now()) NOT NULL,
+  deleted_at TIMESTAMP WITH TIME ZONE
+);
+
+CREATE TABLE IF NOT EXISTS auth_key_history
+(
+  id BIGSERIAL PRIMARY KEY,
+  status auth_key_blocked_status_type NOT NULL,
+  reason TEXT NOT NULL,
+  auth_key_id BIGSERIAL NOT NULL REFERENCES auth_key (id),
+  inserted_at TIMESTAMP WITH TIME ZONE DEFAULT timezone('utc'::text, now()) NOT NULL,
+  deleted_at TIMESTAMP WITH TIME ZONE
+);
+
+-- Details of the root of a file/directory stored on NFT.Storage.
+CREATE TABLE IF NOT EXISTS content
+(
+  -- Normalized base32 v1.
+  cid TEXT PRIMARY KEY,
+  -- Size of the DAG in bytes. Set if known on upload or for partials is set
+  -- when content is fully pinned in at least one location.
+  dag_size BIGINT,
+  inserted_at TIMESTAMP WITH TIME ZONE DEFAULT timezone('utc'::text, now()) NOT NULL,
+  updated_at TIMESTAMP WITH TIME ZONE DEFAULT timezone('utc'::text, now()) NOT NULL
+);
+
+CREATE INDEX IF NOT EXISTS content_updated_at_idx ON content (updated_at);
+CREATE INDEX IF NOT EXISTS content_inserted_at_idx ON content (inserted_at);
+CREATE UNIQUE INDEX content_cid_with_size_idx ON content (cid) INCLUDE (dag_size);
+
+-- Information for piece of content pinned in IPFS.
+CREATE TABLE IF NOT EXISTS pin
+(
+  id BIGSERIAL PRIMARY KEY,
+  -- Overall pinning status at this location (may be pinned on multiple nodes).
+  status pin_status_type NOT NULL,
+  -- The root CID of the pinned content, normalized as base32 v1.
+  content_cid text NOT NULL REFERENCES content (cid),
+  -- The place where this item is pinned.
+  service service_type NOT NULL,
+  inserted_at TIMESTAMP WITH TIME ZONE DEFAULT timezone('utc'::text, now()) NOT NULL,
+  updated_at TIMESTAMP WITH TIME ZONE DEFAULT timezone('utc'::text, now()) NOT NULL,
+  UNIQUE (content_cid, service)
+);
+
+CREATE INDEX IF NOT EXISTS pin_composite_service_and_status_idx ON pin (service, status);
+CREATE INDEX IF NOT EXISTS pin_composite_updated_at_and_content_cid_idx ON pin (updated_at, content_cid);
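`UNIQUE (content_cid, service)` means a CID has at most one pin row per service, so per-location status is a simple lookup. A hedged example (hypothetical CID):

```sql
SELECT service, status, updated_at
FROM pin
WHERE content_cid = 'bafybeiaj5yqocsg5cxsuhtvclnh4ulmrgsmnfbhbrfxrc3u2kkh35mts4e'
ORDER BY service;
```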
+
+-- An upload created by a user.
+CREATE TABLE IF NOT EXISTS upload
+(
+  id BIGSERIAL PRIMARY KEY,
+  user_id BIGINT NOT NULL REFERENCES public.user (id),
+  -- User authentication token that was used to upload this content.
+  -- Note: may be null when the user uploads through the website.
+  key_id BIGINT REFERENCES auth_key (id),
+  -- The root CID of the uploaded content, normalized as base32 v1.
+  content_cid TEXT NOT NULL REFERENCES content (cid),
+  -- The root CID of the uploaded content, as provided by the user.
+  source_cid TEXT NOT NULL,
+  -- MIME type of the upload data as sent in the request.
+  mime_type TEXT,
+  type upload_type NOT NULL,
+  -- User provided name for this upload.
+  name TEXT,
+  -- List of files in the upload if the type was Multipart or Nft.
+  files jsonb,
+  -- User provided multiaddrs of origins of this upload (used by the pinning
+  -- service API).
+  origins jsonb,
+  -- Custom metadata. Currently used in 2 places:
+  -- 1. Pinning Service API user provided `Record`.
+  -- 2. Metaplex endpoint `/metaplex/upload` to store details of the Metaplex user.
+  meta jsonb,
+  backup_urls text[],
+  inserted_at TIMESTAMP WITH TIME ZONE DEFAULT timezone('utc'::text, now()) NOT NULL,
+  updated_at TIMESTAMP WITH TIME ZONE DEFAULT timezone('utc'::text, now()) NOT NULL,
+  deleted_at TIMESTAMP WITH TIME ZONE,
+  UNIQUE (user_id, source_cid)
+);
+
+CREATE INDEX IF NOT EXISTS upload_inserted_at_idx ON upload (inserted_at);
+CREATE INDEX IF NOT EXISTS upload_content_cid_idx ON upload (content_cid);
+CREATE INDEX IF NOT EXISTS upload_source_cid_idx ON upload (source_cid);
+CREATE INDEX IF NOT EXISTS upload_updated_at_idx ON upload (updated_at);
+CREATE INDEX IF NOT EXISTS upload_type_idx ON upload (type);
+
+CREATE VIEW admin_search as
+select
+  u.id::text as user_id,
+  u.email as email,
+  ak.secret as token,
+  ak.id::text as token_id,
+  ak.deleted_at as deleted_at,
+  akh.inserted_at as reason_inserted_at,
+  akh.reason as reason,
+  akh.status as status
+from public.user u
+full outer join auth_key ak on ak.user_id = u.id
+full outer join (select * from auth_key_history where deleted_at is null) as akh on akh.auth_key_id = ak.id;
+
+-- Metric contains the current values of collected metrics.
+CREATE TABLE IF NOT EXISTS metric
+(
+  name TEXT PRIMARY KEY,
+  value BIGINT NOT NULL,
+  inserted_at TIMESTAMP WITH TIME ZONE DEFAULT timezone('utc'::text, now()) NOT NULL,
+  updated_at TIMESTAMP WITH TIME ZONE DEFAULT timezone('utc'::text, now()) NOT NULL
+);
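A usage sketch for the `admin_search` view, using the fixture user seeded in V0004 below (illustrative, not part of the migration):

```sql
SELECT user_id, token_id, status, reason
FROM admin_search
WHERE email = 'test@gmail.com';
```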
diff --git a/packages/api/db/migrations/V0003__initial_dagcargo.sql b/packages/api/db/migrations/V0003__initial_dagcargo.sql
new file mode 100644
index 0000000000..c7ad197ed7
--- /dev/null
+++ b/packages/api/db/migrations/V0003__initial_dagcargo.sql
@@ -0,0 +1,16 @@
+DO
+$do$
+BEGIN
+IF '${DAG_CARGO_TEST_MODE}' != 'true' THEN
+
+CREATE SCHEMA IF NOT EXISTS cargo;
+
+-- Import dag cargo schema
+IMPORT FOREIGN SCHEMA cargo
+  LIMIT TO (aggregate_entries, aggregates, deals, dags, metrics, metrics_log)
+  FROM SERVER dag_cargo_server
+  INTO cargo;
+
+END IF;
+END
+$do$
diff --git a/packages/api/db/migrations/V0004__initial_dagcargo_testing.sql b/packages/api/db/migrations/V0004__initial_dagcargo_testing.sql
new file mode 100644
index 0000000000..334986693b
--- /dev/null
+++ b/packages/api/db/migrations/V0004__initial_dagcargo_testing.sql
@@ -0,0 +1,117 @@
+DO
+$do$
+BEGIN
+IF '${DAG_CARGO_TEST_MODE}' = 'true' THEN
+
+CREATE SCHEMA IF NOT EXISTS cargo;
+
+-- This is a copy of the dagcargo schema for testing purposes.
+-- https://github.com/nftstorage/dagcargo/blob/master/maint/pg_schema.sql
+
+CREATE TABLE IF NOT EXISTS cargo.aggregate_entries (
+  aggregate_cid TEXT NOT NULL,
+  cid_v1 TEXT NOT NULL,
+  datamodel_selector TEXT NOT NULL
+);
+
+CREATE TABLE IF NOT EXISTS cargo.aggregates (
+  aggregate_cid TEXT NOT NULL UNIQUE,
+  piece_cid TEXT UNIQUE NOT NULL,
+  sha256hex TEXT NOT NULL,
+  export_size BIGINT NOT NULL,
+  metadata JSONB,
+  entry_created TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW()
+);
+
+CREATE TABLE IF NOT EXISTS cargo.deals (
+  deal_id BIGINT UNIQUE NOT NULL,
+  aggregate_cid TEXT NOT NULL,
+  client TEXT NOT NULL,
+  provider TEXT NOT NULL,
+  status TEXT NOT NULL,
+  status_meta TEXT,
+  start_epoch INTEGER NOT NULL,
+  start_time TIMESTAMP WITH TIME ZONE NOT NULL,
+  end_epoch INTEGER NOT NULL,
+  end_time TIMESTAMP WITH TIME ZONE NOT NULL,
+  sector_start_epoch INTEGER,
+  sector_start_time TIMESTAMP WITH TIME ZONE,
+  entry_created TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(),
+  entry_last_updated TIMESTAMP WITH TIME ZONE NOT NULL
+);
+
+CREATE TABLE IF NOT EXISTS cargo.metrics (
+  name TEXT NOT NULL,
+  dimensions TEXT[],
+  description TEXT NOT NULL,
+  value BIGINT,
+  collected_at TIMESTAMP WITH TIME ZONE,
+  collection_took_seconds NUMERIC NOT NULL
+);
+
+CREATE TABLE IF NOT EXISTS cargo.metrics_log (
+  name TEXT NOT NULL,
+  dimensions TEXT[],
+  value BIGINT,
+  collected_at TIMESTAMP WITH TIME ZONE
+);
+
+-- Test data for cargo tables
+
+INSERT INTO cargo.metrics_log (name, dimensions, value, collected_at) VALUES
+  ('dagcargo_project_bytes_in_active_deals', '{{project,staging.nft.storage}}', 167859554927623, '2022-04-01 13:41:08.479404+00');
+
+INSERT INTO cargo.metrics_log (name, dimensions, value, collected_at) VALUES
+  ('dagcargo_project_bytes_in_active_deals', '{{project,nft.storage}}', 169334115720738, '2022-03-01 16:33:28.505513+00');
+
+INSERT INTO cargo.metrics_log (name, dimensions, value, collected_at) VALUES
+  ('dagcargo_project_bytes_in_active_deals', '{{project,nft.storage}}', 169334115720737, '2022-02-01 16:33:28.505513+00');
+
+INSERT INTO cargo.aggregate_entries ("aggregate_cid", "cid_v1", "datamodel_selector") VALUES
+('bafybeiek5gau46j4dxoyty27qtirb3iuoq7aax4l3xt25mfk2igyt35bme', 'bafybeiaj5yqocsg5cxsuhtvclnh4ulmrgsmnfbhbrfxrc3u2kkh35mts4e', 'Links/19/Hash/Links/46/Hash/Links/0/Hash');
+
+INSERT INTO cargo.aggregates ("aggregate_cid", "piece_cid", "sha256hex", "export_size", "metadata", "entry_created") VALUES
+('bafybeiek5gau46j4dxoyty27qtirb3iuoq7aax4l3xt25mfk2igyt35bme', 'baga6ea4seaqfanmqerzaiq7udm5wxx3hcmgapukudbadjarzkadudexamn5gwny', '9ad34a5221cc171dcc61c0862680634ca065c32972ab59f92626b7f2f18ca3fc', 25515304172, '{"Version": 1, "RecordType": "DagAggregate UnixFS"}', '2021-09-09 14:41:14.099613+00');
+
+INSERT INTO cargo.deals ("deal_id", "aggregate_cid", "client", "provider", "status", "start_epoch", "end_epoch", "entry_created", "entry_last_updated", "status_meta", "start_time", "sector_start_epoch", "sector_start_time", "end_time") VALUES
+(2424132, 'bafybeiek5gau46j4dxoyty27qtirb3iuoq7aax4l3xt25mfk2igyt35bme', 'f144zep4gitj73rrujd3jw6iprljicx6vl4wbeavi', 'f0678914', 'active', 1102102, 2570902, '2021-09-09 16:30:52.252233+00', '2021-09-10 00:45:50.408956+00', 'containing sector active as of 2021-09-10 00:36:30 at epoch 1097593', '2021-09-11 14:11:00+00', 1097593, '2021-09-10 00:36:30+00', '2023-02-03 14:11:00+00');
+INSERT INTO cargo.metrics (name, dimensions, description, value, collected_at, collection_took_seconds) VALUES
+('dagcargo_project_items_in_active_deals', '{{project,staging.web3.storage}}', 'Count of aggregated items with at least one active deal per project', 1438, '2022-04-14 23:56:46.803497+00', 405.292);
+
+INSERT INTO cargo.metrics (name, dimensions, description, value, collected_at, collection_took_seconds) VALUES
+('dagcargo_project_items_in_active_deals', '{{project,nft.storage}}', 'Count of aggregated items with at least one active deal per project', 56426047, '2022-04-14 23:56:46.806892+00', 405.292);
+
+INSERT INTO cargo.metrics (name, dimensions, description, value, collected_at, collection_took_seconds) VALUES
+('dagcargo_project_bytes_in_active_deals', '{{project,nft.storage}}', 'Amount of per-DAG-deduplicated bytes with at least one active deal per project', 169389985753391, '2022-04-14 23:51:45.76915+00', 104.256);
+
+INSERT INTO cargo.metrics (name, dimensions, description, value, collected_at, collection_took_seconds) VALUES
+('dagcargo_project_bytes_in_active_deals', '{{project,staging.web3.storage}}', 'Amount of per-DAG-deduplicated bytes with at least one active deal per project', 133753809372, '2022-04-14 23:51:45.76712+00', 104.256);
+
+INSERT INTO cargo.metrics (name, dimensions, description, value, collected_at, collection_took_seconds) VALUES
+('dagcargo_project_bytes_in_active_deals', '{{project,web3.storage}}', 'Amount of per-DAG-deduplicated bytes with at least one active deal per project', 181663391277785, '2022-04-14 23:51:45.768323+00', 104.256);
+
+INSERT INTO public.metric (name, value, updated_at)
+  VALUES ('uploads_past_7_total', 2011366, TIMEZONE('utc', NOW()));
+
+INSERT INTO public.metric (name, value, updated_at)
+  VALUES ('uploads_nft_total', 685866, TIMEZONE('utc', NOW()));
+
+INSERT INTO public.metric (name, value, updated_at)
+  VALUES ('uploads_remote_total', 11077834, TIMEZONE('utc', NOW()));
+
+INSERT INTO public.metric (name, value, updated_at)
+  VALUES ('uploads_car_total', 17711308, TIMEZONE('utc', NOW()));
+
+INSERT INTO public.metric (name, value, updated_at)
+  VALUES ('uploads_multipart_total', 1456388, TIMEZONE('utc', NOW()));
+
+INSERT INTO public.metric (name, value, updated_at)
+  VALUES ('uploads_blob_total', 12420729, TIMEZONE('utc', NOW()));
+
+INSERT INTO public."user" (magic_link_id, github_id, name, email, public_address) VALUES ('did:ethr:0x65007A739ab7AC5c537161249b81250E49e2853C', 'github|000000', 'mock user', 'test@gmail.com', '0x65007A739ab7AC5c537161249b81250E49e2853C');
+INSERT INTO public.auth_key (name, secret, user_id) VALUES ('main', 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiJkaWQ6ZXRocjoweDY1MDA3QTczOWFiN0FDNWM1MzcxNjEyNDliODEyNTBFNDllMjg1M0MiLCJpc3MiOiJuZnQtc3RvcmFnZSIsImlhdCI6MTYzOTc1NDczNjYzOCwibmFtZSI6Im1haW4ifQ.wKwJIRXXHsgwVp8mOQp6r3_F4Lz5lnoAkgVP8wqwA_Y', 1);
+
+END IF;
+END
+$do$
\ No newline at end of file
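With these fixtures, metrics queries can be exercised without a live dagcargo connection; for example (a sketch, not part of the migration):

```sql
SELECT value
FROM cargo.metrics_log
WHERE name = 'dagcargo_project_bytes_in_active_deals'
  AND dimensions = '{{project,nft.storage}}'
ORDER BY collected_at DESC
LIMIT 1;  -- returns 169334115720738 given the rows seeded above
```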
diff --git a/packages/api/db/migrations/V0005__initial_dagcargo_fdw.sql b/packages/api/db/migrations/V0005__initial_dagcargo_fdw.sql
new file mode 100644
index 0000000000..cdec929d55
--- /dev/null
+++ b/packages/api/db/migrations/V0005__initial_dagcargo_fdw.sql
@@ -0,0 +1,39 @@
+DO
+$do$
+BEGIN
+IF '${DAG_CARGO_TEST_MODE}' != 'true' THEN
+
+CREATE
+EXTENSION IF NOT EXISTS postgres_fdw;
+
+DROP
+SERVER IF EXISTS dag_cargo_server CASCADE;
+
+CREATE
+SERVER dag_cargo_server
+  FOREIGN DATA WRAPPER postgres_fdw
+  OPTIONS (
+    host '${DAG_CARGO_HOST}',
+    dbname '${DAG_CARGO_DATABASE}',
+    fetch_size '200000'
+  );
+
+CREATE
+USER MAPPING FOR ${NFT_STORAGE_USER}
+  SERVER dag_cargo_server
+  OPTIONS (
+    user '${DAG_CARGO_USER}',
+    password '${DAG_CARGO_PASSWORD}'
+  );
+
+CREATE
+USER MAPPING FOR ${NFT_STORAGE_STATS_USER}
+  SERVER dag_cargo_server
+  OPTIONS (
+    user '${DAG_CARGO_USER}',
+    password '${DAG_CARGO_PASSWORD}'
+  );
+
+END IF;
+END
+$do$
\ No newline at end of file
diff --git a/packages/api/db/migrations/V0006__initial_functions.sql b/packages/api/db/migrations/V0006__initial_functions.sql
new file mode 100644
index 0000000000..cb9ca955aa
--- /dev/null
+++ b/packages/api/db/migrations/V0006__initial_functions.sql
@@ -0,0 +1,119 @@
+DROP FUNCTION IF EXISTS create_upload;
+DROP FUNCTION IF EXISTS find_deals_by_content_cids;
+DROP FUNCTION IF EXISTS json_arr_to_text_arr;
+
+DROP TYPE IF EXISTS upload_pin_type;
+
+CREATE TYPE upload_pin_type AS
+(
+  status pin_status_type,
+  service service_type
+);
+
+-- transform a JSON array property into an array of SQL text elements
+CREATE OR REPLACE FUNCTION json_arr_to_text_arr(_json json)
+  RETURNS text[] LANGUAGE sql IMMUTABLE PARALLEL SAFE AS
+  'SELECT ARRAY(SELECT json_array_elements_text(_json))';
+
+CREATE OR REPLACE FUNCTION create_upload(data json) RETURNS void
+  LANGUAGE plpgsql
+  volatile
+  PARALLEL UNSAFE
+AS
+$$
+DECLARE
+  inserted_upload_id BIGINT;
+BEGIN
+  SET LOCAL statement_timeout = '30s';
+  insert into content (cid, dag_size, updated_at, inserted_at)
+  values (data ->> 'content_cid',
+          (data ->> 'dag_size')::BIGINT,
+          (data ->> 'updated_at')::timestamptz,
+          (data ->> 'inserted_at')::timestamptz)
+  ON CONFLICT ( cid ) DO NOTHING;
+
+  insert into pin (content_cid, status, service, updated_at, inserted_at)
+  select data ->> 'content_cid',
+         status,
+         service,
+         (data ->> 'updated_at')::timestamptz,
+         (data ->> 'inserted_at')::timestamptz
+  from json_populate_recordset(null::upload_pin_type, (data ->> 'pins')::json)
+  on conflict (content_cid, service) do nothing;
+
+  insert into upload as upld (user_id,
+                              key_id,
+                              content_cid,
+                              source_cid,
+                              mime_type,
+                              type,
+                              name,
+                              files,
+                              origins,
+                              meta,
+                              backup_urls,
+                              updated_at,
+                              inserted_at)
+  values ((data ->> 'user_id')::BIGINT,
+          (data ->> 'key_id')::BIGINT,
+          data ->> 'content_cid',
+          data ->> 'source_cid',
+          data ->> 'mime_type',
+          (data ->> 'type')::upload_type,
+          data ->> 'name',
+          (data ->> 'files')::jsonb,
+          (data ->> 'origins')::jsonb,
+          (data ->> 'meta')::jsonb,
+          json_arr_to_text_arr(data -> 'backup_urls'),
+          (data ->> 'updated_at')::timestamptz,
+          (data ->> 'inserted_at')::timestamptz)
+  ON CONFLICT ( user_id, source_cid )
+    DO UPDATE SET deleted_at  = null,
+                  updated_at  = (data ->> 'updated_at')::timestamptz,
+                  name        = data ->> 'name',
+                  meta        = (data ->> 'meta')::jsonb,
+                  origins     = (data ->> 'origins')::jsonb,
+                  mime_type   = data ->> 'mime_type',
+                  type        = (data ->> 'type')::upload_type,
+                  backup_urls = upld.backup_urls || json_arr_to_text_arr(data -> 'backup_urls')
+  RETURNING id INTO inserted_upload_id;
+END
+$$;
+
+CREATE OR REPLACE FUNCTION find_deals_by_content_cids(cids text[])
+  RETURNS TABLE
+          (
+            status              text,
+            "lastChanged"       timestamptz,
+            "chainDealID"       bigint,
+            "datamodelSelector" text,
+            "statusText"        text,
+            "dealActivation"    timestamptz,
+            "dealExpiration"    timestamptz,
+            miner               text,
+            "pieceCid"          text,
+            "batchRootCid"      text,
+            "contentCid"        text
+          )
+  LANGUAGE sql
+  STABLE
+  PARALLEL SAFE
+AS
+$$
+SELECT COALESCE(de.status, 'queued') as status,
+       de.entry_last_updated as lastChanged,
+       de.deal_id as chainDealID,
+       ae.datamodel_selector as datamodelSelector,
+       de.status_meta as statusText,
+       de.start_time as dealActivation,
+       de.end_time as dealExpiration,
+       de.provider as miner,
+       a.piece_cid as pieceCid,
+       ae.aggregate_cid as batchRootCid,
+       ae.cid_v1 as contentCid
+FROM cargo.aggregate_entries ae
+       JOIN cargo.aggregates a USING (aggregate_cid)
+       LEFT JOIN cargo.deals de USING (aggregate_cid)
+WHERE ae.cid_v1 = ANY (cids)
+ORDER BY de.entry_last_updated
+$$;
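A hedged usage sketch for the two functions (hypothetical payload; the JSON keys mirror the columns handled above, and the CID reuses the V0004 fixture data):

```sql
SELECT create_upload('{
  "user_id": 1,
  "content_cid": "bafybeiaj5yqocsg5cxsuhtvclnh4ulmrgsmnfbhbrfxrc3u2kkh35mts4e",
  "source_cid": "bafybeiaj5yqocsg5cxsuhtvclnh4ulmrgsmnfbhbrfxrc3u2kkh35mts4e",
  "type": "Car",
  "pins": [{"status": "Pinned", "service": "IpfsCluster3"}],
  "backup_urls": [],
  "inserted_at": "2022-04-19T00:00:00Z",
  "updated_at": "2022-04-19T00:00:00Z"
}');

SELECT "chainDealID", status, miner
FROM find_deals_by_content_cids(
  ARRAY['bafybeiaj5yqocsg5cxsuhtvclnh4ulmrgsmnfbhbrfxrc3u2kkh35mts4e']);
```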
diff --git a/packages/api/db/migrations/002-import-metrics-cargo-schema.sql b/packages/api/db/migrations/V0008__import_metrics_cargo_schema.sql
similarity index 57%
rename from packages/api/db/migrations/002-import-metrics-cargo-schema.sql
rename to packages/api/db/migrations/V0008__import_metrics_cargo_schema.sql
index c3565cb9f0..fccf4f0003 100644
--- a/packages/api/db/migrations/002-import-metrics-cargo-schema.sql
+++ b/packages/api/db/migrations/V0008__import_metrics_cargo_schema.sql
@@ -1,5 +1,14 @@
+DO
+$do$
+BEGIN
+IF '${DAG_CARGO_TEST_MODE}' != 'true' THEN
+
 -- Import dag cargo schema
 IMPORT FOREIGN SCHEMA cargo
   LIMIT TO (metrics, metrics_log)
   FROM SERVER dag_cargo_server
-  INTO cargo;
\ No newline at end of file
+  INTO cargo;
+
+END IF;
+END
+$do$
diff --git a/packages/api/package.json b/packages/api/package.json
index 3f66647fe4..06152a513f 100644
--- a/packages/api/package.json
+++ b/packages/api/package.json
@@ -7,12 +7,13 @@
   "main": "dist/worker.js",
   "scripts": {
     "deploy": "wrangler publish --env production",
-    "predev": "./scripts/cli.js db --start && ./scripts/cli.js db-sql --cargo --testing --reset",
+    "predev": "./scripts/cli.js db --start && npm run db-migrate",
     "dev": "miniflare --watch --debug --env ../../.env",
     "clean": "./scripts/cli.js db --clean && rm -rf docker/compose",
     "build": "scripts/cli.js build",
     "test": "tsc && playwright-test \"test/**/*.spec.js\" --sw src/index.js",
-    "db-types": "./scripts/cli.js db-types"
+    "db-types": "./scripts/cli.js db-types",
+    "db-migrate": "flyway -c db/flyway-config.cjs migrate"
   },
   "author": "Hugo Dias (hugodias.me)",
   "license": "(Apache-2.0 OR MIT)",
@@ -53,6 +54,7 @@
     "git-rev-sync": "^3.0.1",
     "ipfs-unixfs-importer": "^9.0.3",
     "miniflare": "^2.0.0-rc.3",
+    "node-flywaydb": "^3.0.7",
     "npm-run-all": "^4.1.5",
     "openapi-typescript": "^4.0.2",
     "p-retry": "^4.6.1",
diff --git a/packages/api/pw-test.config.cjs b/packages/api/pw-test.config.cjs
index d20b30cbbb..83bca7f614 100644
--- a/packages/api/pw-test.config.cjs
+++ b/packages/api/pw-test.config.cjs
@@ -47,7 +47,7 @@ module.exports = {
     await execa(cli, ['db', '--start'], { stdio: 'inherit' })
     console.log('⚡️ Cluster and Postgres started.')

-    await execa(cli, ['db-sql', '--cargo', '--testing', '--reset'], {
+    await execa('yarn', ['run', 'db-migrate'], {
       stdio: 'inherit',
     })
     console.log('⚡️ SQL schema loaded.')
diff --git a/yarn.lock b/yarn.lock
index 5f5c8211b5..020ce01c10 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -4369,6 +4369,11 @@
   resolved "https://registry.yarnpkg.com/@testing-library/user-event/-/user-event-14.1.1.tgz#e1ff6118896e4b22af31e5ea2f9da956adde23d8"
   integrity sha512-XrjH/iEUqNl9lF2HX9YhPNV7Amntkcnpw0Bo1KkRzowNDcgSN9i0nm4Q8Oi5wupgdfPaJNMAWa61A+voD6Kmwg==

+"@tootallnate/once@1":
+  version "1.1.2"
+  resolved "https://registry.yarnpkg.com/@tootallnate/once/-/once-1.1.2.tgz#ccb91445360179a04e7fe6aff78c00ffc1eeaf82"
+  integrity sha512-RbzJvlNzmRq5c3O09UipeuXno4tA1FE6ikOjxZK0tuxVv3412l64l5t1W5pj4+rJq9vpkm/kwiR07aZXnsKPxw==
+
 "@types/acorn@^4.0.0":
   version "4.0.6"
   resolved "https://registry.yarnpkg.com/@types/acorn/-/acorn-4.0.6.tgz#d61ca5480300ac41a7d973dd5b84d0a591154a22"
@@ -7137,6 +7142,11 @@ commander@^4.1.1:
"https://registry.yarnpkg.com/commander/-/commander-4.1.1.tgz#9fd602bd936294e9e9ef46a3f4d6964044b18068" integrity sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA== +commander@^5.0.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/commander/-/commander-5.1.0.tgz#46abbd1652f8e059bddaef99bbdcb2ad9cf179ae" + integrity sha512-P0CysNDQ7rtVw4QIQtm+MRxV66vKFSvlsQvGYXZWR3qFU0jlMKHZZZgw8e+8DSah4UDKMqnknRDQz+xuQXQ/Zg== + commander@^6.2.0, commander@^6.2.1: version "6.2.1" resolved "https://registry.yarnpkg.com/commander/-/commander-6.2.1.tgz#0792eb682dfbc325999bb2b84fddddba110ac73c" @@ -9351,7 +9361,7 @@ extglob@^2.0.4: snapdragon "^0.8.1" to-regex "^3.0.1" -extract-zip@2.0.1: +extract-zip@2.0.1, extract-zip@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/extract-zip/-/extract-zip-2.0.1.tgz#663dca56fe46df890d5f131ef4a06d22bb8ba13a" integrity sha512-GDhU9ntwuKyGXdZBUgTIe+vXnWj0fppUEtMDL0+idd5Sta8TGpHssn/eusA9mrPr9qNDym6SxAYZjNvCn/9RBg== @@ -10681,6 +10691,15 @@ http-https@^1.0.0: resolved "https://registry.yarnpkg.com/http-https/-/http-https-1.0.0.tgz#2f908dd5f1db4068c058cd6e6d4ce392c913389b" integrity sha1-L5CN1fHbQGjAWM1ubUzjkskTOJs= +http-proxy-agent@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/http-proxy-agent/-/http-proxy-agent-4.0.1.tgz#8a8c8ef7f5932ccf953c296ca8291b95aa74aa3a" + integrity sha512-k0zdNgqWTGA6aeIRVpvfVob4fL52dTfaehylg0Y4UvSySvOq/Y+BOyPrgpUrA7HylqvU8vIZGsRuXmspskV0Tg== + dependencies: + "@tootallnate/once" "1" + agent-base "6" + debug "4" + http2-wrapper@^1.0.0-beta.5.2: version "1.0.3" resolved "https://registry.yarnpkg.com/http2-wrapper/-/http2-wrapper-1.0.3.tgz#b8f55e0c1f25d4ebd08b3b0c2c079f9590800b3d" @@ -14339,6 +14358,18 @@ node-fetch@2.6.7, node-fetch@^2.6.0, node-fetch@^2.6.1, node-fetch@^2.6.6, node- version "2.6.7" resolved "https://registry.npmjs.org/@achingbrain/node-fetch/-/node-fetch-2.6.7.tgz#1b5d62978f2ed07b99444f64f0df39f960a6d34d" +node-flywaydb@^3.0.7: + version "3.0.7" + resolved "https://registry.yarnpkg.com/node-flywaydb/-/node-flywaydb-3.0.7.tgz#b8e36b36f7237859f586396f965c43f9f712062c" + integrity sha512-UzTecD8DgXAdMqc/Gja/vB3Pr3CCgDir7oMfnLqzSk7DwCDKaIQOTKXnLpsh+LiW68Yvum0yPdHAB2p40IkxRg== + dependencies: + commander "^5.0.0" + extract-zip "^2.0.0" + http-proxy-agent "^4.0.1" + https-proxy-agent "^5.0.0" + rimraf "^3.0.2" + xml2js "^0.4.23" + node-forge@^1: version "1.3.1" resolved "https://registry.yarnpkg.com/node-forge/-/node-forge-1.3.1.tgz#be8da2af243b2417d5f646a770663a92b7e9ded3" @@ -15981,11 +16012,16 @@ prepend-http@^2.0.0: resolved "https://registry.yarnpkg.com/prepend-http/-/prepend-http-2.0.0.tgz#e92434bfa5ea8c19f41cdfd401d741a3c819d897" integrity sha1-6SQ0v6XqjBn0HN/UAddBo8gZ2Jc= -prettier@2.5.1, "prettier@>=2.2.1 <=2.3.0", prettier@^2.5.1: +prettier@2.5.1, prettier@^2.5.1: version "2.5.1" resolved "https://registry.yarnpkg.com/prettier/-/prettier-2.5.1.tgz#fff75fa9d519c54cf0fce328c1017d94546bc56a" integrity sha512-vBZcPRUR5MZJwoyi3ZoyQlc1rXeEck8KgeC9AwwOn+exuxLxq5toTRDTSaVrXHxelDMHy9zlicw8u66yxoSUFg== +"prettier@>=2.2.1 <=2.3.0": + version "2.3.0" + resolved "https://registry.yarnpkg.com/prettier/-/prettier-2.3.0.tgz#b6a5bf1284026ae640f17f7ff5658a7567fc0d18" + integrity sha512-kXtO4s0Lz/DW/IJ9QdWhAf7/NmPWQXkFr/r/WkR3vyI+0v8amTDxiaQSLzs8NBlytfLWX/7uQUMIW677yLKl4w== + pretty-error@^2.1.1: version "2.1.2" resolved "https://registry.yarnpkg.com/pretty-error/-/pretty-error-2.1.2.tgz#be89f82d81b1c86ec8fdfbc385045882727f93b6" @@ -17270,6 +17306,11 @@ 
   klona "^2.0.4"
   neo-async "^2.6.2"

+sax@>=0.6.0:
+  version "1.2.4"
+  resolved "https://registry.yarnpkg.com/sax/-/sax-1.2.4.tgz#2816234e2378bddc4e5354fab5caa895df7100d9"
+  integrity sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw==
+
 scheduler@^0.20.2:
   version "0.20.2"
   resolved "https://registry.yarnpkg.com/scheduler/-/scheduler-0.20.2.tgz#4baee39436e34aa93b4874bddcbf0fe8b8b50e91"
@@ -18939,7 +18980,12 @@ typedoc@^0.22.14:
   minimatch "^5.0.1"
   shiki "^0.10.1"

-typescript@4.4.4, typescript@4.5.3:
+typescript@4.4.4:
+  version "4.4.4"
+  resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.4.4.tgz#2cd01a1a1f160704d3101fd5a58ff0f9fcb8030c"
+  integrity sha512-DqGhF5IKoBl8WNf8C1gu8q0xZSInh9j1kJJMqT3a94w1JzVaBU4EXOSMrz9yDqMT0xt3selp83fuFMQ0uzv6qA==
+
+typescript@4.5.3:
   version "4.5.3"
   resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.5.3.tgz#afaa858e68c7103317d89eb90c5d8906268d353c"
   integrity sha512-eVYaEHALSt+s9LbvgEv4Ef+Tdq7hBiIZgii12xXJnukryt3pMgJf6aKhoCZ3FWQsu6sydEnkg11fYXLzhLBjeQ==
@@ -20058,11 +20104,24 @@ xml-but-prettier@^1.0.1:
   dependencies:
     repeat-string "^1.5.2"

+xml2js@^0.4.23:
+  version "0.4.23"
+  resolved "https://registry.yarnpkg.com/xml2js/-/xml2js-0.4.23.tgz#a0c69516752421eb2ac758ee4d4ccf58843eac66"
+  integrity sha512-ySPiMjM0+pLDftHgXY4By0uswI3SPKLDw/i3UXbnO8M/p28zqexCUoPmQFrYD+/1BzhGJSs2i1ERWKJAtiLrug==
+  dependencies:
+    sax ">=0.6.0"
+    xmlbuilder "~11.0.0"
+
 xml@=1.0.1:
   version "1.0.1"
   resolved "https://registry.yarnpkg.com/xml/-/xml-1.0.1.tgz#78ba72020029c5bc87b8a81a3cfcd74b4a2fc1e5"
   integrity sha1-eLpyAgApxbyHuKgaPPzXS0ovweU=

+xmlbuilder@~11.0.0:
+  version "11.0.1"
+  resolved "https://registry.yarnpkg.com/xmlbuilder/-/xmlbuilder-11.0.1.tgz#be9bae1c8a046e76b31127726347d0ad7002beb3"
+  integrity sha512-fDlsI/kFEx7gLvbecc0/ohLG50fugQp8ryHzMTuW9vSa1GJ0XYWKnhsUx7oie3G98+r56aTQIUB4kht42R3JvA==
+
 xtend@^4.0.0, xtend@^4.0.1, xtend@^4.0.2, xtend@~4.0.1:
   version "4.0.2"
   resolved "https://registry.yarnpkg.com/xtend/-/xtend-4.0.2.tgz#bb72779f5fa465186b1f438f674fa347fdb5db54"