Merge pull request #1275 from Giveth/staging
v1.22.0 2024-02-18 (first release)
jainkrati authored Feb 18, 2024
2 parents f76d8cd + 4873cd1 commit c77b1c0
Showing 99 changed files with 5,730 additions and 1,432 deletions.
4 changes: 2 additions & 2 deletions .github/workflows/develop-pipeline.yml
@@ -1,4 +1,4 @@
-name: develop-pipeline
+name: develop-pipeline

 on:
   push:
@@ -43,7 +43,7 @@ jobs:
       - name: Use Node.js
         uses: actions/setup-node@v1
         with:
-          node-version: 18.17.1
+          node-version: 20.11.0
       - name: Install dependencies
         run: npm ci
       - name: Run tslint
44 changes: 41 additions & 3 deletions .github/workflows/master-pipeline.yml
@@ -1,4 +1,4 @@
-name: master-pipeline
+name: master-pipeline

 on:
   push:
@@ -7,6 +7,7 @@ on:
   pull_request:
     branches:
       - master
+
 jobs:
   test:
     runs-on: ubuntu-latest
@@ -38,20 +39,56 @@ jobs:
           --health-retries 5
         ports:
           - 5443:5432
+
     steps:
-      - uses: actions/checkout@v1
+      - name: Checkout
+        uses: actions/checkout@v1
+      - name: Configure AWS credentials
+        uses: aws-actions/configure-aws-credentials@v2
+        with:
+          aws-access-key-id: ${{ secrets.AWS_S3_ACCESS_KEY_ID }}
+          aws-secret-access-key: ${{ secrets.AWS_S3_SECRET_ACCESS_KEY }}
+          aws-region: ${{ secrets.AWS_S3_REGION }}
+
+      - name: Download latest DB backup from S3
+        run: |
+          FILENAME=$(aws s3 ls ${{ secrets.AWS_S3_BUCKET_PATH_PROD }}/ | sort | tail -n 1 | awk '{print $4}')
+          aws s3 cp ${{ secrets.AWS_S3_BUCKET_PATH_PROD }}/$FILENAME /tmp/db_backup.zip
+      - name: Unzip DB backup
+        run: |
+          unzip /tmp/db_backup.zip -d /tmp
+          mv /tmp/backups/givethio-db/*.sql /tmp/backups/givethio-db/db_backup.sql
+      - name: Wait for PostgreSQL to become ready
+        run: |
+          for i in {1..10}
+          do
+            pg_isready -h localhost -p 5443 -U postgres && echo Success && break
+            echo -n .
+            sleep 1
+          done
+      - name: Restore DB backup
+        run: PGPASSWORD=postgres psql -h localhost -p 5443 -U postgres -d givethio < /tmp/backups/givethio-db/db_backup.sql
+
       - name: Use Node.js
         uses: actions/setup-node@v1
         with:
-          node-version: 18.17.1
+          node-version: 20.11.0
+
       - name: Install dependencies
         run: npm ci
+
       - name: Run tslint
         run: npm run tslint
+
       - name: Run build
         run: npm run build
+
+      - name: Run migrations
+        run: npm run db:migrate:run:test
+
       - name: Run tests
         run: npm run test
         env:
@@ -65,6 +102,7 @@ jobs:
           CELO_ALFAJORES_SCAN_API_KEY: ${{ secrets.CELO_ALFAJORES_SCAN_API_KEY }}
           MORDOR_ETC_TESTNET: ${{ secrets.MORDOR_ETC_TESTNET }}
           ETC_NODE_HTTP_URL: ${{ secrets.ETC_NODE_HTTP_URL }}
+          DROP_DATABASE: ${{ secrets.DROP_DATABASE_DURING_TEST_PROD }}
           SOLANA_TEST_NODE_RPC_URL: ${{ secrets.SOLANA_TEST_NODE_RPC_URL }}
           SOLANA_DEVNET_NODE_RPC_URL: ${{ secrets.SOLANA_DEVNET_NODE_RPC_URL }}
           SOLANA_MAINNET_NODE_RPC_URL: ${{ secrets.SOLANA_MAINNET_NODE_RPC_URL }}
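The restore steps above pick the newest backup by relying on aws s3 ls output, which sorts date-stamped filenames lexicographically. A minimal TypeScript sketch of the same selection via the AWS SDK v3 follows; the bucket and prefix arguments are placeholder assumptions, not values from this repo:

// Sketch: pick the newest backup object under an S3 prefix, mirroring the
// `aws s3 ls | sort | tail -n 1 | awk '{print $4}'` step above.
import { ListObjectsV2Command, S3Client } from '@aws-sdk/client-s3';

async function latestBackupKey(
  s3: S3Client,
  bucket: string,
  prefix: string,
): Promise<string | undefined> {
  // Note: returns at most one page (1000 keys); fine for a small backup prefix.
  const { Contents = [] } = await s3.send(
    new ListObjectsV2Command({ Bucket: bucket, Prefix: prefix }),
  );
  // The shell step depends on date-stamped filenames sorting lexicographically;
  // sorting the object keys the same way selects the most recent backup.
  return Contents.map(obj => obj.Key ?? '')
    .sort()
    .at(-1);
}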
4 changes: 2 additions & 2 deletions .github/workflows/staging-pipeline.yml
@@ -75,7 +75,7 @@ jobs:
       - name: Use Node.js
         uses: actions/setup-node@v1
         with:
-          node-version: 18.17.1
+          node-version: 20.11.0

       - name: Install dependencies
         run: npm ci
@@ -102,7 +102,7 @@ jobs:
           CELO_ALFAJORES_SCAN_API_KEY: ${{ secrets.CELO_ALFAJORES_SCAN_API_KEY }}
           MORDOR_ETC_TESTNET: ${{ secrets.MORDOR_ETC_TESTNET }}
           ETC_NODE_HTTP_URL: ${{ secrets.ETC_NODE_HTTP_URL }}
-          DROP_DATABASE: ${{ secrets.DROP_DATABASE }}
+          DROP_DATABASE: ${{ secrets.DROP_DATABASE_DURING_TEST_STAGING }}
           SOLANA_TEST_NODE_RPC_URL: ${{ secrets.SOLANA_TEST_NODE_RPC_URL }}
           SOLANA_DEVNET_NODE_RPC_URL: ${{ secrets.SOLANA_DEVNET_NODE_RPC_URL }}
           SOLANA_MAINNET_NODE_RPC_URL: ${{ secrets.SOLANA_MAINNET_NODE_RPC_URL }}
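With this rename, staging reads its own DROP_DATABASE_DURING_TEST_STAGING secret while production uses DROP_DATABASE_DURING_TEST_PROD, so each environment opts into destructive test setup independently. A sketch of the kind of guard such a flag typically feeds; the reset logic here is a hypothetical stand-in, not this repo's bootstrap code:

// Sketch: gate destructive test-database setup behind the injected env flag.
import { DataSource } from 'typeorm';

async function maybeResetDatabase(ds: DataSource): Promise<void> {
  if (process.env.DROP_DATABASE !== 'true') return; // opt-in per environment
  // Hypothetical reset: wipe and recreate the public schema before migrations run.
  await ds.query('DROP SCHEMA public CASCADE; CREATE SCHEMA public;');
}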
2 changes: 1 addition & 1 deletion .nvmrc
@@ -1 +1 @@
-v18.17.1
+v20.11.0
4 changes: 2 additions & 2 deletions Dockerfile
@@ -1,5 +1,5 @@
 #https://hub.docker.com/_/node?tab=tags&page=1
-FROM node:16.14.2-alpine3.15
+FROM node:20.11.0-alpine3.18

 WORKDIR /usr/src/app

@@ -11,7 +11,7 @@ COPY tsconfig.json .

 RUN apk add --update alpine-sdk
 RUN apk add git python3
-RUN apk add --no-cache chromium --repository=http://dl-cdn.alpinelinux.org/alpine/v3.10/main
+RUN apk add --no-cache chromium --repository=http://dl-cdn.alpinelinux.org/alpine/v3.18/main
 RUN npm ci
 RUN npm i -g ts-node
85 changes: 54 additions & 31 deletions README.md

Large diffs are not rendered by default.

7 changes: 7 additions & 0 deletions config/example.env
@@ -270,3 +270,10 @@ NUMBER_OF_UPDATE_RECURRING_DONATION_CONCURRENT_JOB=1
 UPDATE_RECURRING_DONATIONS_STREAM=0 0 * * *

-MPETH_GRAPHQL_PRICES_URL=
+MPETH_GRAPHQL_PRICES_URL=
+
+# Draft donation match expiration hours; unmatched drafts are deleted afterwards to keep the database small
+ENABLE_DRAFT_DONATION=true
+DRAFT_DONATION_MATCH_EXPIRATION_HOURS=24
+MATCH_DRAFT_DONATION_CRONJOB_EXPRESSION=0 */5 * * * *
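These settings suggest a scheduled matcher: the six-field expression 0 */5 * * * * fires at second 0 of every fifth minute, and drafts older than DRAFT_DONATION_MATCH_EXPIRATION_HOURS are purged. A hedged sketch of how the variables might be wired together; the cron package and both helper bodies are assumptions, not code from this PR:

// Sketch: wire the draft-donation env vars into a recurring job.
import { CronJob } from 'cron';

const expression =
  process.env.MATCH_DRAFT_DONATION_CRONJOB_EXPRESSION || '0 */5 * * * *';
const expirationHours = Number(
  process.env.DRAFT_DONATION_MATCH_EXPIRATION_HOURS || 24,
);

async function matchDraftDonations(): Promise<void> {
  // hypothetical: match pending draft donations against on-chain transfers
}

async function purgeExpiredDrafts(hours: number): Promise<void> {
  // hypothetical: delete drafts older than `hours` to keep the table small
}

if (process.env.ENABLE_DRAFT_DONATION === 'true') {
  new CronJob(expression, async () => {
    await matchDraftDonations();
    await purgeExpiredDrafts(expirationHours);
  }).start();
}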
5 changes: 4 additions & 1 deletion config/test.env
@@ -212,4 +212,7 @@ DISABLE_NOTIFICATION_CENTER=false
 DONATION_SAVE_BACKUP_CRONJOB_EXPRESSION=
 ENABLE_IMPORT_DONATION_BACKUP=false
 DONATION_SAVE_BACKUP_API_URL=
-DONATION_SAVE_BACKUP_API_SECRET=
+DONATION_SAVE_BACKUP_API_SECRET=
+
+DRAFT_DONATION_MATCH_EXPIRATION_HOURS=24
+ENABLE_DRAFT_DONATION=true
8 changes: 7 additions & 1 deletion migration/1646303882607-seedTokes.ts
@@ -1,6 +1,7 @@
 import { MigrationInterface, QueryRunner } from 'typeorm';
 import { Token } from '../src/entities/token';
 import seedTokens from './data/seedTokens';
+import { ChainType } from '../src/types/network';

 // tslint:disable-next-line:class-name
 export class seedTokes1646303882607 implements MigrationInterface {
@@ -14,8 +15,13 @@ export class seedTokes1646303882607 implements MigrationInterface {
       seedTokens
         // We add goerli tokens in addGoerliTokens migration file
         .filter(token => token.networkId !== 5)
-        .map(t => {
+        .filter(token => !token.chainType || token.chainType === ChainType.EVM)
+        .map(token => {
+          const t = {
+            ...token,
+          };
           t.address = t.address?.toLowerCase();
+          delete t.chainType;
           return t;
         }),
     );
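Each token-seed migration in this commit gets the same treatment: the .map() callback now shallow-copies each seed entry before lowercasing the address and stripping chainType, instead of mutating the shared seedTokens array in place, as the repeated hunks below show. A minimal TypeScript sketch of that shared transform; the SeedToken type is a simplified assumption, not the repo's real type:

// Sketch of the per-token transform these migrations now share.
interface SeedToken {
  networkId: number;
  address?: string;
  chainType?: string;
}

function toSeedRow(token: SeedToken): Omit<SeedToken, 'chainType'> {
  // Shallow-copy first so the imported seedTokens array is never mutated;
  // the module is cached, so in-place edits would leak into any later
  // migration that reads the same array in the same process.
  const t = { ...token };
  t.address = t.address?.toLowerCase(); // addresses are stored lowercased
  delete t.chainType; // the token table predates the chainType column
  return t;
}

// Usage mirroring the migration above:
// seedTokens.filter(tok => tok.networkId !== 5).map(toSeedRow)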
6 changes: 5 additions & 1 deletion migration/1661116436720-addGoerliTokens.ts
@@ -15,8 +15,12 @@ export class addGoerliTokens1661116436720 implements MigrationInterface {
       Token,
       seedTokens
         .filter(token => token.networkId === 5)
-        .map(t => {
+        .map(token => {
+          const t = {
+            ...token,
+          };
           t.address = t.address?.toLowerCase();
+          delete t.chainType;
           return t;
         }),
     );
6 changes: 5 additions & 1 deletion migration/1677742523974-addPolygonTokens.ts
@@ -11,8 +11,12 @@ export class addGoerliTokens1677742523974 implements MigrationInterface {
       Token,
       seedTokens
         .filter(token => token.networkId === NETWORK_IDS.POLYGON)
-        .map(t => {
+        .map(token => {
+          const t = {
+            ...token,
+          };
           t.address = t.address?.toLowerCase();
+          delete t.chainType;
           return t;
         }),
     );
6 changes: 5 additions & 1 deletion migration/1680014857601-addCeloTokens.ts
@@ -18,8 +18,12 @@ export class addCeloTokens1680014857601 implements MigrationInterface {
       Token,
       seedTokens
         .filter(token => token.networkId === networkId)
-        .map(t => {
+        .map(token => {
+          const t = {
+            ...token,
+          };
           t.address = t.address?.toLowerCase();
+          delete t.chainType;
           return t;
         }),
     );
6 changes: 5 additions & 1 deletion migration/1683008685487-AddOptimisticTokens.ts
@@ -14,8 +14,12 @@ export class AddOptimisticTokens1683008685487 implements MigrationInterface {
             token.symbol !== 'ETH' &&
             token.symbol !== 'OP',
         )
-        .map(t => {
+        .map(token => {
+          const t = {
+            ...token,
+          };
           t.address = t.address?.toLowerCase();
+          delete t.chainType;
           return t;
         }),
     );
6 changes: 5 additions & 1 deletion migration/1687383705794-AddOptimismGoerliTokens.ts
@@ -18,8 +18,12 @@ export class AddOptimismGoerliTokens1687383705794
       Token,
       seedTokens
         .filter(token => token.networkId === NETWORK_IDS.OPTIMISM_GOERLI)
-        .map(t => {
+        .map(token => {
+          const t = {
+            ...token,
+          };
           t.address = t.address?.toLowerCase();
+          delete t.chainType;
           return t;
         }),
     );
6 changes: 5 additions & 1 deletion migration/1696421249294-Add_ETC_Tokens.ts
@@ -9,8 +9,12 @@ export class AddETCTokens1696421249294 implements MigrationInterface {
       Token,
       seedTokens
         .filter(token => token.networkId === NETWORK_IDS.ETC)
-        .map(t => {
+        .map(token => {
+          const t = {
+            ...token,
+          };
           t.address = t.address?.toLowerCase();
+          delete t.chainType;
           return t;
         }),
     );
3 changes: 0 additions & 3 deletions (file name not rendered)

@@ -1,7 +1,4 @@
 import { MigrationInterface, QueryRunner } from 'typeorm';
-import { Token } from '../src/entities/token';
-import seedTokens from './data/seedTokens';
-import { NETWORK_IDS } from '../src/provider';

 export class AddCryptoCompareIdAndCoingeckoIdToTokenTable1696421249294
   implements MigrationInterface
6 changes: 5 additions & 1 deletion migration/1697028245800-add_mordor_etc_testnet_tokens.ts
@@ -17,8 +17,12 @@ export class addMordorEtcTestnetTokens1697028245800
       Token,
       seedTokens
         .filter(token => token.networkId === NETWORK_IDS.MORDOR_ETC_TESTNET)
-        .map(t => {
+        .map(token => {
+          const t = {
+            ...token,
+          };
           t.address = t.address?.toLowerCase();
+          delete t.chainType;
           return t;
         }),
     );
48 changes: 48 additions & 0 deletions migration/1702364570535-create_anchor_contract_address_table.ts
@@ -0,0 +1,48 @@
import { MigrationInterface, QueryRunner } from 'typeorm';

export class createAnchorContractAddressTable1702364570535
  implements MigrationInterface
{
  async up(queryRunner: QueryRunner): Promise<void> {
    await queryRunner.query(`
      CREATE TABLE "anchor_contract_address" (
        "id" SERIAL PRIMARY KEY,
        "networkId" INTEGER NOT NULL,
        "isActive" BOOLEAN DEFAULT false,
        "address" TEXT NOT NULL,
        "txHash" TEXT NOT NULL,
        "projectId" INTEGER NULL,
        "creatorId" INTEGER NULL,
        "ownerId" INTEGER NULL,
        "updatedAt" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
        "createdAt" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
        CONSTRAINT "UQ_address_networkId_project" UNIQUE ("address", "networkId", "projectId")
      );
      CREATE INDEX "IDX_address" ON "anchor_contract_address" ("address");
      CREATE INDEX "IDX_networkId" ON "anchor_contract_address" ("networkId");
      CREATE INDEX "IDX_projectId" ON "anchor_contract_address" ("projectId");
      CREATE INDEX "IDX_creatorId" ON "anchor_contract_address" ("creatorId");
      CREATE INDEX "IDX_ownerId" ON "anchor_contract_address" ("ownerId");
    `);

    await queryRunner.query(`
      ALTER TABLE "anchor_contract_address"
        ADD CONSTRAINT "FK_anchor_contract_address_project"
        FOREIGN KEY ("projectId") REFERENCES "project"("id")
        ON DELETE SET NULL;
    `);
  }

  async down(queryRunner: QueryRunner): Promise<void> {
    await queryRunner.query(`
      ALTER TABLE "anchor_contract_address"
        DROP CONSTRAINT "FK_anchor_contract_address_project";
    `);

    await queryRunner.query(`
      DROP TABLE "anchor_contract_address";
    `);
  }
}
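For orientation, here is a minimal TypeORM entity sketch that would match the table this migration creates; it is an illustrative assumption, not an entity file shipped in this PR:

// Sketch: entity shape mirroring the anchor_contract_address table above.
import {
  Column,
  CreateDateColumn,
  Entity,
  Index,
  PrimaryGeneratedColumn,
  Unique,
  UpdateDateColumn,
} from 'typeorm';

@Entity('anchor_contract_address')
@Unique(['address', 'networkId', 'projectId']) // matches UQ_address_networkId_project
export class AnchorContractAddress {
  @PrimaryGeneratedColumn()
  id: number;

  @Index()
  @Column()
  networkId: number;

  @Column({ default: false })
  isActive: boolean;

  @Index()
  @Column()
  address: string;

  @Column()
  txHash: string;

  // FK to project(id) with ON DELETE SET NULL, hence nullable
  @Index()
  @Column({ nullable: true })
  projectId?: number;

  @Index()
  @Column({ nullable: true })
  creatorId?: number;

  @Index()
  @Column({ nullable: true })
  ownerId?: number;

  @CreateDateColumn()
  createdAt: Date;

  @UpdateDateColumn()
  updatedAt: Date;
}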
40 changes: 40 additions & 0 deletions migration/1702445735585-create_recurring_donation_table.ts
@@ -0,0 +1,40 @@
import { MigrationInterface, QueryRunner } from 'typeorm';

export class createRecurringDonationTable1702445735585
  implements MigrationInterface
{
  async up(queryRunner: QueryRunner): Promise<void> {
    await queryRunner.query(`
      CREATE TABLE recurring_donation (
        id SERIAL PRIMARY KEY,
        "networkId" INT NOT NULL,
        "txHash" text NOT NULL,
        "projectId" INT,
        "anchorContractAddressId" INT,
        "donorId" INT,
        "updatedAt" TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
        "createdAt" TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
        finished BOOLEAN,
        CONSTRAINT fk_project
          FOREIGN KEY("projectId")
          REFERENCES project(id),
        CONSTRAINT fk_anchor_contract_address
          FOREIGN KEY("anchorContractAddressId")
          REFERENCES anchor_contract_address(id),
        CONSTRAINT fk_donor
          FOREIGN KEY("donorId")
          REFERENCES "user"(id),
        UNIQUE("txHash", "networkId", "projectId")
      );
      CREATE INDEX "idx_txHash" ON recurring_donation("txHash");
      CREATE INDEX "idx_projectId" ON recurring_donation("projectId");
      CREATE INDEX "idx_anchorContractAddressId" ON recurring_donation("anchorContractAddressId");
      CREATE INDEX "donorId" ON recurring_donation("donorId");
    `);
  }

  async down(queryRunner: QueryRunner): Promise<void> {
    await queryRunner.query(`DROP TABLE IF EXISTS recurring_donation;`);
  }
}
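The composite UNIQUE("txHash", "networkId", "projectId") constraint is what lets donation processing be replayed safely: re-processing the same on-chain transaction cannot create a duplicate row. A sketch of an idempotent insert that relies on it; the helper and its argument names are illustrative, not code from this PR:

// Sketch: insert that is a no-op when the same tx/network/project already exists.
import { QueryRunner } from 'typeorm';

async function insertRecurringDonationOnce(
  queryRunner: QueryRunner,
  d: { txHash: string; networkId: number; projectId: number; donorId: number },
): Promise<void> {
  await queryRunner.query(
    `INSERT INTO recurring_donation ("txHash", "networkId", "projectId", "donorId")
     VALUES ($1, $2, $3, $4)
     ON CONFLICT ("txHash", "networkId", "projectId") DO NOTHING;`,
    [d.txHash, d.networkId, d.projectId, d.donorId],
  );
}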