Skip to content

Commit

Permalink
fix[backend]: fix integration tests to use /api/claimsfeed2 instead of /api/claims-all
Browse files Browse the repository at this point in the history
  • Loading branch information
Kholoudxs55kh committed Aug 5, 2024
1 parent f65083c commit f066327
Show file tree
Hide file tree
Showing 5 changed files with 5,621 additions and 4,924 deletions.
4 changes: 2 additions & 2 deletions Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ RUN apt-get install -y openssl
WORKDIR /app

COPY package.json ./
COPY package-lock.json ./
# COPY package-lock.json ./

COPY ./prisma ./prisma

Expand All @@ -16,4 +16,4 @@ RUN npx prisma generate

EXPOSE 9000

CMD npm run docker:dev
CMD npm run docker:dev
2 changes: 1 addition & 1 deletion __tests__/Intergration.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -83,7 +83,7 @@ describe("Integration tests", () => {

it("should get all claims if no query params provided", async () => {
// const response = await request(app).get("/api/claim"); // not working
const response = await request(app).get("/api/claims-all");
const response = await request(app).get("/api/claimsfeed2");

expect(response.status).toBe(201);
expect(response.body.claimsData.length).toBeGreaterThan(0);
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,72 @@
-- CreateEnum
ALTER TYPE "AuthType" ADD VALUE 'OAUTH';

-- CreateEnum
CREATE TYPE "ValidationStatus" AS ENUM ('PENDING', 'COMPLETED', 'REJECTED', 'ABANDONED');

-- CreateEnum
CREATE TYPE "ResponseStatus" AS ENUM ('GREEN', 'YELLOW', 'GREY', 'RED');

-- CreateTable
CREATE TABLE "ClaimData" (
"id" SERIAL NOT NULL,
"claimId" INTEGER NOT NULL,
"name" TEXT NOT NULL,

CONSTRAINT "ClaimData_pkey" PRIMARY KEY ("id")
);

-- CreateTable
CREATE TABLE "Image" (
"id" SERIAL NOT NULL,
"claimId" INTEGER NOT NULL,
"url" TEXT NOT NULL,
"digestMultibase" TEXT,
"metadata" JSONB,
"effectiveDate" TIMESTAMP(3) NOT NULL,
"createdDate" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
"owner" TEXT NOT NULL,
"signature" TEXT NOT NULL,

CONSTRAINT "Image_pkey" PRIMARY KEY ("id")
);

-- CreateTable
CREATE TABLE "CandidUserInfo" (
"id" SERIAL NOT NULL,
"claimId" INTEGER,
"firstName" TEXT,
"lastName" TEXT,
"candid_entity_id" TEXT NOT NULL,
"email" TEXT NOT NULL,
"profileURL" TEXT NOT NULL,

CONSTRAINT "CandidUserInfo_pkey" PRIMARY KEY ("id")
);

-- CreateTable
CREATE TABLE "ValidationRequest" (
"id" SERIAL NOT NULL,
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
"context" TEXT NOT NULL,
"validatorName" TEXT NOT NULL,
"validatorEmail" TEXT NOT NULL,
"claimId" INTEGER NOT NULL,
"validationClaimId" INTEGER,
"validationStatus" "ValidationStatus" NOT NULL DEFAULT 'PENDING',
"response" "ResponseStatus",
"validationDate" TIMESTAMP(3),
"statement" TEXT,

CONSTRAINT "ValidationRequest_pkey" PRIMARY KEY ("id")
);

-- CreateIndex
CREATE UNIQUE INDEX "ClaimData_claimId_key" ON "ClaimData"("claimId");

-- CreateIndex
CREATE UNIQUE INDEX "CandidUserInfo_claimId_key" ON "CandidUserInfo"("claimId");

-- CreateIndex
CREATE UNIQUE INDEX "ValidationRequest_validationClaimId_key" ON "ValidationRequest"("validationClaimId");

48 changes: 33 additions & 15 deletions src/dao/api.dao.ts
Original file line number Diff line number Diff line change
Expand Up @@ -302,35 +302,53 @@ export class NodeDao {
INNER JOIN "Node" AS n2 ON e."endNodeId" = n2.id
LEFT JOIN "Edge" as e2 ON n2.id = e2."startNodeId"
LEFT JOIN "Node" as n3 ON e2."endNodeId" = n3.id
LEFT JOIN "Image" as i ON c.id = i."claimId"
LEFT JOIN "ClaimData" as cd ON c.id = cd."claimId"
${Prisma.raw(whereClause)}
ORDER BY c."effectiveDate" DESC
LIMIT ${limit}
OFFSET ${offset}
`;

const feedEntries = await prisma.$queryRaw<FeedEntry[]>(rawQ);
console.log(
"Feed entries raw query: " +
rawQ.sql +
"\n with values \n" +
rawQ.values
);
const feedEntries = await prisma.$queryRaw<FeedEntry[]>`${rawQ}`;
// remove duplicates
const uniqueEntriesMap: Map<
string,
Map<string | null, Map<number, FeedEntry>>
> = new Map();

feedEntries.forEach((entry: FeedEntry) => {
if (!uniqueEntriesMap.has(entry.name)) {
uniqueEntriesMap.set(entry.name, new Map());
}

const sourceLinkMap = uniqueEntriesMap.get(entry.name)!;

if (!sourceLinkMap.has(entry.source_link)) {
sourceLinkMap.set(entry.source_link, new Map());
}

const claimIdMap = new Map<number, FeedEntry>();
feedEntries.forEach((entry) => {
const claimIdMap = sourceLinkMap.get(entry.source_link)!;
if (!claimIdMap.has(entry.claim_id)) {
claimIdMap.set(entry.claim_id, entry);
}
});

const uniqueEntriesByClaimId = Array.from(claimIdMap.values());

const nameMap = new Map<string, FeedEntry>();
uniqueEntriesByClaimId.forEach((entry) => {
if (!nameMap.has(entry.name)) {
nameMap.set(entry.name, entry);
}
const uniqueFeedEntries: FeedEntry[] = [];
uniqueEntriesMap.forEach((sourceLinkMap) => {
sourceLinkMap.forEach((claimIdMap) => {
claimIdMap.forEach((entry) => {
uniqueFeedEntries.push(entry);
});
});
});

const uniqueEntriesByName = Array.from(nameMap.values());

return uniqueEntriesByName;
return uniqueFeedEntries;
} catch (error) {
console.error("Error fetching feed entries:", error);
throw new Error("Failed to fetch feed entries");
Expand Down
Loading

0 comments on commit f066327

Please sign in to comment.