From 7d8177c3bf73cb9bd0a536ee45b7ce8c45f3c495 Mon Sep 17 00:00:00 2001
From: Carlos Lostao
Date: Thu, 30 Jan 2025 15:50:18 +0100
Subject: [PATCH 1/4] feat: enable creation of public downloadable links

---
 backend/__tests__/e2e/objects/object.spec.ts  | 20 ++--
 backend/__tests__/e2e/uploads/files.spec.ts   |  6 +-
 backend/__tests__/e2e/uploads/folder.spec.ts  |  3 +-
 backend/__tests__/utils/mocks.ts              | 18 +---
 .../20250130141940-add-public-urls.js         | 53 +++++++++++
 .../20250130141940-add-public-urls-down.sql   |  3 +
 .../20250130141940-add-public-urls-up.sql     | 11 +++
 backend/src/controllers/object.ts             | 93 ++++++++++++++++++-
 backend/src/models/objects/object.ts          |  6 ++
 backend/src/models/users/user.ts              | 26 +-----
 .../repositories/objects/publishedObjects.ts  | 74 +++++++++++++++
 .../src/repositories/users/subscriptions.ts   | 10 ++
 backend/src/useCases/objects/files.ts         | 22 +++--
 backend/src/useCases/objects/object.ts        | 53 ++++++++++-
 backend/src/useCases/users/subscriptions.ts   |  7 ++
 15 files changed, 342 insertions(+), 63 deletions(-)
 create mode 100644 backend/migrations/20250130141940-add-public-urls.js
 create mode 100644 backend/migrations/sqls/20250130141940-add-public-urls-down.sql
 create mode 100644 backend/migrations/sqls/20250130141940-add-public-urls-up.sql
 create mode 100644 backend/src/repositories/objects/publishedObjects.ts

diff --git a/backend/__tests__/e2e/objects/object.spec.ts b/backend/__tests__/e2e/objects/object.spec.ts
index ee932148..03e2e99d 100644
--- a/backend/__tests__/e2e/objects/object.spec.ts
+++ b/backend/__tests__/e2e/objects/object.spec.ts
@@ -171,21 +171,27 @@ describe('Object', () => {
     ])
   })
 
+  const sharedWithUser = createMockUser()
   describe('Share object', () => {
     let randomFile: string
 
    it('should be able to share object', async () => {
       const mockUser = createMockUser()
-      randomFile = await uploadFile(mockUser, 'test.txt', 'test', 'text/plain')
+      randomFile = await uploadFile(
+        mockUser,
+        'test.txt',
+        Buffer.from(Math.random().toString()),
+        'text/plain',
+      )
       jest
         .spyOn(AuthManager, 'getUserFromPublicId')
-        .mockResolvedValueOnce(createMockUser())
+        .mockResolvedValueOnce(sharedWithUser)
 
       await expect(
         ObjectUseCases.shareObject(mockUser, randomFile, user.publicId!),
       ).resolves.not.toThrow()
 
-      const sharedRoots = await ObjectUseCases.getSharedRoots(user)
+      const sharedRoots = await ObjectUseCases.getSharedRoots(sharedWithUser)
       expect(sharedRoots.rows).toMatchObject([
         {
           headCid: randomFile,
@@ -195,21 +201,21 @@ describe('Object', () => {
 
     it('should be able to delete shared object', async () => {
       await expect(
-        ObjectUseCases.markAsDeleted(user, randomFile),
+        ObjectUseCases.markAsDeleted(sharedWithUser, randomFile),
       ).resolves.not.toThrow()
     })
 
     it('should not be listed in shared objects', async () => {
-      const sharedRoots = await ObjectUseCases.getSharedRoots(user)
+      const sharedRoots = await ObjectUseCases.getSharedRoots(sharedWithUser)
       expect(sharedRoots.rows).toMatchObject([])
     })
 
     it('should be able to restore shared object', async () => {
       await expect(
-        ObjectUseCases.restoreObject(user, randomFile),
+        ObjectUseCases.restoreObject(sharedWithUser, randomFile),
       ).resolves.not.toThrow()
 
-      const sharedRoots = await ObjectUseCases.getSharedRoots(user)
+      const sharedRoots = await ObjectUseCases.getSharedRoots(sharedWithUser)
       expect(sharedRoots.rows).toMatchObject([
         {
           headCid: randomFile,
diff --git a/backend/__tests__/e2e/uploads/files.spec.ts b/backend/__tests__/e2e/uploads/files.spec.ts
index f102a86b..39c44e22 100644
---
a/backend/__tests__/e2e/uploads/files.spec.ts +++ b/backend/__tests__/e2e/uploads/files.spec.ts @@ -203,8 +203,10 @@ files.map((file, index) => { describe('Downloading the file', () => { it('should be able to retrieve the file', async () => { - const file = await FilesUseCases.downloadObject(user, cid) - const fileArray = await asyncIterableToPromiseOfArray(file) + const { startDownload } = await FilesUseCases.downloadObject(user, cid) + const fileArray = await asyncIterableToPromiseOfArray( + await startDownload(), + ) const fileBuffer = Buffer.concat(fileArray) expect(fileBuffer).toEqual(rndBuffer) }) diff --git a/backend/__tests__/e2e/uploads/folder.spec.ts b/backend/__tests__/e2e/uploads/folder.spec.ts index b06b66ee..e23b872c 100644 --- a/backend/__tests__/e2e/uploads/folder.spec.ts +++ b/backend/__tests__/e2e/uploads/folder.spec.ts @@ -317,7 +317,8 @@ describe('Folder Upload', () => { it('should be able to download folder as zip', async () => { const zip = await FilesUseCases.downloadObject(user, folderCID) - const zipArray = await asyncIterableToPromiseOfArray(zip) + const dataStream = await zip.startDownload() + const zipArray = await asyncIterableToPromiseOfArray(dataStream) const zipBuffer = Buffer.concat(zipArray) expect(zipBuffer).toBeDefined() expect(() => { diff --git a/backend/__tests__/utils/mocks.ts b/backend/__tests__/utils/mocks.ts index 57ec2e1d..d5da2d3e 100644 --- a/backend/__tests__/utils/mocks.ts +++ b/backend/__tests__/utils/mocks.ts @@ -1,21 +1,11 @@ -import { - UnonboardedUser, - UserRole, - UserWithOrganization, -} from '../../src/models/users' +import { UserRole, UserWithOrganization } from '../../src/models/users' import { v4 } from 'uuid' -export const MOCK_UNONBOARDED_USER: UnonboardedUser = { - oauthProvider: 'google', - oauthUserId: '123', - role: UserRole.User, - publicId: null, - onboarded: false, -} - export const createMockUser = (): UserWithOrganization => { return { - ...MOCK_UNONBOARDED_USER, + oauthProvider: 'google', + oauthUserId: v4(), + role: UserRole.User, onboarded: true, organizationId: v4(), publicId: v4(), diff --git a/backend/migrations/20250130141940-add-public-urls.js b/backend/migrations/20250130141940-add-public-urls.js new file mode 100644 index 00000000..95383878 --- /dev/null +++ b/backend/migrations/20250130141940-add-public-urls.js @@ -0,0 +1,53 @@ +'use strict'; + +var dbm; +var type; +var seed; +var fs = require('fs'); +var path = require('path'); +var Promise; + +/** + * We receive the dbmigrate dependency from dbmigrate initially. + * This enables us to not have to rely on NODE_PATH. 
+ */ +exports.setup = function(options, seedLink) { + dbm = options.dbmigrate; + type = dbm.dataType; + seed = seedLink; + Promise = options.Promise; +}; + +exports.up = function(db) { + var filePath = path.join(__dirname, 'sqls', '20250130141940-add-public-urls-up.sql'); + return new Promise( function( resolve, reject ) { + fs.readFile(filePath, {encoding: 'utf-8'}, function(err,data){ + if (err) return reject(err); + console.log('received data: ' + data); + + resolve(data); + }); + }) + .then(function(data) { + return db.runSql(data); + }); +}; + +exports.down = function(db) { + var filePath = path.join(__dirname, 'sqls', '20250130141940-add-public-urls-down.sql'); + return new Promise( function( resolve, reject ) { + fs.readFile(filePath, {encoding: 'utf-8'}, function(err,data){ + if (err) return reject(err); + console.log('received data: ' + data); + + resolve(data); + }); + }) + .then(function(data) { + return db.runSql(data); + }); +}; + +exports._meta = { + "version": 1 +}; diff --git a/backend/migrations/sqls/20250130141940-add-public-urls-down.sql b/backend/migrations/sqls/20250130141940-add-public-urls-down.sql new file mode 100644 index 00000000..344dfa63 --- /dev/null +++ b/backend/migrations/sqls/20250130141940-add-public-urls-down.sql @@ -0,0 +1,3 @@ +DROP INDEX IF EXISTS idx_published_objects_public_id; +DROP INDEX IF EXISTS idx_published_objects_cid; +DROP TABLE IF EXISTS published_objects; diff --git a/backend/migrations/sqls/20250130141940-add-public-urls-up.sql b/backend/migrations/sqls/20250130141940-add-public-urls-up.sql new file mode 100644 index 00000000..b013526e --- /dev/null +++ b/backend/migrations/sqls/20250130141940-add-public-urls-up.sql @@ -0,0 +1,11 @@ +CREATE TABLE published_objects ( + id UUID PRIMARY KEY, + public_id VARCHAR(255) NOT NULL, + cid VARCHAR(255) NOT NULL, + created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP +); + +CREATE INDEX idx_published_objects_public_id ON published_objects(public_id); +CREATE INDEX idx_published_objects_cid ON published_objects(cid); +` \ No newline at end of file diff --git a/backend/src/controllers/object.ts b/backend/src/controllers/object.ts index c7db246f..8aec5398 100644 --- a/backend/src/controllers/object.ts +++ b/backend/src/controllers/object.ts @@ -203,7 +203,11 @@ objectController.get( return } - const metadata = await ObjectUseCases.getMetadata(cid) + logger.info(`Attempting to retrieve data for metadataCid: ${cid}`) + const { metadata, startDownload } = await FilesUseCases.downloadObject( + user, + cid, + ) if (!metadata) { res.status(404).json({ error: 'Metadata not found', @@ -211,9 +215,6 @@ objectController.get( return } - logger.info(`Attempting to retrieve data for metadataCid: ${cid}`) - const data = await FilesUseCases.downloadObject(user, cid) - const safeName = encodeURIComponent(metadata.name || 'download') if (metadata.type === 'file') { @@ -225,7 +226,7 @@ objectController.get( res.set('Content-Disposition', `attachment; filename="${safeName}.zip"`) } - pipeline(data, res, (err) => { + pipeline(await startDownload(), res, (err) => { if (err) { if (res.headersSent) return console.error('Error streaming data:', err) @@ -311,4 +312,86 @@ objectController.get( }), ) +objectController.post( + '/:cid/publish', + asyncSafeHandler(async (req, res) => { + const { cid } = req.params + + const user = await handleAuth(req, res) + if (!user) { + return + } + + const publishedObject = await ObjectUseCases.publishObject(user, cid) + + 
res.json({ result: publishedObject.id }) + }), +) + +objectController.get( + '/:id/public', + asyncSafeHandler(async (req, res) => { + try { + const { id } = req.params + + const user = await handleAuth(req, res) + if (!user) { + return + } + + const { metadata, startDownload } = + await ObjectUseCases.downloadPublishedObject(id) + if (!metadata) { + res.status(404).json({ + error: 'Published object not found', + }) + return + } + + const safeName = encodeURIComponent(metadata.name || 'download') + if (metadata.type === 'file') { + res.set('Content-Type', metadata.mimeType || 'application/octet-stream') + res.set('Content-Disposition', `attachment; filename="${safeName}"`) + res.set('Content-Length', metadata.totalSize.toString()) + } else { + res.set('Content-Type', 'application/zip') + res.set('Content-Disposition', `attachment; filename="${safeName}.zip"`) + } + + pipeline(await startDownload(), res, (err) => { + if (err) { + if (res.headersSent) return + console.error('Error streaming data:', err) + res.status(500).json({ + error: 'Failed to stream data', + details: err.message, + }) + } + }) + } catch (error: unknown) { + console.error('Error retrieving data:', error) + res.status(500).json({ + error: 'Failed to retrieve data', + details: error instanceof Error ? error.message : 'Unknown error', + }) + } + }), +) + +objectController.post( + '/:cid/unpublish', + asyncSafeHandler(async (req, res) => { + const { cid } = req.params + + const user = await handleAuth(req, res) + if (!user) { + return + } + + await ObjectUseCases.unpublishObject(user, cid) + + res.sendStatus(204) + }), +) + export { objectController } diff --git a/backend/src/models/objects/object.ts b/backend/src/models/objects/object.ts index 734caff4..6013be7f 100644 --- a/backend/src/models/objects/object.ts +++ b/backend/src/models/objects/object.ts @@ -1,4 +1,5 @@ import { OffchainMetadata } from '@autonomys/auto-dag-data' +import { AwaitIterable } from 'interface-store' export interface ObjectInformation { cid: string @@ -71,3 +72,8 @@ export const getObjectSummary = (object: ObjectInformation): ObjectSummary => { owners: object.owners, } } + +export interface FileDownload { + metadata: OffchainMetadata + startDownload: () => Promise> +} diff --git a/backend/src/models/users/user.ts b/backend/src/models/users/user.ts index bb151a76..2eddf884 100644 --- a/backend/src/models/users/user.ts +++ b/backend/src/models/users/user.ts @@ -27,32 +27,8 @@ export type OnboardedUser = UserBase & { onboarded: true } -export type UnonboardedUser = UserBase & { - publicId: null - onboarded: false -} - -export type User = OnboardedUser | UnonboardedUser +export type User = OnboardedUser export type UserWithOrganization = User & { organizationId: Organization['id'] } - -export const userFromOAuth = ( - user: Omit, -): UnonboardedUser => { - return { - ...user, - publicId: null, - onboarded: false, - } -} - -export const userFromTable = ( - user: Omit, -): OnboardedUser => { - return { - ...user, - onboarded: true, - } -} diff --git a/backend/src/repositories/objects/publishedObjects.ts b/backend/src/repositories/objects/publishedObjects.ts new file mode 100644 index 00000000..aac03de8 --- /dev/null +++ b/backend/src/repositories/objects/publishedObjects.ts @@ -0,0 +1,74 @@ +import { getDatabase } from '../../drivers/pg.js' + +type DBPublishedObject = { + id: string + public_id: string + cid: string + created_at: string + updated_at: string +} + +type PublishedObject = { + id: string + publicId: string + cid: string +} + +const 
mapToPublishedObject = ( + dbPublishedObject: DBPublishedObject, +): PublishedObject => { + return { + id: dbPublishedObject.id, + publicId: dbPublishedObject.public_id, + cid: dbPublishedObject.cid, + } +} + +const createPublishedObject = async ( + id: string, + publicId: string, + cid: string, +): Promise => { + const db = await getDatabase() + const result = await db.query( + 'INSERT INTO public.published_objects (id, public_id, cid) VALUES ($1, $2, $3) RETURNING *', + [id, publicId, cid], + ) + return result.rows.map(mapToPublishedObject)[0] +} + +const getPublishedObjectById = async ( + id: string, +): Promise => { + const db = await getDatabase() + const result = await db.query( + 'SELECT * FROM public.published_objects WHERE id = $1', + [id], + ) + return result.rows.map(mapToPublishedObject)[0] || null +} + +const updatePublishedObject = async ( + id: string, + publicId: string, + cid: string, +): Promise => { + const db = await getDatabase() + const result = await db.query( + 'UPDATE public.published_objects SET public_id = $1, cid = $2 WHERE id = $3 RETURNING *', + [publicId, cid, id], + ) + return result.rows.map(mapToPublishedObject)[0] +} + +const deletePublishedObject = async (id: string): Promise => { + const db = await getDatabase() + await db.query('DELETE FROM public.published_objects WHERE id = $1', [id]) +} + +export const publishedObjectsRepository = { + createPublishedObject, + getPublishedObjectById, + updatePublishedObject, + deletePublishedObject, +} diff --git a/backend/src/repositories/users/subscriptions.ts b/backend/src/repositories/users/subscriptions.ts index 42d858fc..f44b527a 100644 --- a/backend/src/repositories/users/subscriptions.ts +++ b/backend/src/repositories/users/subscriptions.ts @@ -33,6 +33,15 @@ const getByOrganizationId = async ( return mapRows(result.rows)[0] || null } +const getById = async (id: string): Promise => { + const db = await getDatabase() + const result = await db.query( + 'SELECT * FROM subscriptions WHERE id = $1', + [id], + ) + return mapRows(result.rows)[0] || null +} + const createSubscription = async ( id: string, organizationId: string, @@ -66,4 +75,5 @@ export const subscriptionsRepository = { getByOrganizationId, createSubscription, updateSubscription, + getById, } diff --git a/backend/src/useCases/objects/files.ts b/backend/src/useCases/objects/files.ts index 1f67208d..03513650 100644 --- a/backend/src/useCases/objects/files.ts +++ b/backend/src/useCases/objects/files.ts @@ -29,6 +29,7 @@ import { } from '../../utils/async.js' import { downloadService } from '../../services/download/index.js' import { FileGateway } from '../../services/dsn/fileGateway/index.js' +import { FileDownload } from '../../models/objects/object.js' const generateFileArtifacts = async ( uploadId: string, @@ -213,7 +214,7 @@ const retrieveAndReassembleFolderAsZip = async ( const downloadObject = async ( reader: UserWithOrganization, cid: string, -): Promise> => { +): Promise => { const metadata = await ObjectUseCases.getMetadata(cid) if (!metadata) { throw new Error(`Metadata with CID ${cid} not found`) @@ -229,15 +230,20 @@ const downloadObject = async ( throw new Error('Not enough download credits') } - const download = await downloadService.download(cid) + return { + metadata, + startDownload: async () => { + await SubscriptionsUseCases.registerInteraction( + reader, + InteractionType.Download, + metadata.totalSize, + ) - await SubscriptionsUseCases.registerInteraction( - reader, - InteractionType.Download, - metadata.totalSize, - ) + const 
download = await downloadService.download(cid) - return download + return download + }, + } } const handleFileUploadFinalization = async ( diff --git a/backend/src/useCases/objects/object.ts b/backend/src/useCases/objects/object.ts index 56a72765..3dd19253 100644 --- a/backend/src/useCases/objects/object.ts +++ b/backend/src/useCases/objects/object.ts @@ -1,5 +1,5 @@ import { OffchainMetadata } from '@autonomys/auto-dag-data' -import { User } from '../../models/users/index.js' +import { User, UserWithOrganization } from '../../models/users/index.js' import { getObjectSummary, ObjectInformation, @@ -17,6 +17,9 @@ import { UploadStatusUseCases } from './uploadStatus.js' import { MetadataEntry } from '../../repositories/objects/metadata.js' import { PaginatedResult } from './common.js' import { AuthManager } from '../../services/auth/index.js' +import { publishedObjectsRepository } from '../../repositories/objects/publishedObjects.js' +import { v4 } from 'uuid' +import { FilesUseCases } from './files.js' const getMetadata = async (cid: string) => { const entry = await metadataRepository.getMetadata(cid) @@ -300,6 +303,51 @@ const markAsArchived = async (cid: string) => { await nodesRepository.removeNodesByHeadCid(cid) } +const publishObject = async (user: UserWithOrganization, cid: string) => { + const objects = await publishedObjectsRepository.getPublishedObjectById(cid) + if (objects) { + return objects + } + + const publishedObject = + await publishedObjectsRepository.createPublishedObject( + v4(), + user.publicId, + cid, + ) + + return publishedObject +} + +const downloadPublishedObject = async (id: string) => { + const publishedObject = + await publishedObjectsRepository.getPublishedObjectById(id) + if (!publishedObject) { + throw new Error('Published object not found') + } + + const user = await AuthManager.getUserFromPublicId(publishedObject.publicId) + if (!user) { + throw new Error('User does not have a subscription') + } + + return FilesUseCases.downloadObject(user, publishedObject.cid) +} + +const unpublishObject = async (user: User, cid: string) => { + const publishedObject = + await publishedObjectsRepository.getPublishedObjectById(cid) + if (!publishedObject) { + return + } + + if (publishedObject.publicId !== user.publicId) { + throw new Error('User does not have access to this object') + } + + await publishedObjectsRepository.deletePublishedObject(cid) +} + export const ObjectUseCases = { getMetadata, getObjectInformation, @@ -318,4 +366,7 @@ export const ObjectUseCases = { hasAllNodesArchived, getNonArchivedObjects, markAsArchived, + publishObject, + downloadPublishedObject, + unpublishObject, } diff --git a/backend/src/useCases/users/subscriptions.ts b/backend/src/useCases/users/subscriptions.ts index 7459df85..2581b131 100644 --- a/backend/src/useCases/users/subscriptions.ts +++ b/backend/src/useCases/users/subscriptions.ts @@ -57,6 +57,12 @@ const getOrCreateSubscription = async ( return subscription } +const getSubscriptionById = async ( + id: string, +): Promise => { + return subscriptionsRepository.getById(id) +} + const initSubscription = async ( organizationId: string, ): Promise => { @@ -178,4 +184,5 @@ export const SubscriptionsUseCases = { getPendingCreditsByUserAndType, registerInteraction, getUserListSubscriptions, + getSubscriptionById, } From cd157b4d71d36f4f5ec80c65c518052b814e674c Mon Sep 17 00:00:00 2001 From: Carlos Lostao Date: Thu, 30 Jan 2025 15:54:13 +0100 Subject: [PATCH 2/4] update: add publicId to hasura metadata --- auth/src/models/jwt.ts | 37 
++++++++++--------- .../services/authManager/providers/custom.ts | 1 + 2 files changed, 20 insertions(+), 18 deletions(-) diff --git a/auth/src/models/jwt.ts b/auth/src/models/jwt.ts index 5165a92a..fcc0b26c 100644 --- a/auth/src/models/jwt.ts +++ b/auth/src/models/jwt.ts @@ -1,26 +1,27 @@ type BaseTokenPayload = { - id: string - isRefreshToken: boolean - oauthProvider: string - oauthUserId: string -} + id: string; + isRefreshToken: boolean; + oauthProvider: string; + oauthUserId: string; +}; export type CustomAccessTokenPayload = BaseTokenPayload & { - isRefreshToken: false - refreshTokenId: string - 'https://hasura.io/jwt/claims': { - 'x-hasura-default-role': string - 'x-hasura-allowed-roles': string[] - 'x-hasura-oauth-provider': string - 'x-hasura-oauth-user-id': string - 'x-hasura-organization-id': string - } -} + isRefreshToken: false; + refreshTokenId: string; + "https://hasura.io/jwt/claims": { + "x-hasura-default-role": string; + "x-hasura-allowed-roles": string[]; + "x-hasura-oauth-provider": string; + "x-hasura-oauth-user-id": string; + "x-hasura-organization-id": string; + "x-hasura-public-id": string; + }; +}; export type CustomRefreshTokenPayload = BaseTokenPayload & { - isRefreshToken: true -} + isRefreshToken: true; +}; export type CustomTokenPayload = | CustomAccessTokenPayload - | CustomRefreshTokenPayload + | CustomRefreshTokenPayload; diff --git a/auth/src/services/authManager/providers/custom.ts b/auth/src/services/authManager/providers/custom.ts index 3bea2268..54ef2dae 100644 --- a/auth/src/services/authManager/providers/custom.ts +++ b/auth/src/services/authManager/providers/custom.ts @@ -63,6 +63,7 @@ const createAccessToken = async ( "x-hasura-organization-id": userInfo?.onboarded ? userInfo.organizationId : "none", + "x-hasura-public-id": userInfo?.publicId ?? 
"none", }, }; From 8fbd49c0eecbdd5bb22e8f99653c27834e15e88d Mon Sep 17 00:00:00 2001 From: Carlos Lostao Date: Thu, 30 Jan 2025 15:55:27 +0100 Subject: [PATCH 3/4] update: hasura metadata --- .../databases/default/tables/metadata.yaml | 9 +++++++++ .../default/tables/published_objects.yaml | 15 +++++++++++++++ .../metadata/databases/default/tables/tables.yaml | 1 + 3 files changed, 25 insertions(+) create mode 100644 hasura/metadata/databases/default/tables/published_objects.yaml diff --git a/hasura/metadata/databases/default/tables/metadata.yaml b/hasura/metadata/databases/default/tables/metadata.yaml index 3e12bac1..fdc81854 100644 --- a/hasura/metadata/databases/default/tables/metadata.yaml +++ b/hasura/metadata/databases/default/tables/metadata.yaml @@ -33,6 +33,15 @@ object_relationships: remote_table: name: nodes schema: public +object_relationships: + - name: published_objects + using: + manual_configuration: + column_mapping: + head_cid: cid + remote_table: + name: published_objects + schema: public array_relationships: - name: nodes using: diff --git a/hasura/metadata/databases/default/tables/published_objects.yaml b/hasura/metadata/databases/default/tables/published_objects.yaml new file mode 100644 index 00000000..0fe383af --- /dev/null +++ b/hasura/metadata/databases/default/tables/published_objects.yaml @@ -0,0 +1,15 @@ +table: + name: published_objects + schema: public +select_permissions: + - role: user + permission: + columns: + - id + - public_id + - cid + - created_at + - updated_at + filter: { public_id: { _eq: "X-Hasura-Public-Id" } } + allow_aggregations: true + comment: "" diff --git a/hasura/metadata/databases/default/tables/tables.yaml b/hasura/metadata/databases/default/tables/tables.yaml index d32dd516..ffabbdfd 100644 --- a/hasura/metadata/databases/default/tables/tables.yaml +++ b/hasura/metadata/databases/default/tables/tables.yaml @@ -8,3 +8,4 @@ - "!include transaction_results.yaml" - "!include users.yaml" - "!include users_organizations.yaml" +- "!include published_objects.yaml" From b867842a7cdc1440f43f2c7273c3489342c00085 Mon Sep 17 00:00:00 2001 From: Carlos Lostao Date: Thu, 30 Jan 2025 15:56:02 +0100 Subject: [PATCH 4/4] fix: small issues --- .../__tests__/e2e/objects/publicUrl.spec.ts | 70 ++++++++++ .../20250130141940-add-public-urls.js | 88 +++++++------ .../20250130141940-add-public-urls-up.sql | 7 +- .../repositories/objects/publishedObjects.ts | 2 +- frontend/src/models/FileTree.tsx | 124 ------------------ 5 files changed, 120 insertions(+), 171 deletions(-) create mode 100644 backend/__tests__/e2e/objects/publicUrl.spec.ts delete mode 100644 frontend/src/models/FileTree.tsx diff --git a/backend/__tests__/e2e/objects/publicUrl.spec.ts b/backend/__tests__/e2e/objects/publicUrl.spec.ts new file mode 100644 index 00000000..b1c37d02 --- /dev/null +++ b/backend/__tests__/e2e/objects/publicUrl.spec.ts @@ -0,0 +1,70 @@ +import { InteractionType } from '../../../src/models/objects/interactions' +import { UserWithOrganization } from '../../../src/models/users/user' +import { PublishedObject } from '../../../src/repositories/objects/publishedObjects' +import { AuthManager } from '../../../src/services/auth' +import { ObjectUseCases, SubscriptionsUseCases } from '../../../src/useCases' +import { asyncIterableToPromiseOfArray } from '../../../src/utils/async' +import { dbMigration } from '../../utils/dbMigrate' +import { createMockUser } from '../../utils/mocks' +import { uploadFile } from '../../utils/uploads' +import { jest } from '@jest/globals' + 
+describe('Public URL', () => { + let user: UserWithOrganization + let fileCid: string + let publishedObject: PublishedObject + const content = 'test' + + beforeAll(async () => { + await dbMigration.up() + user = createMockUser() + fileCid = await uploadFile(user, 'test.txt', content, 'text/plain') + }) + + afterAll(async () => { + await dbMigration.down() + }) + + it('should be able to publish and retrieve', async () => { + publishedObject = await ObjectUseCases.publishObject(user, fileCid) + + expect(publishedObject).toMatchObject({ + publicId: user.publicId, + cid: fileCid, + id: expect.any(String), + }) + }) + + it('should be downloadable by public id and credits should be deducted', async () => { + jest.spyOn(AuthManager, 'getUserFromPublicId').mockResolvedValue(user) + + const { metadata, startDownload } = + await ObjectUseCases.downloadPublishedObject(publishedObject.id) + + expect(metadata).toMatchObject({ + type: 'file', + dataCid: publishedObject.cid, + }) + + const pendingCredits = + await SubscriptionsUseCases.getPendingCreditsByUserAndType( + user, + InteractionType.Download, + ) + + const downloadedContent = Buffer.concat( + await asyncIterableToPromiseOfArray(await startDownload()), + ) + expect(downloadedContent).toEqual(Buffer.from(content)) + + const updatedPendingCredits = + await SubscriptionsUseCases.getPendingCreditsByUserAndType( + user, + InteractionType.Download, + ) + + expect(updatedPendingCredits).toBe( + pendingCredits - downloadedContent.length, + ) + }) +}) diff --git a/backend/migrations/20250130141940-add-public-urls.js b/backend/migrations/20250130141940-add-public-urls.js index 95383878..fd6734fa 100644 --- a/backend/migrations/20250130141940-add-public-urls.js +++ b/backend/migrations/20250130141940-add-public-urls.js @@ -1,53 +1,57 @@ -'use strict'; +'use strict' -var dbm; -var type; -var seed; -var fs = require('fs'); -var path = require('path'); -var Promise; +var dbm +var type +var seed +var fs = require('fs') +var path = require('path') +var Promise /** - * We receive the dbmigrate dependency from dbmigrate initially. - * This enables us to not have to rely on NODE_PATH. - */ -exports.setup = function(options, seedLink) { - dbm = options.dbmigrate; - type = dbm.dataType; - seed = seedLink; - Promise = options.Promise; -}; + * We receive the dbmigrate dependency from dbmigrate initially. + * This enables us to not have to rely on NODE_PATH. 
+ */ +exports.setup = function (options, seedLink) { + dbm = options.dbmigrate + type = dbm.dataType + seed = seedLink + Promise = options.Promise +} -exports.up = function(db) { - var filePath = path.join(__dirname, 'sqls', '20250130141940-add-public-urls-up.sql'); - return new Promise( function( resolve, reject ) { - fs.readFile(filePath, {encoding: 'utf-8'}, function(err,data){ - if (err) return reject(err); - console.log('received data: ' + data); +exports.up = function (db) { + var filePath = path.join( + __dirname, + 'sqls', + '20250130141940-add-public-urls-up.sql', + ) + return new Promise(function (resolve, reject) { + fs.readFile(filePath, { encoding: 'utf-8' }, function (err, data) { + if (err) return reject(err) - resolve(data); - }); + resolve(data) + }) + }).then(function (data) { + return db.runSql(data) }) - .then(function(data) { - return db.runSql(data); - }); -}; +} -exports.down = function(db) { - var filePath = path.join(__dirname, 'sqls', '20250130141940-add-public-urls-down.sql'); - return new Promise( function( resolve, reject ) { - fs.readFile(filePath, {encoding: 'utf-8'}, function(err,data){ - if (err) return reject(err); - console.log('received data: ' + data); +exports.down = function (db) { + var filePath = path.join( + __dirname, + 'sqls', + '20250130141940-add-public-urls-down.sql', + ) + return new Promise(function (resolve, reject) { + fs.readFile(filePath, { encoding: 'utf-8' }, function (err, data) { + if (err) return reject(err) - resolve(data); - }); + resolve(data) + }) + }).then(function (data) { + return db.runSql(data) }) - .then(function(data) { - return db.runSql(data); - }); -}; +} exports._meta = { - "version": 1 -}; + version: 1, +} diff --git a/backend/migrations/sqls/20250130141940-add-public-urls-up.sql b/backend/migrations/sqls/20250130141940-add-public-urls-up.sql index b013526e..94ab0f94 100644 --- a/backend/migrations/sqls/20250130141940-add-public-urls-up.sql +++ b/backend/migrations/sqls/20250130141940-add-public-urls-up.sql @@ -1,11 +1,10 @@ CREATE TABLE published_objects ( - id UUID PRIMARY KEY, - public_id VARCHAR(255) NOT NULL, - cid VARCHAR(255) NOT NULL, + id text PRIMARY KEY, + public_id text NOT NULL, + cid text NOT NULL, created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ); CREATE INDEX idx_published_objects_public_id ON published_objects(public_id); CREATE INDEX idx_published_objects_cid ON published_objects(cid); -` \ No newline at end of file diff --git a/backend/src/repositories/objects/publishedObjects.ts b/backend/src/repositories/objects/publishedObjects.ts index aac03de8..16a15de6 100644 --- a/backend/src/repositories/objects/publishedObjects.ts +++ b/backend/src/repositories/objects/publishedObjects.ts @@ -8,7 +8,7 @@ type DBPublishedObject = { updated_at: string } -type PublishedObject = { +export type PublishedObject = { id: string publicId: string cid: string diff --git a/frontend/src/models/FileTree.tsx b/frontend/src/models/FileTree.tsx deleted file mode 100644 index 59705b49..00000000 --- a/frontend/src/models/FileTree.tsx +++ /dev/null @@ -1,124 +0,0 @@ -// import JSZip from 'jszip'; - -// type FolderTreeFolder = { -// name: string; -// type: 'folder'; -// children: FolderTree[]; -// id: string; -// }; - -// type FolderTreeFile = { -// name: string; -// type: 'file'; -// id: string; -// }; - -// export const getTreeFiles = ( -// tree: FolderTree, -// accumulatedPath: string, -// ): File[] => { -// if (tree.type === 'file') { -// return [{ ...tree, 
webkitRelativePath: accumulatedPath }]; -// } -// return tree.children.flatMap(getTreeFiles); -// }; - -// export type FolderTree = FolderTreeFolder | FolderTreeFile; - -// export const getFileId = (file: File) => { -// return `${file.webkitRelativePath}/${file.name}`; -// }; - -// export const constructFromFileSystemEntries = ( -// entries: { file: File; path: string }[], -// ): [FolderTree, Record] => { -// const root: FolderTreeFolder = { -// name: 'root', -// type: 'folder', -// children: [], -// id: 'root', -// }; - -// const files: Record = {}; -// for (const entry of entries) { -// if (entry.file) { -// files[entry.path] = entry.file; -// } -// const pathParts = entry.path.split('/').filter(Boolean); -// let currentFolder = root; - -// for (const [index, part] of Array.from(pathParts.entries())) { -// // Check if the part already exists in the current folder's children -// let existingFolder = currentFolder.children.find( -// (child) => child.name === part, -// ); - -// if (!existingFolder) { -// // If it's the last part, create a file node -// if (index === pathParts.length - 1) { -// const fileNode: FolderTreeFile = { -// name: part, -// type: 'file', -// id: entry.path, -// }; -// currentFolder.children.push(fileNode); -// } else { -// // Create a new folder node -// const folderNode: FolderTreeFolder = { -// name: part, -// type: 'folder', -// children: [], -// id: `${currentFolder.id.split('/').slice(1).join('/')}/${part}`, -// }; -// currentFolder.children.push(folderNode); -// existingFolder = folderNode; -// } -// } -// currentFolder = existingFolder as FolderTreeFolder; // Move to the next folder -// } -// } - -// return [root.children.length === 1 ? root.children[0] : root, files]; -// }; - -// const addFilesToZip = ( -// folder: JSZip, -// folderNode: FolderTreeFolder, -// files: Record, -// ) => { -// folderNode.children.forEach((child) => { -// if (child.type === 'file') { -// folder.file(child.name, files[child.id]); -// } else if (child.type === 'folder') { -// const subFolder = folder.folder(child.name); -// if (!subFolder) { -// throw new Error('Failed to create folder in zip'); -// } -// addFilesToZip(subFolder, child as FolderTreeFolder, files); -// } -// }); -// }; - -// export const constructZipBlob = ( -// tree: FolderTree, -// files: Record, -// ) => { -// const zip = new JSZip(); -// if (tree.type === 'file') { -// throw new Error('Cannot construct zip from file'); -// } - -// tree.children.forEach((node) => { -// if (node.type === 'file') { -// zip.file(node.name, files[node.id]); -// } else if (node.type === 'folder') { -// const folder = zip.folder(node.name); -// if (!folder) { -// throw new Error('Failed to create folder in zip'); -// } -// addFilesToZip(folder, node as FolderTreeFolder, files); -// } -// }); - -// return zip.generateAsync({ type: 'blob' }); -// };
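Usage sketch (not part of the patches above): a minimal TypeScript client exercising the publish/download/unpublish routes added in this series. It assumes the objectController is mounted under an `/objects` prefix and that handleAuth accepts a bearer token; neither detail appears in these patches, so adjust both to the real deployment.

// Node 18+ (global fetch). Illustrative only; prefix and auth scheme are assumptions.
const API_BASE = process.env.API_BASE ?? 'http://localhost:3000'
const TOKEN = process.env.API_TOKEN ?? ''
const headers = { Authorization: `Bearer ${TOKEN}` }

// POST /:cid/publish responds with { result: <published object id> }.
async function publish(cid: string): Promise<string> {
  const res = await fetch(`${API_BASE}/objects/${cid}/publish`, { method: 'POST', headers })
  if (!res.ok) throw new Error(`publish failed: ${res.status}`)
  const { result } = (await res.json()) as { result: string }
  return result
}

// GET /:id/public streams the file (or a zip for folders). In this revision the
// route still goes through handleAuth, so the token is sent here as well.
async function downloadPublished(id: string): Promise<Buffer> {
  const res = await fetch(`${API_BASE}/objects/${id}/public`, { headers })
  if (!res.ok) throw new Error(`download failed: ${res.status}`)
  return Buffer.from(await res.arrayBuffer())
}

// POST /:cid/unpublish removes the public link and answers 204.
async function unpublish(cid: string): Promise<void> {
  const res = await fetch(`${API_BASE}/objects/${cid}/unpublish`, { method: 'POST', headers })
  if (res.status !== 204) throw new Error(`unpublish failed: ${res.status}`)
}

// Example flow: publish a CID, fetch it through its public id, then unpublish.
async function main() {
  const cid = process.argv[2]
  const publicId = await publish(cid)
  const bytes = await downloadPublished(publicId)
  console.log(`published ${cid} as ${publicId}, downloaded ${bytes.length} bytes`)
  await unpublish(cid)
}

main().catch(console.error)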