diff --git a/.gitignore b/.gitignore
index 857a47a3..a1b0646c 100644
--- a/.gitignore
+++ b/.gitignore
@@ -37,5 +37,6 @@
 coverage/
 *.tmp
 *.temp
+.cache/
 database.sqlite
 .cursorrules
\ No newline at end of file
diff --git a/backend/__tests__/e2e/uploads/files.spec.ts b/backend/__tests__/e2e/uploads/files.spec.ts
index fc45320a..f102a86b 100644
--- a/backend/__tests__/e2e/uploads/files.spec.ts
+++ b/backend/__tests__/e2e/uploads/files.spec.ts
@@ -29,7 +29,6 @@ import {
   nodesRepository,
 } from '../../../src/repositories/index.js'
 import { InteractionType } from '../../../src/models/objects/interactions.js'
-import { databaseDownloadCache } from '../../../src/services/download/databaseDownloadCache/index.js'
 import { memoryDownloadCache } from '../../../src/services/download/memoryDownloadCache/index.js'
 import {
   OwnerRole,
@@ -38,6 +37,7 @@ import {
 import { FileGateway } from '../../../src/services/dsn/fileGateway/index.js'
 import { jest } from '@jest/globals'
 import { downloadService } from '../../../src/services/download/index.js'
+import { fsCache } from '../../../src/services/download/fsCache/singleton.js'
 import { handleArchivedObjects } from '../../../src/services/upload/nodeRemover/index.js'
 
 const files = [
@@ -223,10 +223,10 @@ files.map((file, index) => {
     })
 
     it('download cache should be updated', async () => {
-      const asyncFromDatabase = await databaseDownloadCache.get(cid)
+      const asyncFromDatabase = await fsCache.get(cid)
       expect(asyncFromDatabase).not.toBeNull()
       const fileArrayFromDatabase = await asyncIterableToPromiseOfArray(
-        asyncFromDatabase!,
+        asyncFromDatabase!.data,
       )
       const fileBufferFromDatabase = Buffer.concat(fileArrayFromDatabase)
       expect(fileBufferFromDatabase).toEqual(rndBuffer)
@@ -323,11 +323,11 @@ files.map((file, index) => {
     expect(metadata?.is_archived).toBe(true)
 
     expect(memoryDownloadCache.has(cid)).toBe(true)
-    expect(await databaseDownloadCache.has(cid)).toBe(true)
+    expect(await fsCache.get(cid)).not.toBeNull()
   })
 
-  it('should be able to downloaded from gateway', async () => {
-    await databaseDownloadCache.clear()
+  it('should be able to remove the nodes', async () => {
+    await fsCache.clear()
     await memoryDownloadCache.clear()
 
     const downloadFileMock = jest
diff --git a/backend/migrations/20250127141651-delete-download-cache-schema.js b/backend/migrations/20250127141651-delete-download-cache-schema.js
new file mode 100644
index 00000000..5666f179
--- /dev/null
+++ b/backend/migrations/20250127141651-delete-download-cache-schema.js
@@ -0,0 +1,53 @@
+'use strict';
+
+var dbm;
+var type;
+var seed;
+var fs = require('fs');
+var path = require('path');
+var Promise;
+
+/**
+  * We receive the dbmigrate dependency from dbmigrate initially.
+  * This enables us to not have to rely on NODE_PATH.
+  */
+exports.setup = function(options, seedLink) {
+  dbm = options.dbmigrate;
+  type = dbm.dataType;
+  seed = seedLink;
+  Promise = options.Promise;
+};
+
+exports.up = function(db) {
+  var filePath = path.join(__dirname, 'sqls', '20250127141651-delete-download-cache-schema-up.sql');
+  return new Promise( function( resolve, reject ) {
+    fs.readFile(filePath, {encoding: 'utf-8'}, function(err,data){
+      if (err) return reject(err);
+      console.log('received data: ' + data);
+
+      resolve(data);
+    });
+  })
+  .then(function(data) {
+    return db.runSql(data);
+  });
+};
+
+exports.down = function(db) {
+  var filePath = path.join(__dirname, 'sqls', '20250127141651-delete-download-cache-schema-down.sql');
+  return new Promise( function( resolve, reject ) {
+    fs.readFile(filePath, {encoding: 'utf-8'}, function(err,data){
+      if (err) return reject(err);
+      console.log('received data: ' + data);
+
+      resolve(data);
+    });
+  })
+  .then(function(data) {
+    return db.runSql(data);
+  });
+};
+
+exports._meta = {
+  "version": 1
+};
diff --git a/backend/migrations/sqls/20250127141651-delete-download-cache-schema-down.sql b/backend/migrations/sqls/20250127141651-delete-download-cache-schema-down.sql
new file mode 100644
index 00000000..88de4e9b
--- /dev/null
+++ b/backend/migrations/sqls/20250127141651-delete-download-cache-schema-down.sql
@@ -0,0 +1,14 @@
+CREATE SCHEMA IF NOT EXISTS download_cache;
+
+CREATE TABLE IF NOT EXISTS download_cache.file_parts (
+    "cid" TEXT,
+    "index" INTEGER NOT NULL,
+    "data" BYTEA NOT NULL,
+    PRIMARY KEY (cid, index)
+);
+
+CREATE TABLE IF NOT EXISTS download_cache.registry (
+    "cid" TEXT PRIMARY KEY,
+    "last_accessed_at" TIMESTAMP WITH TIME ZONE NOT NULL,
+    "size" BIGINT NOT NULL
+);
diff --git a/backend/migrations/sqls/20250127141651-delete-download-cache-schema-up.sql b/backend/migrations/sqls/20250127141651-delete-download-cache-schema-up.sql
new file mode 100644
index 00000000..ac44181e
--- /dev/null
+++ b/backend/migrations/sqls/20250127141651-delete-download-cache-schema-up.sql
@@ -0,0 +1,3 @@
+DROP TABLE IF EXISTS download_cache.file_parts;
+DROP TABLE IF EXISTS download_cache.registry;
+DROP SCHEMA IF EXISTS download_cache;
diff --git a/backend/package.json b/backend/package.json
index cfa6e987..0ce3c5e5 100644
--- a/backend/package.json
+++ b/backend/package.json
@@ -15,22 +15,27 @@
   "dependencies": {
     "@autonomys/auto-dag-data": "^1.0.8",
     "@autonomys/auto-drive": "^1.0.5",
+    "@keyvhq/sqlite": "^2.1.6",
     "@polkadot/api": "^12.3.1",
     "@polkadot/types": "^13.0.1",
     "@polkadot/util-crypto": "^13.0.2",
     "aws-sdk": "^2.1692.0",
     "body-parser": "^1.20.2",
+    "cache-manager": "^6.4.0",
     "cors": "^2.8.5",
     "db-migrate": "^0.11.14",
     "db-migrate-pg": "^1.5.2",
     "dotenv": "^16.4.5",
     "express": "^4.19.2",
     "jsonwebtoken": "^9.0.2",
+    "keyv": "^5.2.1",
+    "lru-cache": "^11.0.2",
     "multer": "^1.4.5-lts.1",
     "multiformats": "^13.2.2",
     "pg": "^8.13.0",
     "pg-format": "^1.0.4",
     "pizzip": "^3.1.7",
+    "tar": "^7.4.3",
     "uuid": "^10.0.0",
     "websocket": "^1.0.35",
     "winston": "^3.17.0",
diff --git a/backend/src/config.ts b/backend/src/config.ts
index 5c2e5432..95d9f4e5 100644
--- a/backend/src/config.ts
+++ b/backend/src/config.ts
@@ -1,8 +1,10 @@
 import { env } from './utils/misc.js'
 
-const DEFAULT_CHUNK_SIZE = 10 * 1024 ** 2
 const DEFAULT_MAX_CACHE_SIZE = BigInt(10 * 1024 ** 3)
 
+const DEFAULT_CACHE_MAX_SIZE = 10 * 1024 ** 3 // 10GB
+const DEFAULT_CACHE_TTL = 1000000 // TTL in milliseconds (Keyv TTLs are ms), ~16.7 minutes
+
 export const config = {
   logLevel: env('LOG_LEVEL', 'info'),
   postgres: {
@@ -12,17 +14,6 @@
   requestSizeLimit: env('REQUEST_SIZE_LIMIT', '200mb'),
   corsAllowedOrigins: process.env.CORS_ALLOWED_ORIGINS,
   rpcEndpoint: env('RPC_ENDPOINT', 'ws://localhost:9944'),
-  databaseDownloadCache: {
-    chunkSize: Number(
-      env('DATABASE_DOWNLOAD_CACHE_CHUNK_SIZE', DEFAULT_CHUNK_SIZE.toString()),
-    ),
-    maxCacheSize: BigInt(
-      env(
-        'DATABASE_DOWNLOAD_CACHE_MAX_SIZE',
-        DEFAULT_MAX_CACHE_SIZE.toString(),
-      ),
-    ),
-  },
   memoryDownloadCache: {
     maxCacheSize: Number(
       env('MEMORY_DOWNLOAD_CACHE_MAX_SIZE', DEFAULT_MAX_CACHE_SIZE.toString()),
@@ -36,4 +27,9 @@
     url: env('AUTH_SERVICE_URL', 'http://localhost:3030'),
     token: env('AUTH_SERVICE_API_KEY'),
   },
+  cacheDir: env('CACHE_DIR', './.cache'),
+  cacheMaxSize: Number(
+    env('CACHE_MAX_SIZE', DEFAULT_CACHE_MAX_SIZE.toString()),
+  ),
+  cacheTtl: Number(env('CACHE_TTL', DEFAULT_CACHE_TTL.toString())),
 }
diff --git a/backend/src/repositories/cache/fileParts.ts b/backend/src/repositories/cache/fileParts.ts
deleted file mode 100644
index c9cb0942..00000000
--- a/backend/src/repositories/cache/fileParts.ts
+++ /dev/null
@@ -1,58 +0,0 @@
-import { getDatabase } from '../../drivers/pg.js'
-
-export interface CacheFilePart {
-  cid: string
-  index: number
-  data: Buffer
-}
-
-const addFilePart = async (filePart: CacheFilePart) => {
-  const db = await getDatabase()
-  await db.query({
-    text: 'INSERT INTO download_cache.file_parts (cid, index, data) VALUES ($1, $2, $3)',
-    values: [filePart.cid, filePart.index, filePart.data],
-  })
-}
-
-const getFilePartCount = async (cid: string) => {
-  const db = await getDatabase()
-  const result = await db.query<{ count: number }>({
-    text: 'SELECT COUNT(*) as count FROM download_cache.file_parts WHERE cid = $1',
-    values: [cid],
-  })
-
-  return result.rows[0].count
-}
-
-const getFilePart = async (cid: string, index: number) => {
-  const db = await getDatabase()
-  const result = await db.query({
-    text: 'SELECT * FROM download_cache.file_parts WHERE cid = $1 AND index = $2',
-    values: [cid, index],
-  })
-
-  return result.rows[0]
-}
-
-const removeFileParts = async (cids: string[]) => {
-  const db = await getDatabase()
-  await db.query({
-    text: 'DELETE FROM download_cache.file_parts WHERE cid = ANY($1)',
-    values: [cids],
-  })
-}
-
-export const clear = async () => {
-  const db = await getDatabase()
-  await db.query({
-    text: 'DELETE FROM download_cache.file_parts',
-  })
-}
-
-export const downloadCacheFilePartsRepository = {
-  addFilePart,
-  getFilePartCount,
-  getFilePart,
-  removeFileParts,
-  clear,
-}
diff --git a/backend/src/repositories/cache/registry.ts b/backend/src/repositories/cache/registry.ts
deleted file mode 100644
index 75895baf..00000000
--- a/backend/src/repositories/cache/registry.ts
+++ /dev/null
@@ -1,74 +0,0 @@
-import { getDatabase } from '../../drivers/pg.js'
-
-interface RegistryEntry {
-  cid: string
-  last_accessed_at: Date
-  size: string
-}
-
-const toBigInt = (value: RegistryEntry) => ({
-  ...value,
-  size: BigInt(value.size).valueOf(),
-})
-
-const addEntry = async (entry: RegistryEntry) => {
-  const db = await getDatabase()
-  await db
-    .query({
-      text: 'INSERT INTO download_cache.registry (cid, last_accessed_at, size) VALUES ($1, $2, $3)',
-      values: [entry.cid, entry.last_accessed_at, entry.size],
-    })
-    .then((e) => e.rows.map(toBigInt))
-}
-
-const removeEntries = async (cids: string[]) => {
-  const db = await getDatabase()
-  await db.query({
-    text: 'DELETE FROM download_cache.registry WHERE cid = ANY($1)',
-    values: [cids],
-  })
-}
-
-const getEntriesSortedByLastAccessedAt = async () => {
-  const db = await getDatabase()
-  const result = await db.query({
-    text: 'SELECT * FROM download_cache.registry ORDER BY last_accessed_at ASC',
-  })
-
-  return result.rows.map(toBigInt)
-}
-
-const getTotalSize = async () => {
-  const db = await getDatabase()
-  return db
-    .query<{ size: string | null }>({
-      text: 'SELECT SUM(size) as size FROM download_cache.registry',
-    })
-    .then((result) => BigInt(result.rows[0].size ?? '0').valueOf())
-}
-
-const getEntry = async (cid: string) => {
-  const db = await getDatabase()
-  const result = await db.query({
-    text: 'SELECT * FROM download_cache.registry WHERE cid = $1',
-    values: [cid],
-  })
-
-  return result.rows.map(toBigInt)[0]
-}
-
-export const clear = async () => {
-  const db = await getDatabase()
-  await db.query({
-    text: 'DELETE FROM download_cache.registry',
-  })
-}
-
-export const registryRepository = {
-  addEntry,
-  removeEntries,
-  getEntriesSortedByLastAccessedAt,
-  getTotalSize,
-  getEntry,
-  clear,
-}
diff --git a/backend/src/services/download/databaseDownloadCache/index.ts b/backend/src/services/download/databaseDownloadCache/index.ts
deleted file mode 100644
index 7d8430af..00000000
--- a/backend/src/services/download/databaseDownloadCache/index.ts
+++ /dev/null
@@ -1,92 +0,0 @@
-import { AwaitIterable } from 'interface-store'
-import { downloadCacheFilePartsRepository } from '../../../repositories/cache/fileParts.js'
-import { registryRepository } from '../../../repositories/cache/registry.js'
-import { asyncByChunk } from '../../../utils/async.js'
-import { config } from '../../../config.js'
-
-const internalSet = async function* (
-  cid: string,
-  data: AwaitIterable<Buffer>,
-  size: bigint,
-): AsyncIterable<Buffer> {
-  let i = 0
-  for await (const chunk of asyncByChunk(
-    data,
-    config.databaseDownloadCache.chunkSize,
-  )) {
-    await downloadCacheFilePartsRepository.addFilePart({
-      cid,
-      index: i,
-      data: chunk,
-    })
-    yield chunk
-    i++
-  }
-
-  if (i > 0) {
-    await registryRepository.addEntry({
-      cid,
-      size: size.toString(),
-      last_accessed_at: new Date(),
-    })
-  }
-}
-
-const updateCacheSize = async (size: bigint) => {
-  let currentSize = BigInt(await registryRepository.getTotalSize())
-  const newSize = currentSize + BigInt(size)
-  if (newSize > config.databaseDownloadCache.maxCacheSize) {
-    const entries = await registryRepository.getEntriesSortedByLastAccessedAt()
-    for (const entry of entries) {
-      if (currentSize <= config.databaseDownloadCache.maxCacheSize) {
-        break
-      }
-      await registryRepository.removeEntries([entry.cid])
-      currentSize -= BigInt(entry.size)
-    }
-  }
-}
-
-const set = async (
-  cid: string,
-  data: AwaitIterable<Buffer>,
-  size: bigint,
-): Promise<AwaitIterable<Buffer>> => {
-  if (await has(cid)) {
-    return data
-  }
-
-  await updateCacheSize(size)
-  return internalSet(cid, data, size)
-}
-
-const get = async function* (cid: string): AsyncIterable<Buffer> {
-  const entry = await registryRepository.getEntry(cid)
-  if (!entry) {
-    return null
-  }
-
-  const fileParts = await downloadCacheFilePartsRepository.getFilePartCount(cid)
-  for (let i = 0; i < fileParts; i++) {
-    const filePart = await downloadCacheFilePartsRepository.getFilePart(cid, i)
-    yield filePart.data
-  }
-}
-
-const has = async (cid: string): Promise<boolean> => {
-  const entry = await registryRepository.getEntry(cid)
-
-  return Boolean(entry).valueOf()
-}
-
-export const clear = async () => {
-  await downloadCacheFilePartsRepository.clear()
-  await registryRepository.clear()
-}
-
-export const databaseDownloadCache = {
-  set,
-  get,
-  has,
-  clear,
-}
diff --git a/backend/src/services/download/fsCache/index.ts b/backend/src/services/download/fsCache/index.ts
new file mode 100644
index 00000000..49da60e3
--- /dev/null
+++ b/backend/src/services/download/fsCache/index.ts
@@ -0,0 +1,122 @@
+import fsPromises from 'fs/promises'
+import fs from 'fs'
+import path from 'path'
+import { createCache } from 'cache-manager'
+import { writeFile } from '../../../utils/fs.js'
+import { BaseCacheConfig, FileResponse } from './types.js'
+import { logger } from '../../../drivers/logger.js'
+
+const CHARS_PER_PARTITION = 2
+
+type FileCacheEntry = Omit<FileResponse, 'data'>
+
+type UncheckedFileCacheEntry = FileCacheEntry | null | undefined
+
+export const createFileCache = (config: BaseCacheConfig) => {
+  const cidToFilePath = (cid: string) => {
+    const partitions = config.pathPartitions
+
+    let filePath = ''
+    let head = cid
+    for (let i = 0; i < partitions; i++) {
+      filePath = path.join(filePath, `${head.slice(-CHARS_PER_PARTITION)}/`)
+      head = head.slice(0, -CHARS_PER_PARTITION)
+    }
+    filePath = path.join(filePath, head)
+
+    return path.join(config.cacheDir, filePath)
+  }
+
+  const filepathCache = createCache({
+    stores: config.stores,
+    nonBlocking: true,
+  })
+
+  const deserialize = (data: (Omit<FileCacheEntry, 'size'> & { size?: string }) | null) => {
+    if (!data) {
+      return null
+    }
+
+    return {
+      ...data,
+      size: BigInt(data.size ?? 0),
+    }
+  }
+
+  const get = async (cid: string): Promise<FileResponse | null> => {
+    const start = performance.now()
+    const data: UncheckedFileCacheEntry = deserialize(
+      await filepathCache.get(cid),
+    )
+    const end = performance.now()
+    logger.debug(`Getting file cache entry for ${cid} took ${end - start}ms`)
+    if (!data) {
+      return null
+    }
+
+    const filePath = cidToFilePath(cid)
+
+    return {
+      ...data,
+      data: fs.createReadStream(filePath),
+    }
+  }
+
+  const set = async (cid: string, fileResponse: FileResponse) => {
+    const filePath = cidToFilePath(cid)
+
+    const { data, ...rest } = fileResponse
+
+    const start = performance.now()
+    const cachePromise = filepathCache
+      .set(cid, {
+        ...rest,
+      })
+      .then(() => {
+        const end = performance.now()
+        logger.debug(`Caching file for ${cid} took ${end - start}ms`)
+      })
+
+    const start2 = performance.now()
+    const writePromise = writeFile(filePath, data).then(() => {
+      const end2 = performance.now()
+      logger.debug(`Writing file to cache for ${cid} took ${end2 - start2}ms`)
+    })
+
+    await Promise.all([cachePromise, writePromise])
+  }
+
+  const remove = async (cid: string) => {
+    const data: UncheckedFileCacheEntry = deserialize(
+      await filepathCache.get(cid),
+    )
+    if (!data) {
+      return
+    }
+
+    const filePath = cidToFilePath(cid)
+    await Promise.all([filepathCache.del(cid), fsPromises.rm(filePath, { force: true })]) // force: the 'del' listener below also removes the file
+  }
+
+  filepathCache.on('del', async ({ key, error }) => {
+    if (error) {
+      logger.error(`Error deleting file cache entry for ${key}: ${error}`)
+    } else {
+      await fsPromises.rm(cidToFilePath(key), { force: true })
+    }
+  })
+
+  const clear = async () => {
+    await filepathCache.clear()
+    await fsPromises.rm(config.cacheDir, { recursive: true, force: true })
+  }
+
+  const cache = {
+    get,
+    set,
+    remove,
+    clear,
+  }
+
+  return cache
+}
diff --git a/backend/src/services/download/fsCache/singleton.ts b/backend/src/services/download/fsCache/singleton.ts
new file mode 100644
index 00000000..1dc4821d
--- /dev/null
+++ b/backend/src/services/download/fsCache/singleton.ts
@@ -0,0 +1,32 @@
+import { createFileCache } from './index.js'
+import { ensureDirectoryExists } from '../../../utils/fs.js'
+import path from 'path'
+import { config } from '../../../config.js'
+import { Keyv } from 'keyv'
+import KeyvSqlite from '@keyvhq/sqlite'
+import { LRUCache } from 'lru-cache'
+
+export const fsCache = createFileCache({
+  cacheDir: ensureDirectoryExists(path.join(config.cacheDir, 'files')),
+  pathPartitions: 3,
+  stores: [
+    new Keyv({
+      serialize: (value) => JSON.stringify(value, (_, v) => (typeof v === 'bigint' ? v.toString() : v)), // size is a bigint, which plain JSON.stringify rejects
+      store: new LRUCache({
+        maxSize: config.cacheMaxSize,
+        maxEntrySize: Number.MAX_SAFE_INTEGER,
+        sizeCalculation: (value) => {
+          const { value: parsedValue } = JSON.parse(value)
+          return Number(parsedValue?.size ?? 0)
+        },
+      }),
+    }),
+    new Keyv({
+      store: new KeyvSqlite({
+        uri: path.join(ensureDirectoryExists(config.cacheDir), 'files.sqlite'),
+      }),
+      ttl: config.cacheTtl,
+      serialize: (value) => JSON.stringify(value, (_, v) => (typeof v === 'bigint' ? v.toString() : v)),
+    }),
+  ],
+})
diff --git a/backend/src/services/download/fsCache/types.ts b/backend/src/services/download/fsCache/types.ts
new file mode 100644
index 00000000..11a84269
--- /dev/null
+++ b/backend/src/services/download/fsCache/types.ts
@@ -0,0 +1,22 @@
+import { Stream } from 'stream'
+import { Keyv } from 'keyv'
+
+export interface BaseCacheConfig {
+  pathPartitions: number
+  cacheDir: string
+  stores: Keyv[]
+}
+
+export interface FileCache {
+  get: (cid: string) => Promise<FileResponse | null>
+  set: (cid: string, data: Buffer | Stream) => Promise<void>
+  remove: (cid: string) => Promise<void>
+}
+
+export type FileResponse = {
+  data: AsyncIterable<Buffer>
+  mimeType?: string
+  filename?: string
+  size?: bigint
+  encoding?: string
+}
diff --git a/backend/src/services/download/index.ts b/backend/src/services/download/index.ts
index 3c621ec9..8469edeb 100644
--- a/backend/src/services/download/index.ts
+++ b/backend/src/services/download/index.ts
@@ -1,21 +1,27 @@
 import { logger } from '../../drivers/logger.js'
 import { FilesUseCases, ObjectUseCases } from '../../useCases/index.js'
-import { databaseDownloadCache } from './databaseDownloadCache/index.js'
 import { memoryDownloadCache } from './memoryDownloadCache/index.js'
 import { AwaitIterable } from 'interface-store'
+import { fsCache } from './fsCache/singleton.js'
+import { forkAsyncIterable } from '../../utils/async.js'
 
 export const downloadService = {
   download: async (cid: string): Promise<AwaitIterable<Buffer>> => {
-    if (memoryDownloadCache.has(cid)) {
+    const file = memoryDownloadCache.get(cid)
+    if (file != null) {
       logger.debug('Downloading file from memory', cid)
-      return memoryDownloadCache.get(cid)!
+      const [stream1, stream2] = await forkAsyncIterable(file)
+      await memoryDownloadCache.set(cid, stream1)
+
+      return stream2
     }
 
-    if (await databaseDownloadCache.has(cid)) {
-      logger.debug('Downloading file from database', cid)
-      let data = databaseDownloadCache.get(cid)!
-      data = memoryDownloadCache.set(cid, data)
-      return data
+    const cachedFile = await fsCache.get(cid)
+    if (cachedFile != null) {
+      logger.debug('Reading file from file system cache', cid)
+      const [stream1, stream2] = await forkAsyncIterable(cachedFile.data)
+      await memoryDownloadCache.set(cid, stream1)
+      return stream2
     }
 
     const metadata = await ObjectUseCases.getMetadata(cid)
@@ -23,12 +29,16 @@ export const downloadService = {
     if (!metadata) {
       throw new Error('Not found')
     }
 
-    let data = await FilesUseCases.retrieveObject(metadata)
-
-    data = await databaseDownloadCache.set(cid, data, metadata.totalSize)
+    const data = await FilesUseCases.retrieveObject(metadata)
 
-    data = memoryDownloadCache.set(cid, data)
+    const [stream1, stream2] = await forkAsyncIterable(data)
+    await fsCache.set(cid, {
+      data: stream1,
+      size: metadata.totalSize,
+    })
+    const [stream3, stream4] = await forkAsyncIterable(stream2)
+    await memoryDownloadCache.set(cid, stream3)
 
-    return data
+    return stream4
   },
 }
diff --git a/backend/src/services/download/memoryDownloadCache/index.ts b/backend/src/services/download/memoryDownloadCache/index.ts
index 796456ff..ab594832 100644
--- a/backend/src/services/download/memoryDownloadCache/index.ts
+++ b/backend/src/services/download/memoryDownloadCache/index.ts
@@ -1,5 +1,8 @@
 import { LRUCache } from 'lru-cache'
-import { bufferToAsyncIterable } from '../../../utils/async.js'
+import {
+  asyncIterableToBuffer,
+  bufferToAsyncIterable,
+} from '../../../utils/async.js'
 import { AwaitIterable } from 'interface-store'
 import { config } from '../../../config.js'
 
@@ -21,15 +24,11 @@ const get = (cid: string) => {
   return bufferToAsyncIterable(value)
 }
 
-const set = async function* (
+const set = async (
   cid: string,
   value: AwaitIterable<Buffer>,
-): AsyncIterable<Buffer> {
-  let buffer = Buffer.alloc(0)
-  for await (const chunk of value) {
-    buffer = Buffer.concat([buffer, chunk])
-    yield chunk
-  }
+): Promise<void> => {
+  const buffer = await asyncIterableToBuffer(value)
 
   if (buffer.length > 0) {
     cache.set(cid, buffer, {
diff --git a/backend/src/utils/async.ts b/backend/src/utils/async.ts
index e96ade0a..aebec51a 100644
--- a/backend/src/utils/async.ts
+++ b/backend/src/utils/async.ts
@@ -1,4 +1,5 @@
 import { AwaitIterable } from 'interface-store'
+import { PassThrough, Readable } from 'stream'
 
 export const asyncIterableMap = async <T, R>(
   iterable: AwaitIterable<T>,
@@ -48,6 +49,16 @@ export const bufferToAsyncIterable = (
   })()
 }
 
+export const asyncIterableToBuffer = async (
+  iterable: AwaitIterable<Buffer>,
+): Promise<Buffer> => {
+  let buffer = Buffer.alloc(0)
+  for await (const chunk of iterable) {
+    buffer = Buffer.concat([buffer, chunk])
+  }
+  return buffer
+}
+
 export const asyncByChunk = async function* (
   iterable: AwaitIterable<Buffer>,
   chunkSize: number,
@@ -66,3 +77,24 @@
     yield accumulated
   }
 }
+
+export async function forkAsyncIterable(
+  asyncIterable: AwaitIterable<Buffer>,
+): Promise<[Readable, Readable]> {
+  const passThrough1 = new PassThrough()
+  const passThrough2 = new PassThrough()
+
+  ;(async () => {
+    for await (const chunk of asyncIterable) {
+      passThrough1.write(chunk)
+      passThrough2.write(chunk)
+    }
+    passThrough1.end()
+    passThrough2.end()
+  })().catch((err) => {
+    // destroy both forks so consumers see the failure instead of a stream that never ends
+    passThrough1.destroy(err)
+    passThrough2.destroy(err)
+  })
+
+  return [passThrough1, passThrough2]
+}
diff --git a/backend/src/utils/fs.ts b/backend/src/utils/fs.ts
new file mode 100644
index 00000000..c9462ca7
--- /dev/null
+++ b/backend/src/utils/fs.ts
@@ -0,0 +1,28 @@
+import fsPromises from 'fs/promises'
+import fs from 'fs'
+import path from 'path'
+
+export const writeFile = async (
+  filepath: string,
+  data: AsyncIterable<Buffer>,
+  ensureDirectoryExistence: boolean = true,
+) => {
+  const tempFilePath = `${filepath}.tmp`
+
+  if (ensureDirectoryExistence) {
+    await fsPromises.mkdir(path.dirname(tempFilePath), { recursive: true })
+  }
+
+  await fsPromises.writeFile(tempFilePath, data) // write to a temp file first, then rename, so readers never observe a partial file
+  await fsPromises.rename(tempFilePath, filepath)
+}
+
+export const ensureDirectoryExists = (dir: string) => {
+  fs.mkdirSync(dir, { recursive: true })
+  return dir
+}
+
+export const asyncEnsureDirectoryExists = async (dir: string) => {
+  await fsPromises.mkdir(dir, { recursive: true })
+  return dir
+}
diff --git a/backend/yarn.lock b/backend/yarn.lock
index e1986f85..df81691d 100644
--- a/backend/yarn.lock
+++ b/backend/yarn.lock
@@ -424,6 +424,13 @@
     wrap-ansi "^8.1.0"
     wrap-ansi-cjs "npm:wrap-ansi@^7.0.0"
 
+"@isaacs/fs-minipass@^4.0.0":
+  version "4.0.1"
+  resolved "https://registry.yarnpkg.com/@isaacs/fs-minipass/-/fs-minipass-4.0.1.tgz#2d59ae3ab4b38fb4270bfa23d30f8e2e86c7fe32"
+  integrity sha512-wgm9Ehl2jpeqP3zw/7mo3kRHFp5MEDhqAdwy1fTGkHAwnkGOVsgpvQhL8B5n1qlb01jV3n/bI0ZfZp5lWA1k4w==
+  dependencies:
+    minipass "^7.0.4"
+
 "@istanbuljs/load-nyc-config@^1.0.0":
   version "1.1.0"
   resolved "https://registry.yarnpkg.com/@istanbuljs/load-nyc-config/-/load-nyc-config-1.1.0.tgz#fd3db1d59ecf7cf121e80650bb86712f9b55eced"
@@ -686,6 +693,29 @@
   dependencies:
     lodash "^4.17.21"
 
+"@keyv/serialize@^1.0.2":
+  version "1.0.2"
+  resolved "https://registry.yarnpkg.com/@keyv/serialize/-/serialize-1.0.2.tgz#72507c4be94d8914434a4aa80661f8ac6131967f"
+  integrity sha512-+E/LyaAeuABniD/RvUezWVXKpeuvwLEA9//nE9952zBaOdBd2mQ3pPoM8cUe2X6IcMByfuSLzmYqnYshG60+HQ==
+  dependencies:
+    buffer "^6.0.3"
+
+"@keyvhq/sql@^2.1.6":
+  version "2.1.6"
+  resolved "https://registry.yarnpkg.com/@keyvhq/sql/-/sql-2.1.6.tgz#f4932129bcf75195cdc3e3593999b7ba78ed18eb"
+  integrity sha512-eWE5+Wj218JA/do66CcETFtVAsk7bT6U6ke7gzUyGlw6v6aEQn+HKQJdxo4ijqP6izg7aiec8xJdIJb14Ru9MQ==
+  dependencies:
+    sql-ts "7"
+
+"@keyvhq/sqlite@^2.1.6":
+  version "2.1.6"
+  resolved "https://registry.yarnpkg.com/@keyvhq/sqlite/-/sqlite-2.1.6.tgz#7f28978bb6b7017988d4f16a536bfd1f282e2660"
+  integrity sha512-m1IBJFIVFMBSTDi8VMYrT0OksAM9qDbXOGse4+1FXDtrDTIJMPU0D1IuLb3w8K3MzblUECwZ1AtvJmZ9w2xifg==
+  dependencies:
+    "@keyvhq/sql" "^2.1.6"
+    "@vscode/sqlite3" "~5.1.2"
+    pify "~5.0.0"
+
 "@leichtgewicht/ip-codec@^2.0.1":
   version "2.0.5"
   resolved "https://registry.yarnpkg.com/@leichtgewicht/ip-codec/-/ip-codec-2.0.5.tgz#4fc56c15c580b9adb7dc3c333a134e540b44bfb1"
@@ -1850,6 +1880,13 @@
   resolved "https://registry.yarnpkg.com/@ungap/structured-clone/-/structured-clone-1.3.0.tgz#d06bbb384ebcf6c505fde1c3d0ed4ddffe0aaff8"
   integrity sha512-WmoN8qaIAo7WTYWbAZuG8PYEhn5fkz7dZrqTBZ7dtt//lL2Gwms1IcnQ5yHqjDfX8Ft5j4YzDM23f87zBfDe9g==
 
+"@vscode/sqlite3@~5.1.2":
+  version "5.1.2"
+  resolved "https://registry.yarnpkg.com/@vscode/sqlite3/-/sqlite3-5.1.2.tgz#3b22f2bd8681a7193f2169b407ae364b7b2c2dec"
+  integrity sha512-XE2p2bqBEBZkbLxPcdRwwsyhgttbkLiD3LljJPnIuShjum5ZC50DoheFlKzEN5mWkZBY5rrfspnXDYb8amzB1w==
+  dependencies:
+    node-addon-api "^4.2.0"
+
 "@webbuf/blake3@^3.0.26":
   version "3.0.26"
   resolved "https://registry.yarnpkg.com/@webbuf/blake3/-/blake3-3.0.26.tgz#c499cc6a51296a8747bd2b45ef339e57a167507b"
@@ -2347,6 +2384,13 @@ bytes@3.1.2:
   resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.1.2.tgz#8b0beeb98605adf1b128fa4386403c009e0221a5"
   integrity sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==
 
+cache-manager@^6.4.0:
+  version "6.4.0"
+  resolved
"https://registry.yarnpkg.com/cache-manager/-/cache-manager-6.4.0.tgz#a75b86f080d69582deb37230041bc7acdef2c9f0" + integrity sha512-eUmPyVqQYzWCt7hx1QrYzQ7oC3MGKM1etxxe8zuq1o7IB4NzdBeWcUGDSWYahaI8fkd538SEZRGadyZWQfvOzQ== + dependencies: + keyv "^5.2.3" + call-bind-apply-helpers@^1.0.0, call-bind-apply-helpers@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.1.tgz#32e5892e6361b29b0b545ba6f7763378daca2840" @@ -2418,6 +2462,11 @@ chownr@^1.1.1: resolved "https://registry.yarnpkg.com/chownr/-/chownr-1.1.4.tgz#6fc9d7b42d32a583596337666e7d08084da2cc6b" integrity sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg== +chownr@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/chownr/-/chownr-3.0.0.tgz#9855e64ecd240a9cc4267ce8a4aa5d24a1da15e4" + integrity sha512-+IxzY9BZOQd/XuYPRmrvEVjF/nqj5kgT4kEq7VofrDoM1MxoRjEWkrCC3EtLi59TVawxTAn+orJwFQcrqEN1+g== + ci-info@^3.2.0: version "3.9.0" resolved "https://registry.yarnpkg.com/ci-info/-/ci-info-3.9.0.tgz#4279a62028a7b1f262f3473fc9605f5e218c59b4" @@ -3496,7 +3545,7 @@ glob-parent@^6.0.2: dependencies: is-glob "^4.0.3" -glob@^10.0.0: +glob@^10.0.0, glob@^10.3.7: version "10.4.5" resolved "https://registry.yarnpkg.com/glob/-/glob-10.4.5.tgz#f4d9f0b90ffdbab09c9d77f5f29b4262517b0956" integrity sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg== @@ -4435,6 +4484,13 @@ keyv@^4.5.3: dependencies: json-buffer "3.0.1" +keyv@^5.2.1, keyv@^5.2.3: + version "5.2.3" + resolved "https://registry.yarnpkg.com/keyv/-/keyv-5.2.3.tgz#32db1a4aa8d05e2b8ab82688a57ddc5d2184a25c" + integrity sha512-AGKecUfzrowabUv0bH1RIR5Vf7w+l4S3xtQAypKaUpTdIR1EbrAcTxHCrpo9Q+IWeUlFE2palRtgIQcgm+PQJw== + dependencies: + "@keyv/serialize" "^1.0.2" + klaw@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/klaw/-/klaw-3.0.0.tgz#b11bec9cf2492f06756d6e809ab73a2910259146" @@ -4585,7 +4641,7 @@ long@^5.0.0: resolved "https://registry.yarnpkg.com/long/-/long-5.2.4.tgz#ee651d5c7c25901cfca5e67220ae9911695e99b2" integrity sha512-qtzLbJE8hq7VabR3mISmVGtoXP8KGc2Z/AT8OuqlYD7JTR3oqrgwdjnk07wpj1twXxYmgDXgoKVWUG/fReSzHg== -lru-cache@*: +lru-cache@*, lru-cache@^11.0.2: version "11.0.2" resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-11.0.2.tgz#fbd8e7cf8211f5e7e5d91905c415a3f55755ca39" integrity sha512-123qHRfJBmo2jXDbo/a5YOQrJoHF/GNQTLzQ5+IdK5pWpceK17yRc6ozlWd25FxvGKQbIUs91fDFkXmDHTKcyA== @@ -4739,11 +4795,19 @@ minimist@^1.2.0, minimist@^1.2.6: resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.8.tgz#c1a464e7693302e082a075cee0c057741ac4772c" integrity sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA== -"minipass@^5.0.0 || ^6.0.2 || ^7.0.0", minipass@^7.1.2: +"minipass@^5.0.0 || ^6.0.2 || ^7.0.0", minipass@^7.0.4, minipass@^7.1.2: version "7.1.2" resolved "https://registry.yarnpkg.com/minipass/-/minipass-7.1.2.tgz#93a9626ce5e5e66bd4db86849e7515e92340a707" integrity sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw== +minizlib@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/minizlib/-/minizlib-3.0.1.tgz#46d5329d1eb3c83924eff1d3b858ca0a31581012" + integrity sha512-umcy022ILvb5/3Djuu8LWeqUa8D68JaBzlttKeMWen48SjabqS3iY5w/vzeMzMUNhLDifyhbOwKDSznB1vvrwg== + dependencies: + minipass "^7.0.4" + rimraf "^5.0.5" + mkdirp-classic@^0.5.2: version "0.5.3" resolved 
"https://registry.yarnpkg.com/mkdirp-classic/-/mkdirp-classic-0.5.3.tgz#fa10c9115cc6d8865be221ba47ee9bed78601113" @@ -4761,6 +4825,11 @@ mkdirp@^1.0.4: resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-1.0.4.tgz#3eb5ed62622756d79a5f0e2a221dfebad75c2f7e" integrity sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw== +mkdirp@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-3.0.1.tgz#e44e4c5607fb279c168241713cc6e0fea9adcb50" + integrity sha512-+NsyUUAZDmo6YVHzL/stxSu3t9YS1iljliy3BSDrXJ/dkn1KYdmtZODGGjLcc9XLgVVpH4KshHB8XmZgMhaBXg== + mock-socket@^9.3.1: version "9.3.1" resolved "https://registry.yarnpkg.com/mock-socket/-/mock-socket-9.3.1.tgz#24fb00c2f573c84812aa4a24181bb025de80cc8e" @@ -4838,6 +4907,11 @@ nock@^13.5.4: json-stringify-safe "^5.0.1" propagate "^2.0.0" +node-addon-api@^4.2.0: + version "4.3.0" + resolved "https://registry.yarnpkg.com/node-addon-api/-/node-addon-api-4.3.0.tgz#52a1a0b475193e0928e98e0426a0d1254782b77f" + integrity sha512-73sE9+3UaLYYFmDsFZnqCInzPyh3MqIwZO9cw58yIqAZhONrrabrYyYe3TuIqtIiOuTXVhsGau8hcrhhwSsDIQ== + node-domexception@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/node-domexception/-/node-domexception-1.0.0.tgz#6888db46a1f71c0b76b3f7555016b63fe64766e5" @@ -5168,6 +5242,11 @@ picomatch@^2.0.4, picomatch@^2.2.3, picomatch@^2.3.1: resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.3.1.tgz#3ba3833733646d9d3e4995946c1365a67fb07a42" integrity sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA== +pify@~5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/pify/-/pify-5.0.0.tgz#1f5eca3f5e87ebec28cc6d54a0e4aaf00acc127f" + integrity sha512-eW/gHNMlxdSP6dmG6uJip6FXN0EQBwm2clYYd8Wul42Cwu/DK8HEftzsapcNdYe2MfLiIwZqsDk2RDEsTE79hA== + pirates@^4.0.4: version "4.0.6" resolved "https://registry.yarnpkg.com/pirates/-/pirates-4.0.6.tgz#3018ae32ecfcff6c29ba2267cbf21166ac1f36b9" @@ -5596,6 +5675,13 @@ rimraf@^3.0.2: dependencies: glob "^7.1.3" +rimraf@^5.0.5: + version "5.0.10" + resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-5.0.10.tgz#23b9843d3dc92db71f96e1a2ce92e39fd2a8221c" + integrity sha512-l0OE8wL34P4nJH/H2ffoaniAokM2qSmrtXHmlpvYr5AVVX8msAyW0l8NVJFDxlSK4u3Uh/f41cQheDVdnYijwQ== + dependencies: + glob "^10.3.7" + run-parallel@^1.1.9: version "1.2.0" resolved "https://registry.yarnpkg.com/run-parallel/-/run-parallel-1.2.0.tgz#66d1368da7bdf921eb9d95bd1a9229e7f21a43ee" @@ -5804,6 +5890,11 @@ slash@^3.0.0: resolved "https://registry.yarnpkg.com/slash/-/slash-3.0.0.tgz#6539be870c165adbd5240220dbe361f1bc4d4634" integrity sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q== +sliced@~1.0.x: + version "1.0.1" + resolved "https://registry.yarnpkg.com/sliced/-/sliced-1.0.1.tgz#0b3a662b5d04c3177b1926bea82b03f837a2ef41" + integrity sha512-VZBmZP8WU3sMOZm1bdgTadsQbcscK0UM8oKxKVBs4XAhUo2Xxzm/OFMGBkPusxw9xL3Uy8LrzEqGqJhclsr0yA== + smoldot@2.0.26: version "2.0.26" resolved "https://registry.yarnpkg.com/smoldot/-/smoldot-2.0.26.tgz#0e64c7fcd26240fbe4c8d6b6e4b9a9aca77e00f6" @@ -5839,6 +5930,14 @@ sprintf-js@~1.0.2: resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c" integrity sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g== +sql-ts@7: + version "7.1.0" + resolved "https://registry.yarnpkg.com/sql-ts/-/sql-ts-7.1.0.tgz#868f37b87adc2a865ca79a02830aa15861af998b" + integrity 
sha512-1BZdFfFqPJ2m0Ah8KDm04Pc/MVoQP+3CLhcTaCgba2kJ5eZ/A19AmSEmJrHmD30Ciu7FRguGhNoj8RhZT0r9Dw== + dependencies: + lodash "^4.17.21" + sliced "~1.0.x" + ssh-remote-port-forward@^1.0.4: version "1.0.4" resolved "https://registry.yarnpkg.com/ssh-remote-port-forward/-/ssh-remote-port-forward-1.0.4.tgz#72b0c5df8ec27ca300c75805cc6b266dee07e298" @@ -6072,6 +6171,18 @@ tar-stream@^3.0.0, tar-stream@^3.1.5: fast-fifo "^1.2.0" streamx "^2.15.0" +tar@^7.4.3: + version "7.4.3" + resolved "https://registry.yarnpkg.com/tar/-/tar-7.4.3.tgz#88bbe9286a3fcd900e94592cda7a22b192e80571" + integrity sha512-5S7Va8hKfV7W5U6g3aYxXmlPoZVAwUMy9AOKyF2fVuZa2UD3qZjg578OrLRt8PcNN1PleVaL/5/yYATNL0ICUw== + dependencies: + "@isaacs/fs-minipass" "^4.0.0" + chownr "^3.0.0" + minipass "^7.1.2" + minizlib "^3.0.1" + mkdirp "^3.0.1" + yallist "^5.0.0" + test-exclude@^6.0.0: version "6.0.0" resolved "https://registry.yarnpkg.com/test-exclude/-/test-exclude-6.0.0.tgz#04a8698661d805ea6fa293b6cb9e63ac044ef15e" @@ -6614,6 +6725,11 @@ yallist@^3.0.2: resolved "https://registry.yarnpkg.com/yallist/-/yallist-3.1.1.tgz#dbb7daf9bfd8bac9ab45ebf602b8cbad0d5d08fd" integrity sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g== +yallist@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/yallist/-/yallist-5.0.0.tgz#00e2de443639ed0d78fd87de0d27469fbcffb533" + integrity sha512-YgvUTfwqyc7UXVMrB+SImsVYSmTS8X/tSrtdNZMImM+n7+QTriRXyXim0mBrTXNeqzVF0KWGgHPeiyViFFrNDw== + yaml@^2.2.2: version "2.7.0" resolved "https://registry.yarnpkg.com/yaml/-/yaml-2.7.0.tgz#aef9bb617a64c937a9a748803786ad8d3ffe1e98"