Skip to content

Commit

Permalink
Merge pull request #149 from autonomys/update-persistent-cache
Browse files Browse the repository at this point in the history
Update persistent file cache
  • Loading branch information
clostao authored Jan 27, 2025
2 parents cbf9b3d + 67fd440 commit ab17248
Show file tree
Hide file tree
Showing 18 changed files with 472 additions and 266 deletions.
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -37,5 +37,6 @@ coverage/
*.tmp
*.temp

.cache/
database.sqlite
.cursorrules
12 changes: 6 additions & 6 deletions backend/__tests__/e2e/uploads/files.spec.ts
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,6 @@ import {
nodesRepository,
} from '../../../src/repositories/index.js'
import { InteractionType } from '../../../src/models/objects/interactions.js'
import { databaseDownloadCache } from '../../../src/services/download/databaseDownloadCache/index.js'
import { memoryDownloadCache } from '../../../src/services/download/memoryDownloadCache/index.js'
import {
OwnerRole,
Expand All @@ -38,6 +37,7 @@ import {
import { FileGateway } from '../../../src/services/dsn/fileGateway/index.js'
import { jest } from '@jest/globals'
import { downloadService } from '../../../src/services/download/index.js'
import { fsCache } from '../../../src/services/download/fsCache/singleton.js'
import { handleArchivedObjects } from '../../../src/services/upload/nodeRemover/index.js'

const files = [
Expand Down Expand Up @@ -223,10 +223,10 @@ files.map((file, index) => {
})

it('download cache should be updated', async () => {
const asyncFromDatabase = await databaseDownloadCache.get(cid)
const asyncFromDatabase = await fsCache.get(cid)
expect(asyncFromDatabase).not.toBeNull()
const fileArrayFromDatabase = await asyncIterableToPromiseOfArray(
asyncFromDatabase!,
asyncFromDatabase!.data,
)
const fileBufferFromDatabase = Buffer.concat(fileArrayFromDatabase)
expect(fileBufferFromDatabase).toEqual(rndBuffer)
Expand Down Expand Up @@ -323,11 +323,11 @@ files.map((file, index) => {
expect(metadata?.is_archived).toBe(true)

expect(memoryDownloadCache.has(cid)).toBe(true)
expect(await databaseDownloadCache.has(cid)).toBe(true)
expect(fsCache.get(cid)).not.toBeNull()
})

it('should be able to downloaded from gateway', async () => {
await databaseDownloadCache.clear()
it('should be able to remove the nodes', async () => {
await fsCache.clear()
await memoryDownloadCache.clear()

const downloadFileMock = jest
Expand Down
53 changes: 53 additions & 0 deletions backend/migrations/20250127141651-delete-download-cache-schema.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,53 @@
'use strict';

var dbm;
var type;
var seed;
var fs = require('fs');
var path = require('path');
var Promise;

/**
* We receive the dbmigrate dependency from dbmigrate initially.
* This enables us to not have to rely on NODE_PATH.
*/
exports.setup = function(options, seedLink) {
dbm = options.dbmigrate;
type = dbm.dataType;
seed = seedLink;
Promise = options.Promise;
};

exports.up = function(db) {
var filePath = path.join(__dirname, 'sqls', '20250127141651-delete-download-cache-schema-up.sql');
return new Promise( function( resolve, reject ) {
fs.readFile(filePath, {encoding: 'utf-8'}, function(err,data){
if (err) return reject(err);
console.log('received data: ' + data);

resolve(data);
});
})
.then(function(data) {
return db.runSql(data);
});
};

exports.down = function(db) {
var filePath = path.join(__dirname, 'sqls', '20250127141651-delete-download-cache-schema-down.sql');
return new Promise( function( resolve, reject ) {
fs.readFile(filePath, {encoding: 'utf-8'}, function(err,data){
if (err) return reject(err);
console.log('received data: ' + data);

resolve(data);
});
})
.then(function(data) {
return db.runSql(data);
});
};

exports._meta = {
"version": 1
};
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
-- Down migration: recreates the legacy download_cache schema removed by the
-- matching up script (the cache was moved out of the database in this PR).
-- NOTE(review): the down/up pairing is inferred from the migration script's
-- file naming convention — confirm against the actual sqls/ filenames.
CREATE SCHEMA IF NOT EXISTS download_cache;

-- Cached file contents, stored in chunks: one row per (cid, chunk index).
CREATE TABLE IF NOT EXISTS download_cache.file_parts (
"cid" TEXT,
-- "index" is quoted because it is a reserved word in SQL.
"index" INTEGER NOT NULL,
"data" BYTEA NOT NULL,
PRIMARY KEY (cid, index)
);

-- Per-CID bookkeeping: last access time and total size (used for eviction).
CREATE TABLE IF NOT EXISTS download_cache.registry (
"cid" TEXT PRIMARY KEY,
"last_accessed_at" TIMESTAMP WITH TIME ZONE NOT NULL,
"size" BIGINT NOT NULL
);
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
-- Up migration: removes the legacy database-backed download cache
-- (replaced by the filesystem cache in this PR).
-- Tables are dropped before the schema so the plain (non-CASCADE)
-- DROP SCHEMA succeeds on an empty schema.
DROP TABLE IF EXISTS download_cache.file_parts;
DROP TABLE IF EXISTS download_cache.registry;
DROP SCHEMA IF EXISTS download_cache;
5 changes: 5 additions & 0 deletions backend/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -15,22 +15,27 @@
"dependencies": {
"@autonomys/auto-dag-data": "^1.0.8",
"@autonomys/auto-drive": "^1.0.5",
"@keyvhq/sqlite": "^2.1.6",
"@polkadot/api": "^12.3.1",
"@polkadot/types": "^13.0.1",
"@polkadot/util-crypto": "^13.0.2",
"aws-sdk": "^2.1692.0",
"body-parser": "^1.20.2",
"cache-manager": "^6.4.0",
"cors": "^2.8.5",
"db-migrate": "^0.11.14",
"db-migrate-pg": "^1.5.2",
"dotenv": "^16.4.5",
"express": "^4.19.2",
"jsonwebtoken": "^9.0.2",
"keyv": "^5.2.1",
"lru-cache": "^11.0.2",
"multer": "^1.4.5-lts.1",
"multiformats": "^13.2.2",
"pg": "^8.13.0",
"pg-format": "^1.0.4",
"pizzip": "^3.1.7",
"tar": "^7.4.3",
"uuid": "^10.0.0",
"websocket": "^1.0.35",
"winston": "^3.17.0",
Expand Down
20 changes: 8 additions & 12 deletions backend/src/config.ts
Original file line number Diff line number Diff line change
@@ -1,8 +1,10 @@
import { env } from './utils/misc.js'

const DEFAULT_CHUNK_SIZE = 10 * 1024 ** 2
const DEFAULT_MAX_CACHE_SIZE = BigInt(10 * 1024 ** 3)

const DEFAULT_CACHE_MAX_SIZE = 10 * 1024 ** 3 // 10GB
const DEFAULT_CACHE_TTL = 1000000 // 1000000 seconds

export const config = {
logLevel: env('LOG_LEVEL', 'info'),
postgres: {
Expand All @@ -12,17 +14,6 @@ export const config = {
requestSizeLimit: env('REQUEST_SIZE_LIMIT', '200mb'),
corsAllowedOrigins: process.env.CORS_ALLOWED_ORIGINS,
rpcEndpoint: env('RPC_ENDPOINT', 'ws://localhost:9944'),
databaseDownloadCache: {
chunkSize: Number(
env('DATABASE_DOWNLOAD_CACHE_CHUNK_SIZE', DEFAULT_CHUNK_SIZE.toString()),
),
maxCacheSize: BigInt(
env(
'DATABASE_DOWNLOAD_CACHE_MAX_SIZE',
DEFAULT_MAX_CACHE_SIZE.toString(),
),
),
},
memoryDownloadCache: {
maxCacheSize: Number(
env('MEMORY_DOWNLOAD_CACHE_MAX_SIZE', DEFAULT_MAX_CACHE_SIZE.toString()),
Expand All @@ -36,4 +27,9 @@ export const config = {
url: env('AUTH_SERVICE_URL', 'http://localhost:3030'),
token: env('AUTH_SERVICE_API_KEY'),
},
cacheDir: env('CACHE_DIR', './.cache'),
cacheMaxSize: Number(
env('CACHE_MAX_SIZE', DEFAULT_CACHE_MAX_SIZE.toString()),
),
cacheTtl: Number(env('CACHE_TTL', DEFAULT_CACHE_TTL.toString())),
}
58 changes: 0 additions & 58 deletions backend/src/repositories/cache/fileParts.ts

This file was deleted.

74 changes: 0 additions & 74 deletions backend/src/repositories/cache/registry.ts

This file was deleted.

Loading

0 comments on commit ab17248

Please sign in to comment.