Skip to content

Commit

Permalink
fix: a lot of stuff (#683)
Browse files Browse the repository at this point in the history
* fix: No more infinite loading button! :D

* chore: buhbai version!

* chore: update browserlist db

* fix: a totp secret that /probably/ shouldn't be revealed

* fix: revert range getting for datasource

* chore: a line lost! :O

* chore: this probably should've been ignored for a long while

* fix: Don't compress webm or webp. They go breaky

* fix: issue 659, it was the wrong statusCode to look for

* fix: I'll just regex it.

* fix: let s3 in on the fun with partial uploads

* chore&fix: they're files now :3 & unlock video and/or audio files

* fix: Maybe prisma plugin needs a return?

* fix: super focused regex this time :D

* I guess this works? So cool :O

* fix: bad id check

* fix: Byte me! >:3

* fix: add password bool to file's prop

* fix(?): this might fix some people's weird errors.

* chore: I discovered more typing

* fix: stats logger

* fix(?): await the registers

* chore: typeer typer

* fix: This looks to properly fix issue 659. I dunno how, don't ask

* More like uglier >:(

* fix: actions don't like dis

* fix: ranged requests handled properly

* feat: remove supabase datasource

---------

Co-authored-by: diced <[email protected]>
  • Loading branch information
TacticalTechJay and diced authored Feb 3, 2025
1 parent 41e197e commit 1febd5a
Show file tree
Hide file tree
Showing 37 changed files with 296 additions and 370 deletions.
9 changes: 1 addition & 8 deletions .env.local.example
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
# every field in here is optional except, CORE_SECRET and CORE_DATABASE_URL.
# if CORE_SECRET is still "changethis" then zipline will exit and tell you to change it.

# if using s3/supabase make sure to uncomment or comment out the correct lines needed.
# if using s3 make sure to uncomment or comment out the correct lines needed.

CORE_RETURN_HTTPS=true
CORE_SECRET="changethis"
Expand All @@ -27,13 +27,6 @@ DATASOURCE_LOCAL_DIRECTORY=./uploads
# DATASOURCE_S3_FORCE_S3_PATH=false
# DATASOURCE_S3_USE_SSL=false

# or supabase
# DATASOURCE_TYPE=supabase
# DATASOURCE_SUPABASE_KEY=xxx
# remember: no leading slash
# DATASOURCE_SUPABASE_URL=https://something.supabase.co
# DATASOURCE_SUPABASE_BUCKET=zipline

UPLOADER_DEFAULT_FORMAT=RANDOM
UPLOADER_ROUTE=/u
UPLOADER_LENGTH=6
Expand Down
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,7 @@ yarn-debug.log*
yarn-error.log*

# local env files
.env
.env.local
.env.development.local
.env.test.local
Expand Down
1 change: 0 additions & 1 deletion docker-compose.dev.yml
Original file line number Diff line number Diff line change
@@ -1,4 +1,3 @@
version: '3'
services:
postgres:
image: postgres:15
Expand Down
1 change: 0 additions & 1 deletion docker-compose.yml
Original file line number Diff line number Diff line change
@@ -1,4 +1,3 @@
version: '3'
services:
postgres:
image: postgres:15
Expand Down
2 changes: 1 addition & 1 deletion src/components/pages/Users/UserFiles.tsx
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
import { ActionIcon, Button, Center, Group, SimpleGrid, Title } from '@mantine/core';
import { File } from '@prisma/client';
import type { File } from '@prisma/client';
import { IconArrowLeft, IconFile } from '@tabler/icons-react';
import FileComponent from 'components/File';
import MutedText from 'components/MutedText';
Expand Down
9 changes: 1 addition & 8 deletions src/lib/config/Config.ts
Original file line number Diff line number Diff line change
Expand Up @@ -20,10 +20,9 @@ export interface ConfigCompression {
}

export interface ConfigDatasource {
type: 'local' | 's3' | 'supabase';
type: 'local' | 's3';
local: ConfigLocalDatasource;
s3?: ConfigS3Datasource;
supabase?: ConfigSupabaseDatasource;
}

export interface ConfigLocalDatasource {
Expand All @@ -41,12 +40,6 @@ export interface ConfigS3Datasource {
region?: string;
}

export interface ConfigSupabaseDatasource {
url: string;
key: string;
bucket: string;
}

export interface ConfigUploader {
default_format: string;
route: string;
Expand Down
4 changes: 0 additions & 4 deletions src/lib/config/readConfig.ts
Original file line number Diff line number Diff line change
Expand Up @@ -85,10 +85,6 @@ export default function readConfig() {
map('DATASOURCE_S3_REGION', 'string', 'datasource.s3.region'),
map('DATASOURCE_S3_USE_SSL', 'boolean', 'datasource.s3.use_ssl'),

map('DATASOURCE_SUPABASE_URL', 'string', 'datasource.supabase.url'),
map('DATASOURCE_SUPABASE_KEY', 'string', 'datasource.supabase.key'),
map('DATASOURCE_SUPABASE_BUCKET', 'string', 'datasource.supabase.bucket'),

map('UPLOADER_DEFAULT_FORMAT', 'string', 'uploader.default_format'),
map('UPLOADER_ROUTE', 'string', 'uploader.route'),
map('UPLOADER_LENGTH', 'number', 'uploader.length'),
Expand Down
45 changes: 13 additions & 32 deletions src/lib/config/validateConfig.ts
Original file line number Diff line number Diff line change
Expand Up @@ -51,7 +51,7 @@ const validator = s.object({
}),
datasource: s
.object({
type: s.enum('local', 's3', 'supabase').default('local'),
type: s.enum('local', 's3').default('local'),
local: s
.object({
directory: s.string.default(resolve('./uploads')).transform((v) => resolve(v)),
Expand All @@ -69,11 +69,6 @@ const validator = s.object({
region: s.string.default('us-east-1'),
use_ssl: s.boolean.default(false),
}).optional,
supabase: s.object({
url: s.string,
key: s.string,
bucket: s.string,
}).optional,
})
.default({
type: 'local',
Expand Down Expand Up @@ -253,43 +248,29 @@ export default function validate(config): Config {
logger.debug(`Attemping to validate ${JSON.stringify(config)}`);
const validated = validator.parse(config);
logger.debug(`Recieved config: ${JSON.stringify(validated)}`);
switch (validated.datasource.type) {
case 's3': {
const errors = [];
if (!validated.datasource.s3.access_key_id)
errors.push('datasource.s3.access_key_id is a required field');
if (!validated.datasource.s3.secret_access_key)
errors.push('datasource.s3.secret_access_key is a required field');
if (!validated.datasource.s3.bucket) errors.push('datasource.s3.bucket is a required field');
if (!validated.datasource.s3.endpoint) errors.push('datasource.s3.endpoint is a required field');
if (errors.length) throw { errors };
break;
}
case 'supabase': {
const errors = [];

if (!validated.datasource.supabase.key) errors.push('datasource.supabase.key is a required field');
if (!validated.datasource.supabase.url) errors.push('datasource.supabase.url is a required field');
if (!validated.datasource.supabase.bucket)
errors.push('datasource.supabase.bucket is a required field');
if (errors.length) throw { errors };

break;
}
if (validated.datasource.type === 's3') {
const errors = [];
if (!validated.datasource.s3.access_key_id)
errors.push('datasource.s3.access_key_id is a required field');
if (!validated.datasource.s3.secret_access_key)
errors.push('datasource.s3.secret_access_key is a required field');
if (!validated.datasource.s3.bucket) errors.push('datasource.s3.bucket is a required field');
if (!validated.datasource.s3.endpoint) errors.push('datasource.s3.endpoint is a required field');
if (errors.length) throw { errors };
}

const reserved = ['/view', '/dashboard', '/code', '/folder', '/api', '/auth', '/r'];
if (reserved.some((r) => validated.uploader.route.startsWith(r))) {
const reserved = new RegExp(/^\/(view|code|folder|auth|r)(\/\S*)?$|^\/(api|dashboard)(\/\S*)*/);
if (reserved.exec(validated.uploader.route))
throw {
errors: [`The uploader route cannot be ${validated.uploader.route}, this is a reserved route.`],
show: true,
};
} else if (reserved.some((r) => validated.urls.route.startsWith(r))) {
if (reserved.exec(validated.urls.route))
throw {
errors: [`The urls route cannot be ${validated.urls.route}, this is a reserved route.`],
show: true,
};
}

return validated as unknown as Config;
} catch (e) {
Expand Down
6 changes: 1 addition & 5 deletions src/lib/datasource.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
import config from './config';
import { Datasource, Local, S3, Supabase } from './datasources';
import { Datasource, Local, S3 } from './datasources';
import Logger from './logger';

const logger = Logger.get('datasource');
Expand All @@ -14,10 +14,6 @@ if (!global.datasource) {
global.datasource = new Local(config.datasource.local.directory);
logger.info(`using Local(${config.datasource.local.directory}) datasource`);
break;
case 'supabase':
global.datasource = new Supabase(config.datasource.supabase);
logger.info(`using Supabase(${config.datasource.supabase.bucket}) datasource`);
break;
default:
throw new Error('Invalid datasource type');
}
Expand Down
3 changes: 2 additions & 1 deletion src/lib/datasources/Datasource.ts
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@ export abstract class Datasource {
public abstract delete(file: string): Promise<void>;
public abstract clear(): Promise<void>;
public abstract size(file: string): Promise<number | null>;
public abstract get(file: string, start?: number, end?: number): Readable | Promise<Readable>;
public abstract get(file: string): Readable | Promise<Readable>;
public abstract fullSize(): Promise<number>;
public abstract range(file: string, start: number, end: number): Promise<Readable>;
}
13 changes: 10 additions & 3 deletions src/lib/datasources/Local.ts
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@ export class Local extends Datasource {
}

public async save(file: string, data: Buffer): Promise<void> {
await writeFile(join(this.path, file), data);
await writeFile(join(this.path, file), Uint8Array.from(data));
}

public async delete(file: string): Promise<void> {
Expand All @@ -26,12 +26,12 @@ export class Local extends Datasource {
}
}

public get(file: string, start: number = 0, end: number = Infinity): ReadStream {
public get(file: string): ReadStream {
const full = join(this.path, file);
if (!existsSync(full)) return null;

try {
return createReadStream(full, { start, end });
return createReadStream(full);
} catch (e) {
return null;
}
Expand All @@ -56,4 +56,11 @@ export class Local extends Datasource {

return size;
}

public async range(file: string, start: number, end: number): Promise<ReadStream> {
const path = join(this.path, file);
const readStream = createReadStream(path, { start, end });

return readStream;
}
}
42 changes: 19 additions & 23 deletions src/lib/datasources/S3.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
import { Datasource } from '.';
import { Readable } from 'stream';
import { PassThrough, Readable } from 'stream';
import { ConfigS3Datasource } from 'lib/config/Config';
import { BucketItemStat, Client } from 'minio';

Expand All @@ -24,7 +24,8 @@ export class S3 extends Datasource {
await this.s3.putObject(
this.config.bucket,
file,
data,
new PassThrough().end(data),
data.byteLength,
options ? { 'Content-Type': options.type } : undefined,
);
}
Expand All @@ -45,28 +46,12 @@ export class S3 extends Datasource {
});
}

public get(file: string, start: number = 0, end: number = Infinity): Promise<Readable> {
if (start === 0 && end === Infinity) {
return new Promise((res) => {
this.s3.getObject(this.config.bucket, file, (err, stream) => {
if (err) res(null);
else res(stream);
});
});
}

public get(file: string): Promise<Readable> {
return new Promise((res) => {
this.s3.getPartialObject(
this.config.bucket,
file,
start,
// undefined means to read the rest of the file from the start (offset)
end === Infinity ? undefined : end,
(err, stream) => {
if (err) res(null);
else res(stream);
},
);
this.s3.getObject(this.config.bucket, file, (err, stream) => {
if (err) res(null);
else res(stream);
});
});
}

Expand Down Expand Up @@ -96,4 +81,15 @@ export class S3 extends Datasource {
});
});
}

public async range(file: string, start: number, end: number): Promise<Readable> {
return new Promise((res) => {
this.s3.getPartialObject(this.config.bucket, file, start, end, (err, stream) => {
if (err) {
console.log(err);
res(null);
} else res(stream);
});
});
}
}
Loading

0 comments on commit 1febd5a

Please sign in to comment.