feat: string bytes/ms settings so they won't overflow

This commit is contained in:
diced
2025-01-08 22:15:27 -08:00
parent 49fb0434bd
commit 9a117d7032
13 changed files with 75 additions and 66 deletions

View File

@@ -0,0 +1,17 @@
-- AlterTable
-- Converts Zipline settings columns from numeric storage (bytes /
-- milliseconds as Int) to human-readable TEXT (e.g. '95mb', '30m') so large
-- values cannot overflow the integer column type. Pre-existing integer
-- values become their digit strings, which the bytes()/ms() parsers used by
-- the application still accept as plain numbers — TODO confirm against the
-- bytes/ms parsing helpers.
-- NOTE(review): each SET DEFAULT with a string literal appears before the
-- matching SET DATA TYPE TEXT; PostgreSQL applies ALTER TYPE subcommands in
-- an earlier internal pass than SET DEFAULT, so this ordering works there —
-- presumably safe since this is Prisma-generated, but verify if this
-- migration is ever ported to another database.
ALTER TABLE "Zipline" ALTER COLUMN "chunksMax" SET DEFAULT '95mb',
ALTER COLUMN "chunksMax" SET DATA TYPE TEXT,
ALTER COLUMN "chunksSize" SET DEFAULT '25mb',
ALTER COLUMN "chunksSize" SET DATA TYPE TEXT,
-- task scheduler intervals: milliseconds (1800000) -> ms-style strings ('30m')
ALTER COLUMN "tasksDeleteInterval" SET DEFAULT '30m',
ALTER COLUMN "tasksDeleteInterval" SET DATA TYPE TEXT,
ALTER COLUMN "tasksClearInvitesInterval" SET DEFAULT '30m',
ALTER COLUMN "tasksClearInvitesInterval" SET DATA TYPE TEXT,
ALTER COLUMN "tasksMaxViewsInterval" SET DEFAULT '30m',
ALTER COLUMN "tasksMaxViewsInterval" SET DATA TYPE TEXT,
ALTER COLUMN "tasksThumbnailsInterval" SET DEFAULT '30m',
ALTER COLUMN "tasksThumbnailsInterval" SET DATA TYPE TEXT,
-- upload size limit: bytes (104857600) -> byte-size string ('100mb')
ALTER COLUMN "tasksMetricsInterval" SET DEFAULT '30m',
ALTER COLUMN "tasksMetricsInterval" SET DATA TYPE TEXT,
ALTER COLUMN "filesMaxFileSize" SET DEFAULT '100mb',
ALTER COLUMN "filesMaxFileSize" SET DATA TYPE TEXT;

View File

@@ -20,20 +20,20 @@ model Zipline {
coreTempDirectory String // default join(tmpdir(), 'zipline')
chunksEnabled Boolean @default(true)
chunksMax Int @default(99614720)
chunksSize Int @default(26214400)
chunksMax String @default("95mb")
chunksSize String @default("25mb")
tasksDeleteInterval Int @default(1800000)
tasksClearInvitesInterval Int @default(1800000)
tasksMaxViewsInterval Int @default(1800000)
tasksThumbnailsInterval Int @default(1800000)
tasksMetricsInterval Int @default(1800000)
tasksDeleteInterval String @default("30m")
tasksClearInvitesInterval String @default("30m")
tasksMaxViewsInterval String @default("30m")
tasksThumbnailsInterval String @default("30m")
tasksMetricsInterval String @default("30m")
filesRoute String @default("/u")
filesLength Int @default(6)
filesDefaultFormat String @default("random")
filesDisabledExtensions String[]
filesMaxFileSize Int @default(104857600)
filesMaxFileSize String @default("100mb")
filesDefaultExpiration String?
filesAssumeMimetypes Boolean @default(false)
filesDefaultDateFormat String @default("YYYY-MM-DD_HH:mm:ss")

View File

@@ -1,5 +1,4 @@
import { Response } from '@/lib/api/response';
import { bytes } from '@/lib/bytes';
import { Button, LoadingOverlay, Paper, SimpleGrid, Switch, TextInput, Title } from '@mantine/core';
import { useForm } from '@mantine/form';
import { IconDeviceFloppy } from '@tabler/icons-react';
@@ -28,8 +27,8 @@ export default function ServerSettingsChunks({
form.setValues({
chunksEnabled: data?.chunksEnabled ?? true,
chunksMax: bytes(data!.chunksMax),
chunksSize: bytes(data!.chunksSize),
chunksMax: data!.chunksMax ?? '',
chunksSize: data!.chunksSize ?? '',
});
}, [data]);

View File

@@ -1,5 +1,4 @@
import { Response } from '@/lib/api/response';
import { bytes } from '@/lib/bytes';
import {
Button,
LoadingOverlay,
@@ -82,7 +81,7 @@ export default function ServerSettingsFiles({
filesLength: data?.filesLength ?? 6,
filesDefaultFormat: data?.filesDefaultFormat ?? 'random',
filesDisabledExtensions: data?.filesDisabledExtensions.join(', ') ?? '',
filesMaxFileSize: bytes(data?.filesMaxFileSize ?? 104857600),
filesMaxFileSize: data?.filesMaxFileSize ?? '100mb',
filesDefaultExpiration: data?.filesDefaultExpiration ?? '',
filesAssumeMimetypes: data?.filesAssumeMimetypes ?? false,
filesDefaultDateFormat: data?.filesDefaultDateFormat ?? 'YYYY-MM-DD_HH:mm:ss',

View File

@@ -2,7 +2,6 @@ import { Response } from '@/lib/api/response';
import { Button, LoadingOverlay, Paper, SimpleGrid, Text, TextInput, Title } from '@mantine/core';
import { useForm } from '@mantine/form';
import { IconDeviceFloppy } from '@tabler/icons-react';
import ms from 'ms';
import { useRouter } from 'next/router';
import { useEffect } from 'react';
import { settingsOnSubmit } from '../settingsOnSubmit';
@@ -15,11 +14,11 @@ export default function ServerSettingsTasks({
const router = useRouter();
const form = useForm({
initialValues: {
tasksDeleteInterval: ms(1800000),
tasksClearInvitesInterval: ms(1800000),
tasksMaxViewsInterval: ms(1800000),
tasksThumbnailsInterval: ms(1800000),
tasksMetricsInterval: ms(1800000),
tasksDeleteInterval: '30m',
tasksClearInvitesInterval: '30m',
tasksMaxViewsInterval: '30m',
tasksThumbnailsInterval: '30m',
tasksMetricsInterval: '30m',
},
});
@@ -29,11 +28,11 @@ export default function ServerSettingsTasks({
if (!data) return;
form.setValues({
tasksDeleteInterval: ms(data?.tasksDeleteInterval ?? 1800000),
tasksClearInvitesInterval: ms(data?.tasksClearInvitesInterval ?? 1800000),
tasksMaxViewsInterval: ms(data?.tasksMaxViewsInterval ?? 1800000),
tasksThumbnailsInterval: ms(data?.tasksThumbnailsInterval ?? 1800000),
tasksMetricsInterval: ms(data?.tasksMetricsInterval ?? 1800000),
tasksDeleteInterval: data?.tasksDeleteInterval ?? '30m',
tasksClearInvitesInterval: data?.tasksClearInvitesInterval ?? '30m',
tasksMaxViewsInterval: data?.tasksMaxViewsInterval ?? '30m',
tasksThumbnailsInterval: data?.tasksThumbnailsInterval ?? '30m',
tasksMetricsInterval: data?.tasksMetricsInterval ?? '30m',
});
}, [data]);

View File

@@ -70,7 +70,7 @@ export default function UploadFile() {
const toPartialFiles: File[] = [];
for (let i = 0; i !== files.length; ++i) {
const file = files[i];
if (config.chunks.enabled && file.size >= config.chunks.max) {
if (config.chunks.enabled && file.size >= bytes(config.chunks.max)) {
toPartialFiles.push(file);
}
}
@@ -88,7 +88,7 @@ export default function UploadFile() {
});
} else {
const size = aggSize();
if (size > config.files.maxFileSize && !toPartialFiles.length) {
if (size > bytes(config.files.maxFileSize) && !toPartialFiles.length) {
notifications.show({
title: 'Upload may fail',
color: 'yellow',
@@ -97,7 +97,7 @@ export default function UploadFile() {
<>
The upload may fail because the total size of the files (that are not being partially uploaded)
you are trying to upload is <b>{bytes(size)}</b>, which is larger than the limit of{' '}
<b>{bytes(config.files.maxFileSize)}</b>
<b>{bytes(bytes(config.files.maxFileSize))}</b>
</>
),
});
@@ -167,7 +167,7 @@ export default function UploadFile() {
Attach as many files as you like, they will show up below to review before uploading.
</Text>
<Text size='sm' c='dimmed' mt={7}>
<b>{bytes(config.files.maxFileSize)}</b> limit per file
<b>{bytes(bytes(config.files.maxFileSize))}</b> limit per file
</Text>
</div>
</Group>

View File

@@ -1,5 +1,6 @@
import { useConfig } from '@/components/ConfigProvider';
import { Response } from '@/lib/api/response';
import { bytes } from '@/lib/bytes';
import { randomCharacters } from '@/lib/random';
import { ErrorBody } from '@/lib/response';
import { UploadOptionsStore } from '@/lib/store/uploadOptions';
@@ -94,10 +95,12 @@ export async function uploadPartialFiles(
setLoading(true);
setProgress({ percent: 0, remaining: 0, speed: 0 });
const chunkSize = bytes(config.chunks.size);
for (let i = 0; i !== files.length; ++i) {
const file = files[i];
const identifier = randomCharacters(8);
const nChunks = Math.ceil(file.size / config.chunks.size);
const nChunks = Math.ceil(file.size / chunkSize);
const chunks: {
blob: Blob;
start: number;
@@ -105,8 +108,8 @@ export async function uploadPartialFiles(
}[] = [];
for (let j = 0; j !== nChunks; ++j) {
const start = j * config.chunks.size;
const end = Math.min(start + config.chunks.size, file.size);
const start = j * chunkSize;
const end = Math.min(start + chunkSize, file.size);
chunks.push({
blob: file.slice(start, end),
start,

View File

@@ -35,8 +35,8 @@ export async function handleFile({
const extension = options.overrides?.extension ?? extname(file.filename);
if (config.files.disabledExtensions.includes(extension)) throw `File extension ${extension} is not allowed`;
if (file.file.bytesRead > config.files.maxFileSize)
throw `File size is too large. Maximum file size is ${config.files.maxFileSize} bytes`;
if (file.file.bytesRead > bytes(config.files.maxFileSize))
throw `File size is too large. Maximum file size is ${bytes(config.files.maxFileSize)} bytes`;
const format = options.format || config.files.defaultFormat;
let fileName = formatFileName(format, file.filename);

View File

@@ -1,10 +1,8 @@
import { type ZodIssue, z } from 'zod';
import { PROP_TO_ENV, ParsedConfig } from './read';
import { log } from '../logger';
import { join, resolve } from 'path';
import { bytes } from '../bytes';
import ms from 'ms';
import { tmpdir } from 'os';
import { join, resolve } from 'path';
import { type ZodIssue, z } from 'zod';
import { log } from '../logger';
import { PROP_TO_ENV, ParsedConfig } from './read';
declare global {
// eslint-disable-next-line @typescript-eslint/no-namespace
@@ -75,23 +73,23 @@ export const schema = z.object({
.default(join(tmpdir(), 'zipline')),
}),
chunks: z.object({
max: z.number().default(bytes('95mb')),
size: z.number().default(bytes('25mb')),
max: z.string().default('95mb'),
size: z.string().default('25mb'),
enabled: z.boolean().default(true),
}),
tasks: z.object({
deleteInterval: z.number().default(ms('30min')),
clearInvitesInterval: z.number().default(ms('30min')),
maxViewsInterval: z.number().default(ms('30min')),
thumbnailsInterval: z.number().default(ms('30min')),
metricsInterval: z.number().default(ms('30min')),
deleteInterval: z.string().default('30min'),
clearInvitesInterval: z.string().default('30min'),
maxViewsInterval: z.string().default('30min'),
thumbnailsInterval: z.string().default('30min'),
metricsInterval: z.string().default('30min'),
}),
files: z.object({
route: z.string().startsWith('/').min(1).trim().toLowerCase().default('/u'),
length: z.number().default(6),
defaultFormat: z.enum(['random', 'date', 'uuid', 'name', 'gfycat']).default('random'),
disabledExtensions: z.array(z.string()).default([]),
maxFileSize: z.number().default(bytes('100mb')),
maxFileSize: z.string().default('100mb'),
defaultExpiration: z.string().nullable().default(null),
assumeMimetypes: z.boolean().default(false),
defaultDateFormat: z.string().default('YYYY-MM-DD_HH:mm:ss'),

View File

@@ -19,6 +19,8 @@ export function withSafeConfig<T = unknown>(
const config = safeConfig(libConfig);
const data = await fn(ctx, config);
console.log(config, data);
if ((data as any) && (data as any).notFound)
return {
notFound: true,

View File

@@ -60,7 +60,6 @@ async function handler({ code, host, state }: OAuthQuery, _logger: Logger): Prom
const json = await res.json();
if (!json.access_token) return { error: 'No access token in response' };
if (!json.refresh_token) return { error: 'No refresh token in response' };
const userJson = await googleAuth.user(json.access_token);
if (!userJson) return { error: 'Failed to fetch user' };

View File

@@ -1,3 +1,4 @@
import { bytes } from '@/lib/bytes';
import { reloadSettings } from '@/lib/config';
import { getDatasource } from '@/lib/datasource';
import { prisma } from '@/lib/db';
@@ -19,6 +20,7 @@ import { fastifySensible } from '@fastify/sensible';
import { fastifyStatic } from '@fastify/static';
import fastify from 'fastify';
import { mkdir } from 'fs/promises';
import ms from 'ms';
import { parse } from 'url';
import { version } from '../../package.json';
import { checkRateLimit } from './plugins/checkRateLimit';
@@ -84,7 +86,7 @@ async function main() {
await server.register(fastifyMultipart, {
limits: {
fileSize: config.files.maxFileSize,
fileSize: bytes(config.files.maxFileSize),
},
});
@@ -203,10 +205,10 @@ async function main() {
logger.info('server started', { hostname: config.core.hostname, port: config.core.port });
// Tasks
tasks.interval('deletefiles', config.tasks.deleteInterval, deleteFiles(prisma));
tasks.interval('maxviews', config.tasks.maxViewsInterval, maxViews(prisma));
tasks.interval('deletefiles', ms(config.tasks.deleteInterval), deleteFiles(prisma));
tasks.interval('maxviews', ms(config.tasks.maxViewsInterval), maxViews(prisma));
if (config.features.metrics) tasks.interval('metrics', config.tasks.metricsInterval, metrics(prisma));
if (config.features.metrics) tasks.interval('metrics', ms(config.tasks.metricsInterval), metrics(prisma));
if (config.features.thumbnails.enabled) {
for (let i = 0; i !== config.features.thumbnails.num_threads; ++i) {
@@ -216,8 +218,8 @@ async function main() {
});
}
tasks.interval('thumbnails', config.tasks.thumbnailsInterval, thumbnails(prisma));
tasks.interval('clearinvites', config.tasks.clearInvitesInterval, clearInvites(prisma));
tasks.interval('thumbnails', ms(config.tasks.thumbnailsInterval), thumbnails(prisma));
tasks.interval('clearinvites', ms(config.tasks.clearInvitesInterval), clearInvites(prisma));
}
tasks.start();

View File

@@ -18,17 +18,8 @@ type Settings = Awaited<ReturnType<typeof readDatabaseSettings>>;
export type ApiServerSettingsResponse = Settings;
type Body = Partial<Settings>;
const zMs = z
.union([z.number().min(1), z.string()])
.transform((value) => (typeof value === 'string' ? ms(value) : value))
.refine((value) => value > 0, 'Value must be greater than 0');
const zMsString = z.string().refine((value) => ms(value) > 0, 'Value must be greater than 0');
const zBytes = z
.union([z.number().min(1), z.string()])
.transform((value) => (typeof value === 'string' ? bytes(value) : value))
.refine((value) => value > 0, 'Value must be greater than 0');
const zMs = z.string().refine((value) => ms(value) > 0, 'Value must be greater than 0');
const zBytes = z.string().refine((value) => bytes(value) > 0, 'Value must be greater than 0');
const discordEmbed = z
.union([
@@ -125,7 +116,7 @@ export default fastifyPlugin(
),
filesMaxFileSize: zBytes,
filesDefaultExpiration: zMsString.nullable(),
filesDefaultExpiration: zMs.nullable(),
filesAssumeMimetypes: z.boolean(),
filesDefaultDateFormat: z.string(),
filesRemoveGpsMetadata: z.boolean(),