Mirror of https://github.com/diced/zipline.git (synced 2025-12-25 12:25:08 -08:00)
Compare commits
11 Commits
| Author | SHA1 | Date |
|---|---|---|
| | f36ab9e7b6 | |
| | 34a993fcc6 | |
| | aa9f0796ab | |
| | c0b2dda7da | |
| | 1e507bbf9c | |
| | 6271b800c2 | |
| | effe1f9ec1 | |
| | b6615621e1 | |
| | 145b1ca727 | |
| | 6f75bbee7b | |
| | 58a4580cf0 | |
@@ -7,4 +7,6 @@ RUN usermod -l zipline node \
  && chmod 0440 /etc/sudoers.d/zipline \
  && sudo apt-get update && apt-get install gnupg2 -y

USER zipline
EXPOSE 3000

USER zipline

@@ -41,7 +41,7 @@
  "remoteUser": "zipline",
  "updateRemoteUserUID": true,
  "remoteEnv": {
    "CORE_DATABASE_URL": "postgres://postgres:postgres@localhost/zip10"
    "CORE_DATABASE_URL": "postgres://postgres:postgres@db/zip10"
  },
  "portsAttributes": {
    "3000": {

@@ -7,7 +7,7 @@ CORE_RETURN_HTTPS=true
CORE_SECRET="changethis"
CORE_HOST=0.0.0.0
CORE_PORT=3000
CORE_DATABASE_URL="postgres://postgres:postgres@localhost/zip10"
CORE_DATABASE_URL="postgres://postgres:postgres@db/zip10"
CORE_LOGGER=false
CORE_STATS_INTERVAL=1800
CORE_INVITES_INTERVAL=1800
@@ -47,4 +47,4 @@ URLS_LENGTH=6
RATELIMIT_USER=5
RATELIMIT_ADMIN=3

# for more variables checkout the docs
# for more variables checkout the docs

.github/workflows/docker-release.yml (vendored, 16 changes)
@@ -13,8 +13,8 @@ on:
  workflow_dispatch:

jobs:
  push_to_ghcr:
    name: Push Release Image to GitHub Packages
  push:
    name: Push Release Image
    runs-on: ubuntu-latest
    steps:
      - name: Check out the repo
@@ -32,14 +32,20 @@ jobs:
        id: buildx
        uses: docker/setup-buildx-action@v2

      - name: Login to Github Packages
      - name: Login to GitHub Packages
        uses: docker/login-action@v2
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Build Docker Image
      - name: Login to Docker Hub
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Build and Push Docker Image
        uses: docker/build-push-action@v3
        with:
          push: true
@@ -47,5 +53,7 @@ jobs:
          tags: |
            ghcr.io/diced/zipline:latest
            ghcr.io/diced/zipline:${{ steps.version.outputs.zipline_version }}
            ${{ secrets.DOCKERHUB_USERNAME }}/zipline:latest
            ${{ secrets.DOCKERHUB_USERNAME }}/zipline:${{ steps.version.outputs.zipline_version }}
          cache-from: type=gha
          cache-to: type=gha,mode=max

.github/workflows/docker.yml (vendored, 14 changes)
@@ -12,8 +12,8 @@ on:
  workflow_dispatch:

jobs:
  push_to_ghcr:
    name: Push Image to GitHub Packages
  push:
    name: Push Image
    runs-on: ubuntu-latest
    steps:
      - name: Check out the repo
@@ -38,7 +38,13 @@ jobs:
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Build Docker Image
      - name: Login to Docker Hub
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Build and Push Docker Image
        uses: docker/build-push-action@v3
        with:
          push: true
@@ -46,5 +52,7 @@ jobs:
          tags: |
            ghcr.io/diced/zipline:trunk
            ghcr.io/diced/zipline:trunk-${{ steps.version.outputs.zipline_version }}
            ${{ secrets.DOCKERHUB_USERNAME }}/zipline:trunk
            ${{ secrets.DOCKERHUB_USERNAME }}/zipline:trunk-${{ steps.version.outputs.zipline_version }}
          cache-from: type=gha
          cache-to: type=gha,mode=max

@@ -1,8 +1,8 @@
# Use the Prisma binaries image as the first stage
FROM ghcr.io/diced/prisma-binaries:5.1.x as prisma
FROM ghcr.io/diced/prisma-binaries:5.1.x AS prisma

# Use Alpine Linux as the second stage
FROM node:18-alpine3.16 as base
FROM node:18-alpine3.16 AS base

# Set the working directory
WORKDIR /zipline
@@ -27,7 +27,7 @@ ENV PRISMA_QUERY_ENGINE_BINARY=/prisma-engines/query-engine \
# Install the dependencies
RUN yarn install --immutable

FROM base as builder
FROM base AS builder

COPY src ./src
COPY next.config.js ./next.config.js

@@ -169,4 +169,4 @@ Create a pull request on GitHub. If your PR does not pass the action checks, the

# Documentation

Documentation source code is located in [diced/zipline-docs](https://github.com/diced/zipline-docs), and can be accessed [here](https://zipl.vercel.app).
Documentation source code is located in [diced/zipline-docs](https://github.com/diced/zipline-docs), and can be accessed [here](https://zipline.diced.sh/docs/get-started).

@@ -1,6 +1,6 @@
{
  "name": "zipline",
  "version": "3.7.10",
  "version": "3.7.11",
  "license": "MIT",
  "scripts": {
    "dev": "npm-run-all build:server dev:run",
@@ -79,7 +79,7 @@
    "@types/katex": "^0.16.6",
    "@types/minio": "^7.1.1",
    "@types/multer": "^1.4.10",
    "@types/node": "^18.18.10",
    "@types/node": "18",
    "@types/qrcode": "^1.5.5",
    "@types/react": "^18.2.37",
    "@types/sharp": "^0.32.0",

@@ -1,14 +1,17 @@
import {
  ActionIcon,
  Alert,
  Anchor,
  Box,
  Button,
  Card,
  Code,
  ColorInput,
  CopyButton,
  FileInput,
  Group,
  Image,
  List,
  PasswordInput,
  SimpleGrid,
  Space,
@@ -22,6 +25,7 @@ import { randomId, useInterval, useMediaQuery } from '@mantine/hooks';
import { useModals } from '@mantine/modals';
import { showNotification, updateNotification } from '@mantine/notifications';
import {
  IconAlertCircle,
  IconBrandDiscordFilled,
  IconBrandGithubFilled,
  IconBrandGoogle,
@@ -376,6 +380,129 @@ export default function Manage({ oauth_registration, oauth_providers: raw_oauth_
    }
  };

  const startFullExport = () => {
    modals.openConfirmModal({
      title: <Title>Are you sure?</Title>,
      size: 'xl',
      children: (
        <Box px='md'>
          <Alert color='red' icon={<IconAlertCircle size='1rem' />} title='Warning'>
            This export contains a significant amount of sensitive data, including user information,
            passwords, metadata, and system details. It is crucial to handle this file with care to prevent
            unauthorized access or misuse. Ensure it is stored securely and shared only with trusted parties.
          </Alert>

          <p>
            The export provides a snapshot of Zipline's data and environment. Specifically, it includes:
          </p>

          <List>
            <List.Item>
              <b>User Data:</b> Information about users, avatars, passwords, and registered OAuth providers.
            </List.Item>
            <List.Item>
              <b>Files:</b> Metadata about uploaded files including filenames, passwords, sizes, and
              timestamps, linked users. <i>(Note: the actual contents of the files are not included.)</i>
            </List.Item>
            <List.Item>
              <b>URLs:</b> Metadata about shortened URLs, including the original URL, short URL, and vanity.
            </List.Item>
            <List.Item>
              <b>Folders:</b> Metadata about folders, including names, visibility settings, and files.
            </List.Item>
            <List.Item>
              <b>Thumbnails:</b> Metadata about thumbnails, includes the name and creation timestamp.{' '}
              <i>(Actual image data is excluded.)</i>
            </List.Item>
            <List.Item>
              <b>Invites:</b> Metadata about invites, includes the invite code, creator, and expiration date.
            </List.Item>
            <List.Item>
              <b>Statistics:</b> Usage data that is used on the statistics page, including upload counts and
              such.
            </List.Item>
          </List>
          <p>
            Additionally, the export captures <b>system-specific information</b>:
          </p>
          <List>
            <List.Item>
              <b>CPU Count:</b> The number of processing cores available on the host system.
            </List.Item>
            <List.Item>
              <b>Hostname:</b> The network identifier of the host system.
            </List.Item>
            <List.Item>
              <b>Architecture:</b> The hardware architecture (e.g., <Code>x86</Code>, <Code>arm</Code>) on
              which Zipline is running.
            </List.Item>
            <List.Item>
              <b>Platform:</b> The operating system platform (e.g., <Code>linux</Code>, <Code>darwin</Code>)
              on which Zipline is running.
            </List.Item>
            <List.Item>
              <b>Version:</b> The current version of the operating system (kernel version)
            </List.Item>
            <List.Item>
              <b>Environment Variables:</b> The configuration settings and variables defined at the time of
              execution.
            </List.Item>
          </List>

          <p>
            <i>Note:</i> By omitting the actual contents of files and thumbnails while including their
            metadata, the export ensures it captures enough detail for migration to another instance, or for
            v4.
          </p>
        </Box>
      ),
      labels: { confirm: 'Yes', cancel: 'No' },
      cancelProps: { color: 'red' },
      onConfirm: async () => {
        modals.closeAll();
        showNotification({
          title: 'Exporting all server data...',
          message: 'This may take a while depending on the amount of data.',
          loading: true,
          id: 'export-all',
          autoClose: false,
        });

        const res = await useFetch('/api/admin/export', 'GET');
        if (res.error) {
          updateNotification({
            id: 'export-all',
            title: 'Error exporting data',
            message: res.error,
            color: 'red',
            icon: <IconFileExport size='1rem' />,
            autoClose: true,
          });
        } else {
          updateNotification({
            title: 'Export created',
            message: 'Your browser will prompt you to download a JSON file with all the server data.',
            id: 'export-all',
            color: 'green',
            icon: <IconFileExport size='1rem' />,
            autoClose: true,
          });

          const blob = new Blob([JSON.stringify(res)], { type: 'application/json' });
          const a = document.createElement('a');
          a.style.display = 'none';
          const url = URL.createObjectURL(blob);
          console.log(url, res);
          a.setAttribute('download', `zipline_export_${Date.now()}.json`);
          a.setAttribute('href', url);
          a.click();

          URL.revokeObjectURL(url);
        }
      },
    });
  };

  const interval = useInterval(() => getExports(), 30000);
  useEffect(() => {
    getExports();
@@ -642,6 +769,11 @@ export default function Manage({ oauth_registration, oauth_providers: raw_oauth_
          >
            Delete all uploads
          </Button>
          {user.superAdmin && (
            <Button size='md' onClick={startFullExport} rightIcon={<IconFileExport size='1rem' />}>
              Export all server data (JSON)
            </Button>
          )}
        </Group>
      </Box>
    )}

@@ -15,7 +15,7 @@ export default function PrismCode({ code, ext, ...props }) {

  return (
    <Prism
      sx={(t) => ({ height: '100vh', backgroundColor: t.colors.dark[8] })}
      sx={(t) => ({ height: '100vh', overflow: 'scroll', backgroundColor: t.colors.dark[8] })}
      withLineNumbers
      language={exts[ext]?.toLowerCase()}
      {...props}

@@ -278,7 +278,7 @@ export default function validate(config): Config {
    }
  }

  const reserved = ['/view', '/dashboard', '/code', '/folder', '/api', '/auth'];
  const reserved = ['/view', '/dashboard', '/code', '/folder', '/api', '/auth', '/r'];
  if (reserved.some((r) => validated.uploader.route.startsWith(r))) {
    throw {
      errors: [`The uploader route cannot be ${validated.uploader.route}, this is a reserved route.`],

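(Illustrative sketch, not part of the diff.) With '/r' added to the reserved list, any configured uploader route that falls under it should now fail validation; a minimal check mirroring the logic above, with a hypothetical route value:

const reserved = ['/view', '/dashboard', '/code', '/folder', '/api', '/auth', '/r'];
const route = '/r/uploads'; // hypothetical uploader route from config

// startsWith('/r') matches, so validate() would throw the "reserved route" error
console.log(reserved.some((r) => route.startsWith(r))); // true
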
@@ -6,7 +6,7 @@ export abstract class Datasource {
  public abstract save(file: string, data: Buffer, options?: { type: string }): Promise<void>;
  public abstract delete(file: string): Promise<void>;
  public abstract clear(): Promise<void>;
  public abstract size(file: string): Promise<number>;
  public abstract get(file: string): Readable | Promise<Readable>;
  public abstract size(file: string): Promise<number | null>;
  public abstract get(file: string, start?: number, end?: number): Readable | Promise<Readable>;
  public abstract fullSize(): Promise<number>;
}

@@ -26,20 +26,20 @@ export class Local extends Datasource {
    }
  }

  public get(file: string): ReadStream {
  public get(file: string, start: number = 0, end: number = Infinity): ReadStream {
    const full = join(this.path, file);
    if (!existsSync(full)) return null;

    try {
      return createReadStream(full);
      return createReadStream(full, { start, end });
    } catch (e) {
      return null;
    }
  }

  public async size(file: string): Promise<number> {
  public async size(file: string): Promise<number | null> {
    const full = join(this.path, file);
    if (!existsSync(full)) return 0;
    if (!existsSync(full)) return null;
    const stats = await stat(full);

    return stats.size;

@@ -1,7 +1,7 @@
import { Datasource } from '.';
import { Readable } from 'stream';
import { ConfigS3Datasource } from 'lib/config/Config';
import { Client } from 'minio';
import { BucketItemStat, Client } from 'minio';

export class S3 extends Datasource {
  public name = 'S3';
@@ -45,19 +45,34 @@ export class S3 extends Datasource {
    });
  }

  public get(file: string): Promise<Readable> {
  public get(file: string, start: number = 0, end: number = Infinity): Promise<Readable> {
    return new Promise((res) => {
      this.s3.getObject(this.config.bucket, file, (err, stream) => {
        if (err) res(null);
        else res(stream);
      });
      this.s3.getPartialObject(
        this.config.bucket,
        file,
        start,
        // undefined means to read the rest of the file from the start (offset)
        end === Infinity ? undefined : end,
        (err, stream) => {
          if (err) res(null);
          else res(stream);
        },
      );
    });
  }

  public async size(file: string): Promise<number> {
    const stat = await this.s3.statObject(this.config.bucket, file);

    return stat.size;
  public size(file: string): Promise<number | null> {
    return new Promise((res) => {
      this.s3.statObject(
        this.config.bucket,
        file,
        // @ts-expect-error this callback is not in the types but the code for it is there
        (err: unknown, stat: BucketItemStat) => {
          if (err) res(null);
          else res(stat.size);
        },
      );
    });
  }

  public async fullSize(): Promise<number> {

@@ -72,12 +72,13 @@ export class Supabase extends Datasource {
    }
  }

  public async get(file: string): Promise<Readable> {
  public async get(file: string, start: number = 0, end: number = Infinity): Promise<Readable> {
    // get a readable stream from the request
    const r = await fetch(`${this.config.url}/storage/v1/object/${this.config.bucket}/${file}`, {
      method: 'GET',
      headers: {
        Authorization: `Bearer ${this.config.key}`,
        Range: `bytes=${start}-${end === Infinity ? '' : end}`,
      },
    });

@@ -85,7 +86,7 @@ export class Supabase extends Datasource {
    return Readable.fromWeb(r.body as any);
  }

  public size(file: string): Promise<number> {
  public size(file: string): Promise<number | null> {
    return new Promise(async (res) => {
      fetch(`${this.config.url}/storage/v1/object/list/${this.config.bucket}`, {
        method: 'POST',
@@ -102,11 +103,11 @@ export class Supabase extends Datasource {
        .then((j) => {
          if (j.error) {
            this.logger.error(`${j.error}: ${j.message}`);
            res(0);
            res(null);
          }

          if (j.length === 0) {
            res(0);
            res(null);
          } else {
            res(j[0].metadata.size);
          }

@@ -67,26 +67,7 @@ export const withOAuth =
        },
      });
    } catch (e) {
      logger.debug(`Failed to find existing oauth. Using fallback. ${e}`);
      if (e.code === 'P2022' || e.code === 'P2025') {
        const existing = await prisma.user.findFirst({
          where: {
            oauth: {
              some: {
                provider: provider.toUpperCase() as OauthProviders,
                username: oauth_resp.username,
              },
            },
          },
          include: {
            oauth: true,
          },
        });
        existingOauth = existing?.oauth?.find((o) => o.provider === provider.toUpperCase());
        if (existingOauth) existingOauth.fallback = true;
      } else {
        logger.error(`Failed to find existing oauth. ${e}`);
      }
      logger.error(`Failed to find existing oauth, this likely will result in a failure: ${e}`);
    }

    const existingUser = await prisma.user.findFirst({
@@ -157,7 +138,7 @@ export const withOAuth =
      logger.info(`User ${user.username} (${user.id}) logged in via oauth(${provider})`);

      return res.redirect('/dashboard');
    } else if ((existingOauth && existingOauth.fallback) || existingOauth) {
    } else if (existingOauth) {
      await prisma.oAuth.update({
        where: {
          id: existingOauth?.id,

src/lib/utils/range.ts (new file, 9 lines)
@@ -0,0 +1,9 @@
export function parseRangeHeader(header?: string): [number, number] {
  if (!header || !header.startsWith('bytes=')) return [0, Infinity];

  const range = header.replace('bytes=', '').split('-');
  const start = Number(range[0]) || 0;
  const end = Number(range[1]) || Infinity;

  return [start, end];
}

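(Illustrative sketch, not part of the diff.) Given the implementation above, the new helper maps a Range header to a [start, end] tuple as follows; the header values here are hypothetical, and the import assumes the repository's 'lib/*' path alias:

import { parseRangeHeader } from 'lib/utils/range';

parseRangeHeader(undefined);     // [0, Infinity]   -- no header, serve the whole file
parseRangeHeader('bytes=100-');  // [100, Infinity] -- open-ended range
parseRangeHeader('bytes=0-499'); // [0, 499]        -- bounded range, end is inclusive
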
src/pages/api/admin/export.ts (new file, 315 lines)
@@ -0,0 +1,315 @@
import { readFile } from 'fs/promises';
import Logger from 'lib/logger';
import prisma from 'lib/prisma';
import { randomChars } from 'lib/util';
import { bytesToHuman } from 'lib/utils/bytes';
import { NextApiReq, NextApiRes, UserExtended, withZipline } from 'middleware/withZipline';
import os from 'os';

const logger = Logger.get('admin').child('export');

type Zipline3Export = {
  versions: {
    zipline: string;
    node: string;
    export: '3';
  };

  request: {
    user: string;
    date: string;
    os: {
      platform: 'aix' | 'darwin' | 'freebsd' | 'linux' | 'openbsd' | 'sunos' | 'win32';
      arch:
        | 'arm'
        | 'arm64'
        | 'ia32'
        | 'loong64'
        | 'mips'
        | 'mipsel'
        | 'ppc'
        | 'ppc64'
        | 'riscv64'
        | 's390'
        | 's390x'
        | 'x64';
      cpus: number;
      hostname: string;
      release: string;
    };
    env: NodeJS.ProcessEnv;
  };

  // Creates a unique identifier for each model
  // used to map the user's stuff to other data owned by the user
  user_map: Record<number, string>;
  thumbnail_map: Record<number, string>;
  folder_map: Record<number, string>;
  file_map: Record<number, string>;
  url_map: Record<number, string>;
  invite_map: Record<number, string>;

  users: {
    [id: string]: {
      username: string;
      password: string;
      avatar: string;
      administrator: boolean;
      super_administrator: boolean;
      embed: {
        title?: string;
        site_name?: string;
        description?: string;
        color?: string;
      };
      totp_secret: string;
      oauth: {
        provider: 'DISCORD' | 'GITHUB' | 'GOOGLE';
        username: string;
        oauth_id: string;
        access_token: string;
        refresh_token: string;
      }[];
    };
  };

  files: {
    [id: string]: {
      name: string;
      original_name: string;
      type: `${string}/${string}`;
      size: number | bigint;
      user: string | null;
      thumbnail?: string;
      max_views: number;
      views: number;
      expires_at?: string;
      created_at: string;
      favorite: boolean;
      password?: string;
    };
  };

  thumbnails: {
    [id: string]: {
      name: string;
      created_at: string;
    };
  };

  folders: {
    [id: string]: {
      name: string;
      public: boolean;
      created_at: string;
      user: string;
      files: string[];
    };
  };

  urls: {
    [id: number]: {
      destination: string;
      vanity?: string;
      code: string;
      created_at: string;
      max_views: number;
      views: number;
      user: string;
    };
  };

  invites: {
    [id: string]: {
      code: string;
      expites_at?: string;
      created_at: string;
      used: boolean;

      created_by_user: string;
    };
  };

  stats: {
    created_at: string;
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    data: any;
  }[];
};

async function handler(req: NextApiReq, res: NextApiRes, user: UserExtended) {
  if (!user.superAdmin) return res.forbidden('You must be a super administrator to export data');

  const pkg = JSON.parse(await readFile('package.json', 'utf8'));

  const exportData: Partial<Zipline3Export> = {
    versions: {
      zipline: pkg.version,
      node: process.version,
      export: '3',
    },
    request: {
      user: '',
      date: new Date().toISOString(),
      os: {
        platform: os.platform() as Zipline3Export['request']['os']['platform'],
        arch: os.arch() as Zipline3Export['request']['os']['arch'],
        cpus: os.cpus().length,
        hostname: os.hostname(),
        release: os.release(),
      },
      env: process.env,
    },
    user_map: {},
    thumbnail_map: {},
    folder_map: {},
    file_map: {},
    url_map: {},
    invite_map: {},

    users: {},
    files: {},
    thumbnails: {},
    folders: {},
    urls: {},
    invites: {},
    stats: [],
  };

  const users = await prisma.user.findMany({
    include: {
      oauth: true,
    },
  });

  for (const user of users) {
    const uniqueId = randomChars(32);
    exportData.user_map[user.id] = uniqueId;

    exportData.users[uniqueId] = {
      username: user.username,
      password: user.password,
      avatar: user.avatar,
      administrator: user.administrator,
      super_administrator: user.superAdmin,
      embed: user.embed as Zipline3Export['users'][string]['embed'],
      totp_secret: user.totpSecret,
      oauth: user.oauth.map((oauth) => ({
        provider: oauth.provider as Zipline3Export['users'][string]['oauth'][0]['provider'],
        username: oauth.username,
        oauth_id: oauth.oauthId,
        access_token: oauth.token,
        refresh_token: oauth.refresh,
      })),
    };
  }

  const folders = await prisma.folder.findMany({ include: { files: true } });
  for (const folder of folders) {
    const uniqueId = randomChars(32);
    exportData.folder_map[folder.id] = uniqueId;

    exportData.folders[uniqueId] = {
      name: folder.name,
      public: folder.public,
      created_at: folder.createdAt.toISOString(),
      user: exportData.user_map[folder.userId],
      files: [], // mapped later
    };
  }

  const thumbnails = await prisma.thumbnail.findMany();
  for (const thumbnail of thumbnails) {
    const uniqueId = randomChars(32);
    exportData.thumbnail_map[thumbnail.id] = uniqueId;

    exportData.thumbnails[uniqueId] = {
      name: thumbnail.name,
      created_at: thumbnail.createdAt.toISOString(),
    };
  }

  const files = await prisma.file.findMany({ include: { thumbnail: true } });

  for (const file of files) {
    const uniqueId = randomChars(32);
    exportData.file_map[file.id] = uniqueId;

    exportData.files[uniqueId] = {
      name: file.name,
      original_name: file.originalName,
      type: file.mimetype as Zipline3Export['files'][0]['type'],
      size: file.size,
      user: file.userId ? exportData.user_map[file.userId] : null,
      thumbnail: file.thumbnail ? exportData.thumbnail_map[file.thumbnail.id] : undefined,
      max_views: file.maxViews,
      views: file.views,
      expires_at: file.expiresAt?.toISOString(),
      created_at: file.createdAt.toISOString(),
      favorite: file.favorite,
      password: file.password,
    };
  }

  const urls = await prisma.url.findMany();

  for (const url of urls) {
    const uniqueId = randomChars(32);
    exportData.url_map[url.id] = uniqueId;

    exportData.urls[uniqueId] = {
      destination: url.destination,
      vanity: url.vanity,
      created_at: url.createdAt.toISOString(),
      max_views: url.maxViews,
      views: url.views,
      user: exportData.user_map[url.userId],
      code: url.id,
    };
  }

  const invites = await prisma.invite.findMany();

  for (const invite of invites) {
    const uniqueId = randomChars(32);
    exportData.invite_map[invite.id] = uniqueId;

    exportData.invites[uniqueId] = {
      code: invite.code,
      expites_at: invite.expiresAt?.toISOString() ?? undefined,
      created_at: invite.createdAt.toISOString(),
      used: invite.used,
      created_by_user: exportData.user_map[invite.createdById],
    };
  }

  const stats = await prisma.stats.findMany();
  for (const stat of stats) {
    exportData.stats.push({
      created_at: stat.createdAt.toISOString(),
      data: stat.data,
    });
  }

  exportData.request.user = exportData.user_map[user.id];

  for (const folder of folders) {
    exportData.folders[exportData.folder_map[folder.id]].files = folder.files.map(
      (file) => exportData.file_map[file.id],
    );
  }

  const stringed = JSON.stringify(exportData);
  logger.info(`${user.id} created export of size ${bytesToHuman(stringed.length)}`);

  return res
    .setHeader('Content-Disposition', `attachment; filename="zipline_export_${Date.now()}.json"`)
    .setHeader('Content-Type', 'application/json')
    .send(stringed);
}

export default withZipline(handler, {
  methods: ['GET'],
  user: true,
  administrator: true,
});

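(Illustrative sketch, not part of the diff.) A heavily truncated example of the shape the export JSON takes, following the Zipline3Export type above; all values here are invented for illustration:

const exampleExport = {
  versions: { zipline: '3.7.11', node: 'v18.19.0', export: '3' }, // node version is hypothetical
  // every numeric database id is replaced by a random 32-character identifier,
  // and the *_map objects record that mapping
  user_map: { 1: 'k2f8w1' }, // shortened here for readability
  users: {
    k2f8w1: { username: 'administrator', administrator: true, super_administrator: true /* remaining fields as typed above */ },
  },
  // files, folders, urls, invites, thumbnails, and stats follow the same id-keyed pattern
  files: {},
  folders: {},
  urls: {},
  invites: {},
  thumbnails: {},
  stats: [],
};
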
@@ -58,7 +58,7 @@ export default function Code({ code, id, title, render, renderType }) {

      {!render && (
        <PrismCode
          sx={(t) => ({ height: '100vh', backgroundColor: t.colors.dark[8] })}
          sx={(t) => ({ height: '100vh', overflow: 'scroll', backgroundColor: t.colors.dark[8] })}
          code={code}
          ext={id.split('.').pop()}
        />
@@ -66,7 +66,7 @@ export default function Code({ code, id, title, render, renderType }) {

      {render && overrideRender && (
        <PrismCode
          sx={(t) => ({ height: '100vh', backgroundColor: t.colors.dark[8] })}
          sx={(t) => ({ height: '100vh', overflow: 'scroll', backgroundColor: t.colors.dark[8] })}
          code={code}
          ext={id.split('.').pop()}
        />

@@ -64,9 +64,14 @@ export default function EmbeddedFile({

    const img = new Image();
    img.addEventListener('load', function () {
      // my best attempt of recreating https://searchfox.org/mozilla-central/source/dom/html/ImageDocument.cpp#271-276
      // and it actually works
      // my best attempt of recreating
      // firefox: https://searchfox.org/mozilla-central/source/dom/html/ImageDocument.cpp#271-276
      // chromium-based: https://source.chromium.org/chromium/chromium/src/+/main:third_party/blink/renderer/core/html/image_document.cc

      // keeps image original if smaller than screen
      if (this.width <= window.innerWidth && this.height <= window.innerHeight) return;

      // resizes to fit screen
      const ratio = Math.min(innerHeight / this.naturalHeight, innerWidth / this.naturalWidth);
      const newWidth = Math.max(1, Math.floor(ratio * this.naturalWidth));
      const newHeight = Math.max(1, Math.floor(ratio * this.naturalHeight));

@@ -2,6 +2,7 @@ import { File } from '@prisma/client';
import { FastifyInstance, FastifyReply } from 'fastify';
import fastifyPlugin from 'fastify-plugin';
import exts from 'lib/exts';
import { parseRangeHeader } from 'lib/utils/range';

function dbFileDecorator(fastify: FastifyInstance, _, done) {
  fastify.decorateReply('dbFile', dbFile);
@@ -13,19 +14,29 @@ function dbFileDecorator(fastify: FastifyInstance, _, done) {
    const ext = file.name.split('.').pop();
    if (Object.keys(exts).includes(ext)) return this.server.nextHandle(this.request.raw, this.raw);

    const data = await this.server.datasource.get(file.name);
    if (!data) return this.notFound();

    const size = await this.server.datasource.size(file.name);
    if (size === null) return this.notFound();

    this.header('Content-Length', size);
    // eslint-disable-next-line prefer-const
    let [rangeStart, rangeEnd] = parseRangeHeader(this.request.headers.range);
    if (rangeStart >= rangeEnd)
      return this.code(416)
        .header('Content-Range', `bytes */${size - 1}`)
        .send();
    if (rangeEnd === Infinity) rangeEnd = size - 1;

    const data = await this.server.datasource.get(file.name, rangeStart, rangeEnd);

    // only send content-range if the client asked for it
    if (this.request.headers.range) {
      this.code(206);
      this.header('Content-Range', `bytes ${rangeStart}-${rangeEnd}/${size}`);
    }

    this.header('Content-Length', rangeEnd - rangeStart + 1);
    this.header('Content-Type', download ? 'application/octet-stream' : file.mimetype);
    this.header('Content-Disposition', `inline; filename="${encodeURI(file.originalName || file.name)}"`);
    if (file.mimetype.startsWith('video/') || file.mimetype.startsWith('audio/')) {
      this.header('Accept-Ranges', 'bytes');
      // https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Range
      this.header('Content-Range', `bytes 0-${size - 1}/${size}`);
    }
    this.header('Accept-Ranges', 'bytes');

    return this.send(data);
  }

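(Illustrative sketch, not part of the diff.) For a hypothetical 1000-byte file requested with Range: bytes=200-, the decorator above would answer 206 Partial Content; a minimal sketch of the header math it performs, again assuming the repository's 'lib/*' path alias:

import { parseRangeHeader } from 'lib/utils/range';

const size = 1000; // hypothetical file size reported by the datasource
let [rangeStart, rangeEnd] = parseRangeHeader('bytes=200-'); // [200, Infinity]
if (rangeEnd === Infinity) rangeEnd = size - 1; // clamp the open-ended range to 999

console.log(`Content-Range: bytes ${rangeStart}-${rangeEnd}/${size}`); // bytes 200-999/1000
console.log(`Content-Length: ${rangeEnd - rangeStart + 1}`); // 800
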
@@ -28,23 +28,42 @@ if (isMainThread) {
async function loadThumbnail(path) {
  const args = ['-i', path, '-frames:v', '1', '-f', 'mjpeg', 'pipe:1'];

  const child = spawn(ffmpeg, args, { stdio: ['ignore', 'pipe', 'ignore'] });
  const child = spawn(ffmpeg, args, { stdio: ['ignore', 'pipe', 'pipe'] });

  const data: Buffer = await new Promise((resolve, reject) => {
    const buffers = [];
    const errorBuffers = [];

    child.stderr.on('data', (chunk) => {
      errorBuffers.push(chunk);
    });

    child.stdout.on('data', (chunk) => {
      buffers.push(chunk);
    });

    child.once('error', reject);
    child.once('error', (...a) => {
      console.log(a);

      reject();
    });
    child.once('close', (code) => {
      if (code !== 0) {
        const msg = buffers.join('').trim();
        logger.debug(`cmd: ${ffmpeg} ${args.join(' ')}`);
        logger.error(`while ${path} child exited with code ${code}: ${msg}`);
        const msg = errorBuffers.join('').trim().split('\n');

        reject(new Error(`child exited with code ${code}`));
        logger.debug(`cmd: ${ffmpeg} ${args.join(' ')}\n${msg.join('\n')}`);
        logger.error(`child exited with code ${code}: ${msg[msg.length - 1]}`);

        if (msg[msg.length - 1].includes('does not contain any stream')) {
          // mismatched mimetype, for example a video/ogg (.ogg) file with no video stream since
          // for this specific case just set the mimetype to audio/ogg
          // the method will return an empty buffer since there is no video stream

          logger.error(`file ${path} does not contain any video stream, it is probably an audio file`);
          resolve(Buffer.alloc(0));
        }

        reject(new Error(`child exited with code ${code} ffmpeg output:\n${msg.join('\n')}`));
      } else {
        const buffer = Buffer.allocUnsafe(buffers.reduce((acc, val) => acc + val.length, 0));

@@ -99,6 +118,22 @@ async function start() {
  const thumbnail = await loadThumbnail(tmpFile);
  logger.debug(`loaded thumbnail: ${thumbnail.length} bytes mjpeg`);

  if (thumbnail.length === 0 && file.mimetype === 'video/ogg') {
    logger.info('file might be an audio file, setting mimetype to audio/ogg to avoid future errors');
    await prisma.file.update({
      where: {
        id: file.id,
      },
      data: {
        mimetype: 'audio/ogg',
      },
    });

    await rm(tmpFile);
    await prisma.$disconnect();
    process.exit(0);
  }

  const { thumbnail: thumb } = await prisma.file.update({
    where: {
      id: file.id,

yarn.lock (20 changes)
@@ -1956,6 +1956,15 @@ __metadata:
  languageName: node
  linkType: hard

"@types/node@npm:18":
  version: 18.19.67
  resolution: "@types/node@npm:18.19.67"
  dependencies:
    undici-types: ~5.26.4
  checksum: 700f92c6a0b63352ce6327286392adab30bb17623c2a788811e9cf092c4dc2fb5e36ca4727247a981b3f44185fdceef20950a3b7a8ab72721e514ac037022a08
  languageName: node
  linkType: hard

"@types/node@npm:^10.0.3":
  version: 10.17.60
  resolution: "@types/node@npm:10.17.60"
@@ -1970,15 +1979,6 @@ __metadata:
  languageName: node
  linkType: hard

"@types/node@npm:^18.18.10":
  version: 18.18.10
  resolution: "@types/node@npm:18.18.10"
  dependencies:
    undici-types: ~5.26.4
  checksum: 1245a14a38bfbe115b8af9792dbe87a1c015f2532af5f0a25a073343fefa7b2edfd95ff3830003d1a1278ce7f9ee0e78d4e5454d7a60af65832c8d77f4032ac8
  languageName: node
  linkType: hard

"@types/normalize-package-data@npm:^2.4.0":
  version: 2.4.4
  resolution: "@types/normalize-package-data@npm:2.4.4"
@@ -11827,7 +11827,7 @@ __metadata:
    "@types/katex": ^0.16.6
    "@types/minio": ^7.1.1
    "@types/multer": ^1.4.10
    "@types/node": ^18.18.10
    "@types/node": 18
    "@types/qrcode": ^1.5.5
    "@types/react": ^18.2.37
    "@types/sharp": ^0.32.0