Compare commits


13 Commits

Author  SHA1  Message  Date
diced  1e2b8efb13  feat(v3.4.5): version  2022-06-19 17:46:45 -07:00
diced  8495963094  feat(v3.4.5): exporting images and more stuff  2022-06-19 17:46:20 -07:00
diced  06d1c0bc3b  fix(api): make delete user with images actually delete their images from the datasource  2022-06-19 17:26:52 -07:00
diced  5965c2e237  fix(config): extention -> extension  2022-06-19 16:44:55 -07:00
diced  fb34dfadb0  fix(config): make endpoint nullable  2022-06-18 13:47:59 -07:00
diced  13b0ac737b  feat(datasource): s3 path styles  2022-06-18 13:39:12 -07:00
diced  300430b3ec  Merge branch 'trunk' of github.com:diced/zipline into trunk  2022-06-18 12:38:42 -07:00
NebulaBC  cf6f154e6e  fix: add env vars for s3 endpoint (#153)  2022-06-17 19:05:20 -07:00
diced  2ddf8c0cdb  fix(api): password protected images wont show up on root  2022-06-17 15:35:29 -07:00
NebulaBC  2a402f77b5  feat(api): S3 endpoint support (#152)  2022-06-17 14:29:34 -07:00
    * S3 endpoint support: adding endpoint support to S3 allows for other S3-compatible uploaders to be used
    * Fix formatting error
Han Cen  7b2c31658a  feat(api): root uploader route (#150)  2022-06-17 14:20:21 -07:00
Han Cen  7a91a60af9  fix: fix build (#149)  2022-06-17 08:35:53 -07:00
diced  bfa6c70bf3  chore(deps): update stuff  2022-06-16 14:22:26 -07:00
25 changed files with 2200 additions and 1354 deletions

File diff suppressed because one or more lines are too long

View File

@@ -1,3 +1,7 @@
nodeLinker: node-modules
plugins:
- path: .yarn/plugins/@yarnpkg/plugin-interactive-tools.cjs
spec: "@yarnpkg/plugin-interactive-tools"
yarnPath: .yarn/releases/yarn-3.2.1.cjs

View File

@@ -16,4 +16,4 @@ length = 6
directory = './uploads'
user_limit = 104900000 # 100mb
admin_limit = 104900000 # 100mb
disabled_extentions = ['jpg']
disabled_extensions = ['jpg']

View File

@@ -8,4 +8,7 @@ module.exports = {
},
];
},
api: {
responseLimit: false,
},
};

View File

@@ -1,6 +1,6 @@
{
"name": "zipline",
"version": "3.4.4",
"version": "3.4.5",
"license": "MIT",
"scripts": {
"dev": "node esbuild.config.js && REACT_EDITOR=code-insiders NODE_ENV=development node dist/server",
@@ -18,48 +18,48 @@
},
"dependencies": {
"@iarna/toml": "2.2.5",
"@mantine/core": "^3.6.9",
"@mantine/dropzone": "^3.6.9",
"@mantine/hooks": "^3.6.9",
"@mantine/modals": "^3.6.9",
"@mantine/next": "^3.6.9",
"@mantine/notifications": "^3.6.9",
"@mantine/prism": "^3.6.11",
"@mantine/core": "^4.2.9",
"@mantine/dropzone": "^4.2.9",
"@mantine/hooks": "^4.2.9",
"@mantine/modals": "^4.2.9",
"@mantine/next": "^4.2.9",
"@mantine/notifications": "^4.2.9",
"@mantine/prism": "^4.2.9",
"@modulz/radix-icons": "^4.0.0",
"@prisma/client": "^3.14.0",
"@prisma/migrate": "^3.14.0",
"@prisma/sdk": "^3.14.0",
"@reduxjs/toolkit": "^1.6.0",
"argon2": "^0.28.2",
"aws-sdk": "^2.1085.0",
"colorette": "^1.2.2",
"cookie": "^0.4.1",
"fecha": "^4.2.1",
"@prisma/client": "^3.15.2",
"@prisma/migrate": "^3.15.2",
"@prisma/sdk": "^3.15.2",
"@reduxjs/toolkit": "^1.8.2",
"argon2": "^0.28.5",
"aws-sdk": "^2.1156.0",
"colorette": "^2.0.19",
"cookie": "^0.5.0",
"fecha": "^4.2.3",
"fflate": "^0.7.3",
"find-my-way": "^5.2.0",
"multer": "^1.4.4",
"next": "^12.1.0",
"prisma": "^3.14.0",
"react": "^17.0.2",
"react-dom": "^17.0.2",
"react-redux": "^7.2.4",
"react-table": "^7.7.0",
"redux": "^4.1.0",
"redux-thunk": "^2.3.0",
"find-my-way": "^6.3.0",
"multer": "^1.4.5-lts.1",
"next": "^12.1.6",
"prisma": "^3.15.2",
"react": "^18.2.0",
"react-dom": "^18.2.0",
"react-redux": "^8.0.2",
"react-table": "^7.8.0",
"redux": "^4.2.0",
"redux-thunk": "^2.4.1",
"uuid": "^8.3.2",
"yup": "^0.32.9"
"yup": "^0.32.11"
},
"devDependencies": {
"@types/cookie": "^0.4.0",
"@types/multer": "^1.4.6",
"@types/cookie": "^0.5.1",
"@types/multer": "^1.4.7",
"@types/node": "^15.12.2",
"babel-plugin-import": "^1.13.3",
"esbuild": "^0.14.23",
"babel-plugin-import": "^1.13.5",
"esbuild": "^0.14.44",
"eslint": "^7.32.0",
"eslint-config-next": "11.0.0",
"eslint-config-next": "12.1.6",
"npm-run-all": "^4.1.5",
"ts-node": "^10.0.0",
"typescript": "^4.3.2"
"ts-node": "^10.8.1",
"typescript": "^4.7.3"
},
"repository": {
"type": "git",

View File

@@ -123,6 +123,7 @@ export default function Layout({ children, user }) {
const openResetToken = () => modals.openConfirmModal({
title: 'Reset Token',
centered: true,
overlayBlur: 3,
children: (
<Text size='sm'>
Once you reset your token, you will have to update any uploaders to use this new token.
@@ -155,6 +156,7 @@ export default function Layout({ children, user }) {
const openCopyToken = () => modals.openConfirmModal({
title: 'Copy Token',
centered: true,
overlayBlur: 3,
children: (
<Text size='sm'>
Make sure you don&apos;t share this token with anyone as they will be able to upload files on your behalf.
@@ -181,7 +183,7 @@ export default function Layout({ children, user }) {
fixed
navbar={
<Navbar
padding='md'
p='md'
hiddenBreakpoint='sm'
hidden={!opened}
width={{ sm: 200, lg: 230 }}
@@ -247,7 +249,7 @@ export default function Layout({ children, user }) {
</Navbar>
}
header={
<Header height={70} padding='md'>
<Header height={70} p='md'>
<div style={{ display: 'flex', alignItems: 'center', height: '100%' }}>
<MediaQuery largerThan='sm' styles={{ display: 'none' }}>
<Burger
@@ -325,7 +327,7 @@ export default function Layout({ children, user }) {
</Header>
}
>
<Paper withBorder padding='md' shadow='xs'>{children}</Paper>
<Paper withBorder p='md' shadow='xs'>{children}</Paper>
</AppShell>
);
}
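
The prop changes above track the Mantine 3.x to 4.x bump in package.json: layout components take the shorthand `p` prop instead of `padding`, and confirm modals gain an `overlayBlur` option. A minimal sketch of the same pattern, using a standalone component rather than Zipline's actual Layout:

```tsx
import React from 'react';
import { Paper, Text } from '@mantine/core';
import { useModals } from '@mantine/modals';

export function Example({ children }: { children: React.ReactNode }) {
  const modals = useModals();

  // Mantine v4 confirm modal with the new overlayBlur option (values here are illustrative).
  const openResetToken = () =>
    modals.openConfirmModal({
      title: 'Reset Token',
      centered: true,
      overlayBlur: 3,
      labels: { confirm: 'Yes', cancel: 'No' },
      children: <Text size='sm'>Once you reset your token, existing uploaders stop working.</Text>,
      onConfirm: () => {},
    });

  // Mantine v4 renames the style shorthand: padding='md' becomes p='md'.
  return (
    <Paper withBorder p='md' shadow='xs' onClick={openResetToken}>
      {children}
    </Paper>
  );
}
```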

View File

@@ -1,13 +1,14 @@
import React, { useState } from 'react';
import React, { useEffect, useState } from 'react';
import useFetch from 'hooks/useFetch';
import Link from 'components/Link';
import { useStoreDispatch, useStoreSelector } from 'lib/redux/store';
import { updateUser } from 'lib/redux/reducers/user';
import { randomId, useForm } from '@mantine/hooks';
import { Tooltip, TextInput, Button, Text, Title, Group, ColorInput, MultiSelect, Space } from '@mantine/core';
import { DownloadIcon, Cross1Icon } from '@modulz/radix-icons';
import { randomId, useForm, useInterval } from '@mantine/hooks';
import { Card, Tooltip, TextInput, Button, Text, Title, Group, ColorInput, MultiSelect, Space, Box, Table } from '@mantine/core';
import { DownloadIcon, Cross1Icon, TrashIcon } from '@modulz/radix-icons';
import { useNotifications } from '@mantine/notifications';
import { useModals } from '@mantine/modals';
function VarsTooltip({ children }) {
return (
@@ -25,11 +26,44 @@ function VarsTooltip({ children }) {
);
}
function ExportDataTooltip({ children }) {
return <Tooltip position='top' placement='center' color='' label='After clicking, if you have a lot of files the export can take a while to complete. A list of previous exports will be below to download.'>{children}</Tooltip>;
}
function ExportTable({ rows, columns }) {
return (
<Box sx={{ pt: 1 }} >
<Table highlightOnHover>
<thead>
<tr>
{columns.map(col => (
<th key={randomId()}>{col.name}</th>
))}
</tr>
</thead>
<tbody>
{rows.map(row => (
<tr key={randomId()}>
{columns.map(col => (
<td key={randomId()}>
{col.format ? col.format(row[col.id]) : row[col.id]}
</td>
))}
</tr>
))}
</tbody>
</Table>
</Box>
);
}
export default function Manage() {
const user = useStoreSelector(state => state.user);
const dispatch = useStoreDispatch();
const notif = useNotifications();
const modals = useModals();
const [exports, setExports] = useState([]);
const [domains, setDomains] = useState(user.domains ?? []);
const genShareX = (withEmbed: boolean = false, withZws: boolean = false) => {
@@ -41,8 +75,8 @@ export default function Manage() {
RequestURL: `${window.location.protocol + '//' + window.location.hostname + (window.location.port ? ':' + window.location.port : '')}/api/upload`,
Headers: {
Authorization: user?.token,
...(withEmbed && {Embed: 'true'}),
...(withZws && {ZWS: 'true'}),
...(withEmbed && { Embed: 'true' }),
...(withZws && { ZWS: 'true' }),
},
URL: '$json:files[0]$',
Body: 'MultipartFormData',
@@ -127,6 +161,78 @@ export default function Manage() {
}
};
const exportData = async () => {
const res = await useFetch('/api/user/export', 'POST');
if (res.url) {
notif.showNotification({
title: 'Export started...',
loading: true,
message: 'If you have a lot of files, the export may take a while. The list of exports will be updated every 30s.',
});
}
};
const getExports = async () => {
const res = await useFetch('/api/user/export');
setExports(res.exports.map(s => ({
date: new Date(Number(s.split('_')[3].slice(0, -4))),
full: s,
})).sort((a, b) => a.date.getTime() - b.date.getTime()));
};
const handleDelete = async () => {
const res = await useFetch('/api/user/files', 'DELETE', {
all: true,
});
if (!res.count) {
notif.showNotification({
title: 'Couldn\'t delete files',
message: res.error,
color: 'red',
icon: <Cross1Icon />,
});
} else {
notif.showNotification({
title: 'Deleted files',
message: `${res.count} files deleted`,
color: 'green',
icon: <TrashIcon />,
});
}
};
const openDeleteModal = () => modals.openConfirmModal({
title: 'Are you sure you want to delete all of your images?',
closeOnConfirm: false,
centered: true,
overlayBlur: 3,
labels: { confirm: 'Yes', cancel: 'No' },
onConfirm: () => {
modals.openConfirmModal({
title: 'Are you really sure?',
centered: true,
overlayBlur: 3,
labels: { confirm: 'Yes', cancel: 'No' },
onConfirm: () => {
handleDelete();
modals.closeAll();
},
onCancel: () => {
modals.closeAll();
},
});
},
});
const interval = useInterval(() => getExports(), 30000);
useEffect(() => {
getExports();
interval.start();
}, []);
return (
<>
<Title>Manage User</Title>
@@ -159,6 +265,24 @@ export default function Manage() {
</Group>
</form>
<Title sx={{ paddingTop: 12 }}>Manage Data</Title>
<Text color='gray' sx={{ paddingBottom: 12 }}>Delete, or export your data into a zip file.</Text>
<Group>
<Button onClick={openDeleteModal} rightIcon={<TrashIcon />}>Delete All Data</Button>
<ExportDataTooltip><Button onClick={exportData} rightIcon={<DownloadIcon />}>Export Data</Button></ExportDataTooltip>
</Group>
<Card mt={22}>
<ExportTable
columns={[
{ id: 'name', name: 'Name' },
{ id: 'date', name: 'Date' },
]}
rows={exports ? exports.map((x, i) => ({
name: <Link href={'/api/user/export?name=' + x.full}>Export {i + 1}</Link>,
date: x.date.toLocaleString(),
})) : []} />
</Card>
<Title sx={{ paddingTop: 12, paddingBottom: 12 }}>ShareX Config</Title>
<Group>
<Button onClick={() => genShareX(false)} rightIcon={<DownloadIcon />}>ShareX Config</Button>
@@ -167,4 +291,4 @@ export default function Manage() {
</Group>
</>
);
}
}
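
For reference, a hedged sketch of the export flow this page wires up: a POST to /api/user/export starts an export, a GET lists previous exports, and each export downloads via the name query parameter. Plain fetch is used instead of Zipline's useFetch wrapper, and the response shapes are inferred from how the dashboard consumes them.

```ts
async function startExport(): Promise<string | undefined> {
  const res = await fetch('/api/user/export', { method: 'POST' });
  const data = await res.json();
  return data.url; // e.g. "/api/user/export?name=zipline_export_<userId>_<timestamp>.zip"
}

async function listExports(): Promise<string[]> {
  const res = await fetch('/api/user/export');
  const data = await res.json();
  return data.exports ?? []; // file names; download each at /api/user/export?name=<file>
}
```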

View File

@@ -109,11 +109,14 @@ export default function Users() {
title: `Delete ${user.username}?`,
closeOnConfirm: false,
centered: true,
overlayBlur: 3,
labels: { confirm: 'Yes', cancel: 'No' },
onConfirm: () => {
modals.openConfirmModal({
title: `Delete ${user.username}'s images?`,
labels: { confirm: 'Yes', cancel: 'No' },
centered: true,
overlayBlur: 3,
onConfirm: () => {
handleDelete(user, true);
modals.closeAll();

View File

@@ -1,7 +1,6 @@
import type { Config } from './types';
import readConfig from './readConfig';
import validateConfig from '../server/validateConfig';
if (!global.config) global.config = validateConfig(readConfig()) as unknown as Config;
if (!global.config) global.config = validateConfig(readConfig());
export default global.config;

View File

@@ -1,4 +1,4 @@
import { createReadStream, ReadStream } from 'fs';
import { createReadStream, existsSync, ReadStream } from 'fs';
import { readdir, rm, stat, writeFile } from 'fs/promises';
import { join } from 'path';
import { Datasource } from './datasource';
@@ -19,8 +19,11 @@ export class Local extends Datasource {
}
public get(file: string): ReadStream {
const full = join(process.cwd(), this.path, file);
if (!existsSync(full)) return null;
try {
return createReadStream(join(process.cwd(), this.path, file));
return createReadStream(full);
} catch (e) {
return null;
}

View File

@@ -1,27 +1,28 @@
import { Datasource } from './datasource';
import AWS from 'aws-sdk';
import { Readable } from 'stream';
import { ConfigS3Datasource } from 'lib/types';
export class S3 extends Datasource {
public name: string = 'S3';
public s3: AWS.S3;
public constructor(
public accessKey: string,
public secretKey: string,
public bucket: string,
public config: ConfigS3Datasource,
) {
super();
this.s3 = new AWS.S3({
accessKeyId: accessKey,
secretAccessKey: secretKey,
accessKeyId: config.access_key_id,
endpoint: config.endpoint || null,
s3ForcePathStyle: config.force_s3_path,
secretAccessKey: config.secret_access_key,
});
}
public async save(file: string, data: Buffer): Promise<void> {
return new Promise((resolve, reject) => {
this.s3.upload({
Bucket: this.bucket,
Bucket: this.config.bucket,
Key: file,
Body: data,
}, err => {
@@ -37,7 +38,7 @@ export class S3 extends Datasource {
public async delete(file: string): Promise<void> {
return new Promise((resolve, reject) => {
this.s3.deleteObject({
Bucket: this.bucket,
Bucket: this.config.bucket,
Key: file,
}, err => {
if (err) {
@@ -52,7 +53,7 @@ export class S3 extends Datasource {
public get(file: string): Readable {
// Unfortunately, aws-sdk is bad and the stream still loads everything into memory.
return this.s3.getObject({
Bucket: this.bucket,
Bucket: this.config.bucket,
Key: file,
}).createReadStream();
}
@@ -60,7 +61,7 @@ export class S3 extends Datasource {
public async size(): Promise<number> {
return new Promise((resolve, reject) => {
this.s3.listObjects({
Bucket: this.bucket,
Bucket: this.config.bucket,
}, (err, data) => {
if (err) {
reject(err);
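
The constructor now takes the whole ConfigS3Datasource object, which is what makes the endpoint and path-style options possible. A minimal sketch of the resulting client setup, assuming a hypothetical MinIO-style S3-compatible endpoint:

```ts
import AWS from 'aws-sdk';

// Shape mirrors the ConfigS3Datasource interface from the diff; the values are made up.
const s3Config = {
  access_key_id: 'minio-access-key',
  secret_access_key: 'minio-secret-key',
  endpoint: 'http://localhost:9000', // any S3-compatible endpoint (MinIO, Backblaze B2, Spaces, ...)
  bucket: 'zipline',
  force_s3_path: true, // path-style URLs, which many S3 clones require
};

const s3 = new AWS.S3({
  accessKeyId: s3Config.access_key_id,
  secretAccessKey: s3Config.secret_access_key,
  s3ForcePathStyle: s3Config.force_s3_path,
  ...(s3Config.endpoint ? { endpoint: s3Config.endpoint } : {}),
});

// Example call against the configured bucket.
s3.listObjects({ Bucket: s3Config.bucket }, (err, data) => {
  if (err) console.error(err);
  else console.log(data.Contents?.length ?? 0, 'objects');
});
```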

View File

@@ -6,7 +6,7 @@ if (!global.datasource) {
switch (config.datasource.type) {
case 's3':
Logger.get('datasource').info(`Using S3(${config.datasource.s3.bucket}) datasource`);
global.datasource = new S3(config.datasource.s3.access_key_id, config.datasource.s3.secret_access_key, config.datasource.s3.bucket);
global.datasource = new S3(config.datasource.s3);
break;
case 'local':
Logger.get('datasource').info(`Using local(${config.datasource.local.directory}) datasource`);

View File

@@ -19,13 +19,15 @@ const envValues = [
e('DATASOURCE_LOCAL_DIRECTORY', 'string', (c, v) => c.datasource.local.directory = v),
e('DATASOURCE_S3_ACCESS_KEY_ID', 'string', (c, v) => c.datasource.s3.access_key_id = v ),
e('DATASOURCE_S3_SECRET_ACCESS_KEY', 'string', (c, v) => c.datasource.s3.secret_access_key = v),
e('DATASOURCE_S3_ENDPOINT', 'string', (c, v) => c.datasource.s3.endpoint = v ?? null),
e('DATASOURCE_S3_FORCE_S3_PATH', 'string', (c, v) => c.datasource.s3.force_s3_path = v ?? false),
e('DATASOURCE_S3_BUCKET', 'string', (c, v) => c.datasource.s3.bucket = v),
e('UPLOADER_ROUTE', 'string', (c, v) => c.uploader.route = v),
e('UPLOADER_LENGTH', 'number', (c, v) => c.uploader.length = v),
e('UPLOADER_ADMIN_LIMIT', 'number', (c, v) => c.uploader.admin_limit = v),
e('UPLOADER_USER_LIMIT', 'number', (c, v) => c.uploader.user_limit = v),
e('UPLOADER_DISABLED_EXTS', 'array', (c, v) => v ? c.uploader.disabled_extentions = v : c.uploader.disabled_extentions = []),
e('UPLOADER_DISABLED_EXTS', 'array', (c, v) => v ? c.uploader.disabled_extensions = v : c.uploader.disabled_extensions = []),
e('URLS_ROUTE', 'string', (c, v) => c.urls.route = v),
e('URLS_LENGTH', 'number', (c, v) => c.urls.length = v),
@@ -68,7 +70,9 @@ function tryReadEnv(): Config {
s3: {
access_key_id: undefined,
secret_access_key: undefined,
endpoint: undefined,
bucket: undefined,
force_s3_path: undefined,
},
},
uploader: {
@@ -76,7 +80,7 @@ function tryReadEnv(): Config {
length: undefined,
admin_limit: undefined,
user_limit: undefined,
disabled_extentions: undefined,
disabled_extensions: undefined,
},
urls: {
route: undefined,
@@ -120,4 +124,4 @@ function parseToBoolean(value) {
function parseToArray(value) {
return value.split(',');
}
}
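
A hedged illustration of the two new variables in use (all values hypothetical): with them set, the env reader above ends up with an s3 section roughly equivalent to this object, and leaving them unset falls back to endpoint: null and force_s3_path: false.

```ts
const s3Section = {
  access_key_id: 'AKIA...',                        // DATASOURCE_S3_ACCESS_KEY_ID
  secret_access_key: '...',                        // DATASOURCE_S3_SECRET_ACCESS_KEY
  endpoint: 'https://nyc3.digitaloceanspaces.com', // DATASOURCE_S3_ENDPOINT (null when unset)
  bucket: 'zipline-uploads',                       // DATASOURCE_S3_BUCKET
  force_s3_path: false,                            // DATASOURCE_S3_FORCE_S3_PATH (defaults to false)
};
```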

View File

@@ -1,54 +1,13 @@
// https://github.com/mikecao/umami/blob/master/redux/store.js
import { useMemo } from 'react';
import { Action, CombinedState, configureStore, EnhancedStore } from '@reduxjs/toolkit';
import thunk, { ThunkAction } from 'redux-thunk';
import { configureStore } from '@reduxjs/toolkit';
import rootReducer from './reducers';
import { User } from './reducers/user';
import { useDispatch, TypedUseSelectorHook, useSelector } from 'react-redux';
let store: EnhancedStore<CombinedState<{
user: User;
}>>;
export function getStore(preloadedState) {
return configureStore({
reducer: rootReducer,
middleware: [thunk],
preloadedState,
});
}
export const initializeStore = preloadedState => {
let _store = store ?? getStore(preloadedState);
if (preloadedState && store) {
_store = getStore({
...store.getState(),
...preloadedState,
});
store = undefined;
}
if (typeof window === 'undefined') return _store;
if (!store) store = _store;
return _store;
};
export function useStore(initialState?: User) {
return useMemo(() => initializeStore(initialState), [initialState]);
}
export const store = configureStore({
reducer: rootReducer,
});
export type AppState = ReturnType<typeof store.getState>
export type AppDispatch = typeof store.dispatch
export type AppThunk<ReturnType = void> = ThunkAction<
ReturnType,
AppState,
unknown,
Action<User>
>
export const useStoreDispatch = () => useDispatch<AppDispatch>();
export const useStoreSelector: TypedUseSelectorHook<AppState> = useSelector;
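
The old per-request store initialization (needed for SSR preloading) is replaced with a single module-level store plus typed hooks. A self-contained sketch of the pattern, using a stand-in reducer rather than Zipline's real user slice:

```ts
import { configureStore, createSlice, PayloadAction } from '@reduxjs/toolkit';
import { useDispatch, useSelector, TypedUseSelectorHook } from 'react-redux';

const userSlice = createSlice({
  name: 'user',
  initialState: { username: null as string | null },
  reducers: {
    updateUser: (state, action: PayloadAction<string>) => {
      state.username = action.payload;
    },
  },
});

// One store for the whole app; no per-request initializeStore/useStore dance.
export const store = configureStore({ reducer: { user: userSlice.reducer } });

export type AppState = ReturnType<typeof store.getState>;
export type AppDispatch = typeof store.dispatch;
export const useStoreDispatch = () => useDispatch<AppDispatch>();
export const useStoreSelector: TypedUseSelectorHook<AppState> = useSelector;
```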

View File

@@ -38,7 +38,9 @@ export interface ConfigLocalDatasource {
export interface ConfigS3Datasource {
access_key_id: string;
secret_access_key: string;
endpoint?: string;
bucket: string;
force_s3_path: boolean;
}
export interface ConfigUploader {
@@ -55,7 +57,7 @@ export interface ConfigUploader {
user_limit: number;
// Disabled extensions to block from uploading
disabled_extentions: string[];
disabled_extensions: string[];
}
export interface ConfigUrls {

View File

@@ -33,11 +33,11 @@ export default function EmbeddedImage({ image, user, pass }) {
const imageEl = document.getElementById('image_content') as HTMLImageElement;
const img = new Image();
img.addEventListener('load', function() {
img.addEventListener('load', function () {
if (this.naturalWidth > innerWidth) imageEl.width = Math.floor(this.naturalWidth * Math.min((innerHeight / this.naturalHeight), (innerWidth / this.naturalWidth)));
else imageEl.width = this.naturalWidth;
});
img.src = url || dataURL('/r');
if (url) {
imageEl.src = url;
@@ -71,7 +71,7 @@ export default function EmbeddedImage({ image, user, pass }) {
onClose={() => setOpened(false)}
title='Password Protected'
centered={true}
hideCloseButton={true}
withCloseButton={true}
closeOnEscape={false}
closeOnClickOutside={false}
>
@@ -95,11 +95,13 @@ export default function EmbeddedImage({ image, user, pass }) {
}
export const getServerSideProps: GetServerSideProps = async (context) => {
const id = context.params.id[1];
const route = context.params.id[0];
const routes = [config.uploader.route.substring(1), config.urls.route.substring(1)];
if (!routes.includes(route)) return { notFound: true };
if (route === routes[1]) {
const serve_on_root = /(^[^\\.]+\.[^\\.]+)/.test(route);
const id = serve_on_root ? route : context.params.id[1];
const uploader_route = config.uploader.route.substring(1);
if (route === config.urls.route.substring(1)) {
const url = await prisma.url.findFirst({
where: {
OR: [
@@ -121,7 +123,7 @@ export const getServerSideProps: GetServerSideProps = async (context) => {
},
};
} else {
} else if (uploader_route === '' ? /(^[^\\.]+\.[^\\.]+)/.test(route) : route === uploader_route) {
const image = await prisma.image.findFirst({
where: {
OR: [
@@ -184,5 +186,7 @@ export const getServerSideProps: GetServerSideProps = async (context) => {
pass,
},
};
} else {
return { notFound: true };
}
};

View File

@@ -1,12 +1,10 @@
import React from 'react';
import { Provider } from 'react-redux';
import Head from 'next/head';
import { useStore } from 'lib/redux/store';
import { store } from 'lib/redux/store';
import ZiplineTheming from 'components/Theming';
export default function MyApp({ Component, pageProps }) {
const store = useStore();
return (
<Provider store={store}>
<Head>

View File

@@ -38,7 +38,7 @@ async function handler(req: NextApiReq, res: NextApiRes) {
if (file.size > zconfig.uploader[user.administrator ? 'admin_limit' : 'user_limit']) return res.error(`file[${i}] size too big`);
const ext = file.originalname.split('.').pop();
if (zconfig.uploader.disabled_extentions.includes(ext)) return res.error('disabled extension recieved: ' + ext);
if (zconfig.uploader.disabled_extensions.includes(ext)) return res.error('disabled extension recieved: ' + ext);
let fileName: string;
switch (format) {
@@ -79,9 +79,9 @@ async function handler(req: NextApiReq, res: NextApiRes) {
Logger.get('image').info(`User ${user.username} (${user.id}) uploaded an image ${image.file} (${image.id})`);
if (user.domains.length) {
const domain = user.domains[Math.floor(Math.random() * user.domains.length)];
files.push(`${domain}${zconfig.uploader.route}/${invis ? invis.invis : image.file}`);
files.push(`${domain}${zconfig.uploader.route === '/' ? '' : zconfig.uploader.route}/${invis ? invis.invis : image.file}`);
} else {
files.push(`${zconfig.core.secure ? 'https' : 'http'}://${req.headers.host}${zconfig.uploader.route}/${invis ? invis.invis : image.file}`);
files.push(`${zconfig.core.secure ? 'https' : 'http'}://${req.headers.host}${zconfig.uploader.route === '/' ? '' : zconfig.uploader.route}/${invis ? invis.invis : image.file}`);
}
}

View File

@@ -0,0 +1,134 @@
import { NextApiReq, NextApiRes, withZipline } from 'middleware/withZipline';
import prisma from 'lib/prisma';
import Logger from 'lib/logger';
import { Zip, ZipPassThrough } from 'fflate';
import datasource from 'lib/ds';
import { readdir } from 'fs/promises';
import { createReadStream, createWriteStream } from 'fs';
async function handler(req: NextApiReq, res: NextApiRes) {
const user = await req.user();
if (!user) return res.forbid('not logged in');
if (req.method === 'POST') {
const files = await prisma.image.findMany({
where: {
userId: user.id,
},
});
const zip = new Zip();
const export_name = `zipline_export_${user.id}_${Date.now()}.zip`;
const write_stream = createWriteStream(`/tmp/${export_name}`);
const onBackpressure = (stream, outputStream, cb) => {
const runCb = () => {
// Pause if either output or internal backpressure should be applied
cb(applyOutputBackpressure || backpressureBytes > backpressureThreshold);
};
// Internal backpressure (for when AsyncZipDeflate is slow)
const backpressureThreshold = 65536;
let backpressure = [];
let backpressureBytes = 0;
const push = stream.push;
stream.push = (dat, final) => {
backpressure.push(dat.length);
backpressureBytes += dat.length;
runCb();
push.call(stream, dat, final);
};
let ondata = stream.ondata;
const ondataPatched = (err, dat, final) => {
ondata.call(stream, err, dat, final);
backpressureBytes -= backpressure.shift();
runCb();
};
if (ondata) {
stream.ondata = ondataPatched;
} else {
// You can remove this condition if you make sure to
// call zip.add(file) before calling onBackpressure
Object.defineProperty(stream, 'ondata', {
get: () => ondataPatched,
set: cb => ondata = cb,
});
}
// Output backpressure (for when outputStream is slow)
let applyOutputBackpressure = false;
const write = outputStream.write;
outputStream.write = (data) => {
const outputNotFull = write.call(outputStream, data);
applyOutputBackpressure = !outputNotFull;
runCb();
};
outputStream.on('drain', () => {
applyOutputBackpressure = false;
runCb();
});
};
zip.ondata = async (err, data, final) => {
if (!err) {
write_stream.write(data);
if (final) {
write_stream.close();
Logger.get('user').info(`Export for ${user.username} (${user.id}) has completed and is available at ${export_name}`);
}
} else {
write_stream.close();
Logger.get('user').error(`Export for ${user.username} (${user.id}) has failed\n${err}`);
}
};
// for (const file of files) {
Logger.get('user').info(`Export for ${user.username} (${user.id}) has started`);
for (let i = 0; i !== files.length; ++i) {
const file = files[i];
const stream = datasource.get(file.file);
if (stream) {
const def = new ZipPassThrough(file.file);
zip.add(def);
onBackpressure(def, stream, shouldApplyBackpressure => {
if (shouldApplyBackpressure) {
stream.pause();
} else if (stream.isPaused()) {
stream.resume();
}
});
stream.on('data', c => def.push(c));
stream.on('end', () => def.push(new Uint8Array(0), true));
}
}
zip.end();
res.json({
url: '/api/user/export?name=' + export_name,
});
} else {
const export_name = req.query.name as string;
if (export_name) {
const parts = export_name.split('_');
if (Number(parts[2]) !== user.id) return res.forbid('cannot access export');
const stream = createReadStream(`/tmp/${export_name}`);
res.setHeader('Content-Type', 'application/zip');
res.setHeader('Content-Disposition', `attachment; filename="${export_name}"`);
stream.pipe(res);
} else {
const files = await readdir('/tmp');
const exports = files.filter(f => f.startsWith('zipline_export_'));
res.json({
exports,
});
}
}
}
export default withZipline(handler);
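
The heart of the new export endpoint is fflate's streaming Zip: each image becomes a ZipPassThrough entry whose data is pushed as it arrives, and zip output chunks are written straight to the temp file. A stripped-down, self-contained sketch of that pattern (without the backpressure handling above, so very large inputs can buffer in memory):

```ts
import { Zip, ZipPassThrough } from 'fflate';
import { createReadStream, createWriteStream } from 'fs';
import { basename } from 'path';

function zipFiles(paths: string[], out: string): void {
  const write = createWriteStream(out);
  const zip = new Zip();

  // Every chunk fflate produces goes straight to the output file; close it on the final chunk.
  zip.ondata = (err, chunk, final) => {
    if (err) {
      write.close();
      return;
    }
    write.write(chunk);
    if (final) write.close();
  };

  for (const p of paths) {
    const entry = new ZipPassThrough(basename(p)); // store each file under its base name
    zip.add(entry);
    const stream = createReadStream(p);
    stream.on('data', (c: Buffer) => entry.push(c));
    stream.on('end', () => entry.push(new Uint8Array(0), true)); // empty final chunk ends the entry
  }

  zip.end();
}
```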

View File

@@ -9,20 +9,41 @@ async function handler(req: NextApiReq, res: NextApiRes) {
if (!user) return res.forbid('not logged in');
if (req.method === 'DELETE') {
if (!req.body.id) return res.error('no file id');
if (req.body.all) {
const files = await prisma.image.findMany({
where: {
userId: user.id,
},
});
const image = await prisma.image.delete({
where: {
id: req.body.id,
},
});
for (let i = 0; i !== files.length; ++i) {
await datasource.delete(files[i].file);
}
await datasource.delete(image.file);
const { count } = await prisma.image.deleteMany({
where: {
userId: user.id,
},
});
Logger.get('image').info(`User ${user.username} (${user.id}) deleted ${count} images.`);
Logger.get('image').info(`User ${user.username} (${user.id}) deleted an image ${image.file} (${image.id})`);
return res.json({ count });
} else {
if (!req.body.id) return res.error('no file id');
delete image.password;
return res.json(image);
const image = await prisma.image.delete({
where: {
id: req.body.id,
},
});
await datasource.delete(image.file);
Logger.get('image').info(`User ${user.username} (${user.id}) deleted an image ${image.file} (${image.id})`);
delete image.password;
return res.json(image);
}
} else if (req.method === 'PATCH') {
if (!req.body.id) return res.error('no file id');
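
Client-side, the dashboard's new "Delete All Data" button hits this same route with all: true. A hedged sketch using plain fetch instead of Zipline's useFetch wrapper:

```ts
async function deleteAllFiles(): Promise<number> {
  const res = await fetch('/api/user/files', {
    method: 'DELETE',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ all: true }),
  });
  const data = await res.json();
  return data.count ?? 0; // { count } on success, { error } when nothing was deleted
}
```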

View File

@@ -1,6 +1,7 @@
import { NextApiReq, NextApiRes, withZipline } from 'middleware/withZipline';
import prisma from 'lib/prisma';
import Logger from 'lib/logger';
import datasource from 'lib/ds';
async function handler(req: NextApiReq, res: NextApiRes) {
const user = await req.user();
@@ -18,6 +19,16 @@ async function handler(req: NextApiReq, res: NextApiRes) {
if (!deleteUser) return res.forbid('user doesn\'t exist');
if (req.body.delete_images) {
const files = await prisma.image.findMany({
where: {
userId: deleteUser.id,
},
});
for (let i = 0; i !== files.length; ++i) {
await datasource.delete(files[i].file);
}
const { count } = await prisma.image.deleteMany({
where: {
userId: deleteUser.id,

View File

@@ -63,7 +63,7 @@ export default class Server {
},
});
this.router.on('GET', `${config.uploader.route}/:id`, async (req, res, params) => {
this.router.on('GET', config.uploader.route === '/' ? '/:id(^[^\\.]+\\.[^\\.]+)' : `${config.uploader.route}/:id`, async (req, res, params) => {
const image = await this.prisma.image.findFirst({
where: {
OR: [
@@ -72,6 +72,7 @@ export default class Server {
],
},
});
console.log(image);
if (!image) await this.rawFile(req, res, params.id);
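
The new route pattern lets files be served from the site root when uploader.route is '/', by constraining the :id parameter to a "name.ext" shape. A self-contained sketch of the same find-my-way registration (handler body is illustrative):

```ts
import http from 'http';
import Router from 'find-my-way';

const router = Router();

// Matches e.g. GET /abc123.png at the root, but not extension-less paths like /dashboard.
router.on('GET', '/:id(^[^\\.]+\\.[^\\.]+)', (req, res, params) => {
  res.end(`requested file: ${params.id}`);
});

http.createServer((req, res) => router.lookup(req, res)).listen(3000);
```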

View File

@@ -19,7 +19,9 @@ const validator = object({
s3: object({
access_key_id: string(),
secret_access_key: string(),
endpoint: string().notRequired().nullable(),
bucket: string(),
force_s3_path: boolean().default(false),
}).notRequired(),
}).required(),
uploader: object({
@@ -49,12 +51,12 @@ export default function validate(config): Config {
if (!validated.datasource.s3.access_key_id) errors.push('datasource.s3.access_key_id is a required field');
if (!validated.datasource.s3.secret_access_key) errors.push('datasource.s3.secret_access_key is a required field');
if (!validated.datasource.s3.bucket) errors.push('datasource.s3.bucket is a required field');
if (errors.length) throw { errors };
if (errors.length) throw { errors };
}
return validated as unknown as Config;
} catch (e) {
if (process.env.ZIPLINE_DOCKER_BUILD) return null;
throw `${e.errors.length} errors occured\n${e.errors.map(x => '\t' + x).join('\n')}`;
}
}
};

View File

@@ -20,11 +20,20 @@
"noEmit": true,
"baseUrl": "src",
"paths": {
"components/*": ["components/*"],
"hooks/*": ["lib/hooks/*"],
"middleware/*": ["lib/middleware/*"],
"lib/*": ["lib/*"]
}
"components/*": [
"components/*"
],
"hooks/*": [
"lib/hooks/*"
],
"middleware/*": [
"lib/middleware/*"
],
"lib/*": [
"lib/*"
]
},
"incremental": true
},
"include": [
"next-env.d.ts",

yarn.lock (2440 changed lines)

File diff suppressed because it is too large.