Compare commits

..

29 Commits

Author SHA1 Message Date
diced
1e2b8efb13 feat(v3.4.5): version 2022-06-19 17:46:45 -07:00
diced
8495963094 feat(v3.4.5): exporting images and more stuff 2022-06-19 17:46:20 -07:00
diced
06d1c0bc3b fix(api): make delete user with images actually delete their images from the datasource 2022-06-19 17:26:52 -07:00
diced
5965c2e237 fix(config): extention -> extension 2022-06-19 16:44:55 -07:00
diced
fb34dfadb0 fix(config): make endpoint nullable 2022-06-18 13:47:59 -07:00
diced
13b0ac737b feat(datasource): s3 path styles 2022-06-18 13:39:12 -07:00
diced
300430b3ec Merge branch 'trunk' of github.com:diced/zipline into trunk 2022-06-18 12:38:42 -07:00
NebulaBC
cf6f154e6e fix: add env vars for s3 endpoint (#153) 2022-06-17 19:05:20 -07:00
diced
2ddf8c0cdb fix(api): password protected images wont show up on root 2022-06-17 15:35:29 -07:00
NebulaBC
2a402f77b5 feat(api): S3 endpoint support (#152)
* S3 endpoint support

Adding endpoint support to S3 allows for other S3-compatible uploaders to be used

* Fix formatting error
2022-06-17 14:29:34 -07:00
Han Cen
7b2c31658a feat(api): root uploader route (#150) 2022-06-17 14:20:21 -07:00
Han Cen
7a91a60af9 fix: fix build (#149) 2022-06-17 08:35:53 -07:00
diced
bfa6c70bf3 chore(deps): update stuff 2022-06-16 14:22:26 -07:00
Jonathan
73eff05180 feat: use yarn v3 (#136)
* feat: use yarn v3

* chore: bump yarn to 3.2.1
2022-06-06 16:38:15 -07:00
relaxtakenotes
74f3b3f13d fix: image width not being set properly (#143)
* Fix image width not being set properly

Sometimes it got set to 0 because the original image wasn't loaded yet.

* fix: eslint

Co-authored-by: dicedtomato <35403473+diced@users.noreply.github.com>
2022-06-04 22:18:07 -07:00
diced
181833d768 Merge branch 'trunk' of github.com:diced/zipline into trunk 2022-06-04 22:05:26 -07:00
diced
be9523304a feat(v3.4.4): fix many bugs and password protected uploads 2022-06-04 22:05:08 -07:00
dicedtomato
b26fef3ad4 fix(docker): add restart policy for postgres 2022-03-26 20:37:02 +00:00
diced
9f86674bbe fix: update security policy 2022-03-14 20:31:18 -07:00
diced
095e57a037 fix(actions): arm -> arm64 2022-03-14 20:22:39 -07:00
diced
66a8e3bb79 feat: arm docker-compose file 2022-03-13 20:05:41 -07:00
diced
473137abdf fix(actions): fix arm action path 2022-03-13 19:36:28 -07:00
diced
740f1605e7 fix(actions): maybe fix actions 2022-03-13 19:30:37 -07:00
diced
0922ec020e fix: revert to node 16 on actions 2022-03-13 19:27:32 -07:00
diced
dbe8291f55 fix(actions): maybe fix arm action 2022-03-13 19:26:33 -07:00
diced
9dcc16277e refactor(actions): update to v2 of build-push-action & push arm image 2022-03-13 19:25:11 -07:00
diced
aa611fa6ba feat(v3.4.3): cleanup, fix memory leak, arm support 2022-03-13 19:13:18 -07:00
diced
083040e300 feat(v3.4.2): random domain selection #129 2022-03-03 17:52:34 -08:00
diced
99e92e4594 feat(v3.4.1): datasource api, for S3 functionality 2022-03-02 22:04:56 -08:00
64 changed files with 10831 additions and 5931 deletions

View File

@@ -2,3 +2,6 @@ node_modules/
.next/
uploads/
.git/
.yarn/*
!.yarn/releases
!.yarn/plugins

41
.github/workflows/docker-arm.yml vendored Normal file
View File

@@ -0,0 +1,41 @@
name: 'CD: Push ARM64 Docker Images'
on:
push:
branches: [ trunk ]
paths:
- 'src/**'
- 'server/**'
- 'prisma/**'
- '.github/**'
workflow_dispatch:
jobs:
push_to_ghcr:
name: Push Image to GitHub Packages
runs-on: ubuntu-latest
steps:
- name: Check out the repo
uses: actions/checkout@v2
- name: Setup QEMU
uses: docker/setup-qemu-action@v1
- name: Set up Docker Buildx
id: buildx
uses: docker/setup-buildx-action@v1
- name: Login to Github Packages
uses: docker/login-action@v1
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build Docker Image
uses: docker/build-push-action@v2
with:
file: ./Dockerfile-arm
platforms: linux/arm64
push: true
tags: ghcr.io/diced/zipline/arm64:trunk

View File

@@ -18,28 +18,31 @@ jobs:
- name: Check out the repo
uses: actions/checkout@v2
- name: Push to GitHub Packages
uses: docker/build-push-action@v1
- name: Setup QEMU
uses: docker/setup-qemu-action@v1
- name: Set up Docker Buildx
id: buildx
uses: docker/setup-buildx-action@v1
- name: Login to Github Packages
uses: docker/login-action@v1
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
registry: docker.pkg.github.com
repository: diced/zipline/zipline
dockerfile: Dockerfile
tag_with_ref: true
push_to_dockerhub:
name: Push Image to Docker Hub
runs-on: ubuntu-latest
steps:
- name: Check out the repo
uses: actions/checkout@v2
- name: Push to Docker Hub
uses: docker/build-push-action@v1
- name: Login to Docker Hub
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
repository: diced/zipline
dockerfile: Dockerfile
tag_with_ref: true
- name: Build Docker Image
uses: docker/build-push-action@v2
with:
push: true
tags: |
ghcr.io/diced/zipline/zipline:trunk
ghcr.io/diced/zipline/amd64:trunk
diced/zipline:trunk

9
.gitignore vendored
View File

@@ -5,6 +5,11 @@
/.pnp
.pnp.js
# yarn
.yarn/*
!.yarn/releases
!.yarn/plugins
# testing
/coverage
@@ -36,4 +41,6 @@ yarn-error.log*
# zipline
config.toml
uploads/
uploads/
dist/
docker-compose.local.yml

File diff suppressed because one or more lines are too long

786
.yarn/releases/yarn-3.2.1.cjs vendored Normal file

File diff suppressed because one or more lines are too long

7
.yarnrc.yml Normal file
View File

@@ -0,0 +1,7 @@
nodeLinker: node-modules
plugins:
- path: .yarn/plugins/@yarnpkg/plugin-interactive-tools.cjs
spec: "@yarnpkg/plugin-interactive-tools"
yarnPath: .yarn/releases/yarn-3.2.1.cjs

View File

@@ -1,20 +1,21 @@
FROM node:16-alpine AS deps
WORKDIR /build
COPY package.json yarn.lock ./
COPY .yarn .yarn
COPY package.json yarn.lock .yarnrc.yml ./
RUN apk add --no-cache libc6-compat
RUN yarn install --frozen-lockfile
RUN yarn install --immutable
FROM node:16-alpine AS builder
WORKDIR /build
COPY --from=deps /build/node_modules ./node_modules
COPY src ./src
COPY server ./server
COPY scripts ./scripts
COPY prisma ./prisma
COPY package.json yarn.lock next.config.js next-env.d.ts zip-env.d.ts tsconfig.json ./
COPY .yarn .yarn
COPY package.json yarn.lock .yarnrc.yml esbuild.config.js next.config.js next-env.d.ts zip-env.d.ts tsconfig.json ./
ENV ZIPLINE_DOCKER_BUILD 1
ENV NEXT_TELEMETRY_DISABLED 1
@@ -31,11 +32,11 @@ RUN addgroup --system --gid 1001 zipline
RUN adduser --system --uid 1001 zipline
COPY --from=builder --chown=zipline:zipline /build/.next ./.next
COPY --from=builder --chown=zipline:zipline /build/dist ./dist
COPY --from=builder --chown=zipline:zipline /build/node_modules ./node_modules
COPY --from=builder /build/next.config.js ./next.config.js
COPY --from=builder /build/src ./src
COPY --from=builder /build/server ./server
COPY --from=builder /build/scripts ./scripts
COPY --from=builder /build/prisma ./prisma
COPY --from=builder /build/tsconfig.json ./tsconfig.json
@@ -43,4 +44,4 @@ COPY --from=builder /build/package.json ./package.json
USER zipline
CMD ["node", "server"]
CMD ["node", "dist/server"]

46
Dockerfile-arm Normal file
View File

@@ -0,0 +1,46 @@
FROM node:16 AS deps
WORKDIR /build
COPY .yarn .yarn
COPY package.json yarn.lock .yarnrc.yml ./
RUN yarn install --immutable
FROM node:16 AS builder
WORKDIR /build
COPY --from=deps /build/node_modules ./node_modules
COPY src ./src
COPY scripts ./scripts
COPY prisma ./prisma
COPY .yarn .yarn
COPY package.json yarn.lock .yarnrc.yml esbuild.config.js next.config.js next-env.d.ts zip-env.d.ts tsconfig.json ./
ENV ZIPLINE_DOCKER_BUILD 1
ENV NEXT_TELEMETRY_DISABLED 1
RUN yarn build
FROM node:16 AS runner
WORKDIR /zipline
ENV NODE_ENV production
ENV NEXT_TELEMETRY_DISABLED 1
RUN addgroup --system --gid 1001 zipline
RUN adduser --system --uid 1001 zipline
COPY --from=builder --chown=zipline:zipline /build/.next ./.next
COPY --from=builder --chown=zipline:zipline /build/dist ./dist
COPY --from=builder --chown=zipline:zipline /build/node_modules ./node_modules
COPY --from=builder /build/next.config.js ./next.config.js
COPY --from=builder /build/src ./src
COPY --from=builder /build/scripts ./scripts
COPY --from=builder /build/prisma ./prisma
COPY --from=builder /build/tsconfig.json ./tsconfig.json
COPY --from=builder /build/package.json ./package.json
USER zipline
CMD ["node", "dist/server"]

View File

@@ -1,6 +1,6 @@
MIT License
Copyright (c) 2021 dicedtomato
Copyright (c) 2022 dicedtomato
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal

View File

@@ -17,18 +17,19 @@
- Built with Next.js & React
- Token protected uploading
- Image uploading
- Password Protected Uploads
- URL shortening
- Text uploading
- URL Formats (uuid, dates, random alphanumeric, original name, zws)
- Discord embeds (OG metadata)
- Gallery viewer, and multiple file format support
- Easy setup instructions on [docs](https://zipline.diced.tech/) (One command install `docker-compose up -d`)
- Easy setup instructions on [docs](https://zipl.vercel.app/) (One command install `docker-compose up -d`)
## Installing
[See how to install here](https://zipline.diced.tech/docs/get-started)
[See how to install here](https://zipl.vercel.app/docs/get-started)
## Configuration
[See how to configure here](https://zipline.diced.tech/docs/config/overview)
[See how to configure here](https://zipl.vercel.app/docs/config/overview)
## Theming
[See how to theme here](https://zipline.diced.tech/docs/themes/reference)
[See how to theme here](https://zipl.vercel.app/docs/themes/reference)

View File

@@ -4,7 +4,7 @@
| Version | Supported |
| ------- | ------------------ |
| 3.2.x | :white_check_mark: |
| 3.4.4 | :white_check_mark: |
| < 3 | :x: |
| < 2 | :x: |

View File

@@ -1,6 +1,6 @@
[core]
secure = true
secret = 'some secret'
secret = 'changethis'
host = '0.0.0.0'
port = 3000
database_url = 'postgres://postgres:postgres@postgres/postgres'
@@ -16,4 +16,4 @@ length = 6
directory = './uploads'
user_limit = 104900000 # 100mb
admin_limit = 104900000 # 100mb
disabled_extentions = ['jpg']
disabled_extensions = ['jpg']

46
docker-compose.arm.yml Normal file
View File

@@ -0,0 +1,46 @@
version: '3'
services:
postgres:
image: postgres
restart: always
environment:
- POSTGRES_USER=postgres
- POSTGRES_PASSWORD=postgres
- POSTGRES_DATABASE=postgres
volumes:
- pg_data:/var/lib/postgresql/data
healthcheck:
test: ['CMD-SHELL', 'pg_isready -U postgres']
interval: 10s
timeout: 5s
retries: 5
zipline:
image: ghcr.io/diced/zipline/arm64:trunk
ports:
- '3000:3000'
restart: unless-stopped
environment:
- SECURE=false
- SECRET=changethis
- HOST=0.0.0.0
- PORT=3000
- DATASOURCE_TYPE=local
- DATASOURCE_DIRECTORY=./uploads
- DATABASE_URL=postgresql://postgres:postgres@postgres/postgres/
- UPLOADER_ROUTE=/u
- UPLOADER_EMBED_ROUTE=/a
- UPLOADER_LENGTH=6
- UPLOADER_ADMIN_LIMIT=104900000
- UPLOADER_USER_LIMIT=104900000
- UPLOADER_DISABLED_EXTS=
- URLS_ROUTE=/go
- URLS_LENGTH=6
volumes:
- '$PWD/uploads:/zipline/uploads'
- '$PWD/public:/zipline/public'
depends_on:
- 'postgres'
volumes:
pg_data:

View File

@@ -2,11 +2,12 @@ version: '3'
services:
postgres:
image: postgres
environment:
restart: always
environment:
- POSTGRES_USER=postgres
- POSTGRES_PASSWORD=postgres
- POSTGRES_DATABASE=postgres
volumes:
volumes:
- pg_data:/var/lib/postgresql/data
healthcheck:
test: ['CMD-SHELL', 'pg_isready -U postgres']
@@ -21,16 +22,17 @@ services:
ports:
- '3000:3000'
restart: unless-stopped
environment:
environment:
- SECURE=false
- SECRET=changethis
- HOST=0.0.0.0
- PORT=3000
- DATASOURCE_TYPE=local
- DATASOURCE_DIRECTORY=./uploads
- DATABASE_URL=postgresql://postgres:postgres@postgres/postgres/
- UPLOADER_ROUTE=/u
- UPLOADER_EMBED_ROUTE=/a
- UPLOADER_LENGTH=6
- UPLOADER_DIRECTORY=./uploads
- UPLOADER_ADMIN_LIMIT=104900000
- UPLOADER_USER_LIMIT=104900000
- UPLOADER_DISABLED_EXTS=

View File

@@ -2,11 +2,12 @@ version: '3'
services:
postgres:
image: postgres
environment:
restart: always
environment:
- POSTGRES_USER=postgres
- POSTGRES_PASSWORD=postgres
- POSTGRES_DATABASE=postgres
volumes:
volumes:
- pg_data:/var/lib/postgresql/data
healthcheck:
test: ['CMD-SHELL', 'pg_isready -U postgres']
@@ -18,17 +19,18 @@ services:
image: ghcr.io/diced/zipline/zipline:trunk
ports:
- '3000:3000'
restart: unless-stopped
environment:
restart: always
environment:
- SECURE=false
- SECRET=changethis
- HOST=0.0.0.0
- PORT=3000
- DATASOURCE_TYPE=local
- DATASOURCE_DIRECTORY=./uploads
- DATABASE_URL=postgresql://postgres:postgres@postgres/postgres/
- UPLOADER_ROUTE=/u
- UPLOADER_EMBED_ROUTE=/a
- UPLOADER_LENGTH=6
- UPLOADER_DIRECTORY=./uploads
- UPLOADER_ADMIN_LIMIT=104900000
- UPLOADER_USER_LIMIT=104900000
- UPLOADER_DISABLED_EXTS=

40
esbuild.config.js Normal file
View File

@@ -0,0 +1,40 @@
const esbuild = require('esbuild');
const { existsSync } = require('fs');
const { rm } = require('fs/promises');
(async () => {
const watch = process.argv[2] === '--watch';
if (existsSync('./dist')) {
await rm('./dist', { recursive: true });
}
await esbuild.build({
tsconfig: 'tsconfig.json',
outdir: 'dist',
bundle: false,
platform: 'node',
treeShaking: true,
entryPoints: [
'src/server/index.ts',
'src/server/server.ts',
'src/server/util.ts',
'src/server/validateConfig.ts',
'src/lib/logger.ts',
'src/lib/readConfig.ts',
'src/lib/datasource/datasource.ts',
'src/lib/datasource/index.ts',
'src/lib/datasource/Local.ts',
'src/lib/datasource/S3.ts',
'src/lib/ds.ts',
'src/lib/config.ts',
],
format: 'cjs',
resolveExtensions: ['.ts', '.js'],
write: true,
watch,
incremental: watch,
sourcemap: false,
minify: process.env.NODE_ENV === 'production',
});
})();

View File

@@ -8,4 +8,7 @@ module.exports = {
},
];
},
api: {
responseLimit: false,
},
};

View File

@@ -1,14 +1,15 @@
{
"name": "zip3",
"version": "3.4.0",
"name": "zipline",
"version": "3.4.5",
"license": "MIT",
"scripts": {
"dev": "NODE_ENV=development node server",
"build": "npm-run-all build:schema build:next",
"dev": "node esbuild.config.js && REACT_EDITOR=code-insiders NODE_ENV=development node dist/server",
"build": "npm-run-all build:server build:schema build:next",
"build:server": "node esbuild.config.js",
"build:next": "next build",
"build:schema": "prisma generate --schema=prisma/schema.prisma",
"migrate:dev": "prisma migrate dev --create-only",
"start": "node server",
"start": "node dist/server",
"lint": "next lint",
"seed": "ts-node --compiler-options \"{\\\"module\\\":\\\"commonjs\\\"}\" --transpile-only prisma/seed.ts",
"docker:run": "docker-compose up -d",
@@ -17,47 +18,52 @@
},
"dependencies": {
"@iarna/toml": "2.2.5",
"@mantine/core": "^3.6.9",
"@mantine/dropzone": "^3.6.9",
"@mantine/hooks": "^3.6.9",
"@mantine/modals": "^3.6.9",
"@mantine/next": "^3.6.9",
"@mantine/notifications": "^3.6.9",
"@mantine/prism": "^3.6.11",
"@mantine/core": "^4.2.9",
"@mantine/dropzone": "^4.2.9",
"@mantine/hooks": "^4.2.9",
"@mantine/modals": "^4.2.9",
"@mantine/next": "^4.2.9",
"@mantine/notifications": "^4.2.9",
"@mantine/prism": "^4.2.9",
"@modulz/radix-icons": "^4.0.0",
"@prisma/client": "^3.9.2",
"@prisma/migrate": "^3.9.2",
"@prisma/sdk": "^3.9.2",
"@reduxjs/toolkit": "^1.6.0",
"argon2": "^0.28.2",
"colorette": "^1.2.2",
"cookie": "^0.4.1",
"fecha": "^4.2.1",
"multer": "^1.4.2",
"next": "^12.1.0",
"prisma": "^3.9.2",
"react": "^17.0.2",
"react-dom": "^17.0.2",
"react-redux": "^7.2.4",
"react-table": "^7.7.0",
"redux": "^4.1.0",
"redux-thunk": "^2.3.0",
"@prisma/client": "^3.15.2",
"@prisma/migrate": "^3.15.2",
"@prisma/sdk": "^3.15.2",
"@reduxjs/toolkit": "^1.8.2",
"argon2": "^0.28.5",
"aws-sdk": "^2.1156.0",
"colorette": "^2.0.19",
"cookie": "^0.5.0",
"fecha": "^4.2.3",
"fflate": "^0.7.3",
"find-my-way": "^6.3.0",
"multer": "^1.4.5-lts.1",
"next": "^12.1.6",
"prisma": "^3.15.2",
"react": "^18.2.0",
"react-dom": "^18.2.0",
"react-redux": "^8.0.2",
"react-table": "^7.8.0",
"redux": "^4.2.0",
"redux-thunk": "^2.4.1",
"uuid": "^8.3.2",
"yup": "^0.32.9"
"yup": "^0.32.11"
},
"devDependencies": {
"@types/cookie": "^0.4.0",
"@types/multer": "^1.4.6",
"@types/cookie": "^0.5.1",
"@types/multer": "^1.4.7",
"@types/node": "^15.12.2",
"babel-plugin-import": "^1.13.3",
"babel-plugin-import": "^1.13.5",
"esbuild": "^0.14.44",
"eslint": "^7.32.0",
"eslint-config-next": "11.0.0",
"eslint-config-next": "12.1.6",
"npm-run-all": "^4.1.5",
"ts-node": "^10.0.0",
"typescript": "^4.3.2"
"ts-node": "^10.8.1",
"typescript": "^4.7.3"
},
"repository": {
"type": "git",
"url": "https://github.com/diced/zipline.git"
}
},
"packageManager": "yarn@3.2.1"
}

View File

@@ -0,0 +1,2 @@
-- AlterTable
ALTER TABLE "User" ADD COLUMN "domains" TEXT[];

View File

@@ -0,0 +1,2 @@
-- AlterTable
ALTER TABLE "Image" ADD COLUMN "password" TEXT;

View File

@@ -8,16 +8,17 @@ generator client {
}
model User {
id Int @id @default(autoincrement())
id Int @id @default(autoincrement())
username String
password String
token String
administrator Boolean @default(false)
systemTheme String @default("system")
administrator Boolean @default(false)
systemTheme String @default("system")
embedTitle String?
embedColor String @default("#2f3136")
embedSiteName String? @default("{image.file} • {user.name}")
ratelimited Boolean @default(false)
embedColor String @default("#2f3136")
embedSiteName String? @default("{image.file} • {user.name}")
ratelimited Boolean @default(false)
domains String[]
images Image[]
urls Url[]
}
@@ -37,6 +38,7 @@ model Image {
views Int @default(0)
favorite Boolean @default(false)
embed Boolean @default(false)
password String?
invisible InvisibleImage?
format ImageFormat @default(RANDOM)
user User @relation(fields: [userId], references: [id])
@@ -46,7 +48,7 @@ model Image {
model InvisibleImage {
id Int @id @default(autoincrement())
invis String @unique
imageId Int
imageId Int @unique
image Image @relation(fields: [imageId], references: [id])
}
@@ -64,12 +66,12 @@ model Url {
model InvisibleUrl {
id Int @id @default(autoincrement())
invis String @unique
urlId String
urlId String @unique
url Url @relation(fields: [urlId], references: [id])
}
model Stats {
id Int @id @default(autoincrement())
id Int @id @default(autoincrement())
created_at DateTime @default(now())
data Json
}
}

View File

@@ -1,164 +0,0 @@
const next = require('next').default;
const { createServer } = require('http');
const { mkdir } = require('fs/promises');
const { extname } = require('path');
const validateConfig = require('./validateConfig');
const Logger = require('../src/lib/logger');
const readConfig = require('../src/lib/readConfig');
const mimes = require('../scripts/mimes');
const { log, getStats, getFile, migrations } = require('./util');
const { PrismaClient } = require('@prisma/client');
const { version } = require('../package.json');
const exts = require('../scripts/exts');
const serverLog = Logger.get('server');
serverLog.info(`starting zipline@${version} server`);
const dev = process.env.NODE_ENV === 'development';
(async () => {
try {
await run();
} catch (e) {
serverLog.error(e);
process.exit(1);
}
})();
async function run() {
const a = readConfig();
const config = validateConfig(a);
process.env.DATABASE_URL = config.core.database_url;
await migrations();
await mkdir(config.uploader.directory, { recursive: true });
const app = next({
dir: '.',
dev,
quiet: !dev,
hostname: config.core.host,
port: config.core.port,
});
await app.prepare();
const handle = app.getRequestHandler();
const prisma = new PrismaClient();
const srv = createServer(async (req, res) => {
if (req.url.startsWith('/r')) {
const parts = req.url.split('/');
if (!parts[2] || parts[2] === '') return;
let image = await prisma.image.findFirst({
where: {
OR: [
{ file: parts[2] },
{ invisible:{ invis: decodeURI(parts[2]) } },
],
},
select: {
mimetype: true,
id: true,
file: true,
invisible: true,
},
});
if (!image) {
const data = await getFile(config.uploader.directory, parts[2]);
if (!data) return app.render404(req, res);
const mimetype = mimes[extname(parts[2])] ?? 'application/octet-stream';
res.setHeader('Content-Type', mimetype);
res.end(data);
} else {
const data = await getFile(config.uploader.directory, image.file);
if (!data) return app.render404(req, res);
await prisma.image.update({
where: { id: image.id },
data: { views: { increment: 1 } },
});
res.setHeader('Content-Type', image.mimetype);
res.end(data);
}
} else if (req.url.startsWith(config.uploader.route)) {
const parts = req.url.split('/');
if (!parts[2] || parts[2] === '') return;
let image = await prisma.image.findFirst({
where: {
OR: [
{ file: parts[2] },
{ invisible:{ invis: decodeURI(parts[2]) } },
],
},
select: {
mimetype: true,
id: true,
file: true,
invisible: true,
embed: true,
},
});
if (!image) {
const data = await getFile(config.uploader.directory, parts[2]);
if (!data) return app.render404(req, res);
const mimetype = mimes[extname(parts[2])] ?? 'application/octet-stream';
res.setHeader('Content-Type', mimetype);
res.end(data);
} else if (image.embed) {
handle(req, res);
} else {
const ext = image.file.split('.').pop();
if (Object.keys(exts).includes(ext)) return handle(req, res);
const data = await getFile(config.uploader.directory, image.file);
if (!data) return app.render404(req, res);
await prisma.image.update({
where: { id: image.id },
data: { views: { increment: 1 } },
});
res.setHeader('Content-Type', image.mimetype);
res.end(data);
}
} else {
handle(req, res);
}
if (config.core.logger) log(req.url, res.statusCode);
});
srv.on('error', (e) => {
serverLog.error(e);
process.exit(1);
});
srv.on('listening', () => {
serverLog.info(`listening on ${config.core.host}:${config.core.port}`);
});
srv.listen(config.core.port, config.core.host ?? '0.0.0.0');
const stats = await getStats(prisma, config);
await prisma.stats.create({
data: {
data: stats,
},
});
setInterval(async () => {
const stats = await getStats(prisma, config);
await prisma.stats.create({
data: {
data: stats,
},
});
if (config.core.logger) serverLog.info('stats updated');
}, config.core.stats_interval * 1000);
}

View File

@@ -1,40 +0,0 @@
const { object, bool, string, number, boolean, array } = require('yup');
const validator = object({
core: object({
secure: bool().default(false),
secret: string().min(8).required(),
host: string().default('0.0.0.0'),
port: number().default(3000),
database_url: string().required(),
logger: boolean().default(false),
stats_interval: number().default(1800),
}).required(),
uploader: object({
route: string().default('/u'),
embed_route: string().default('/a'),
length: number().default(6),
directory: string().default('./uploads'),
admin_limit: number().default(104900000),
user_limit: number().default(104900000),
disabled_extensions: array().default([]),
}).required(),
urls: object({
route: string().default('/go'),
length: number().default(6),
}).required(),
ratelimit: object({
user: number().default(0),
admin: number().default(0),
}),
});
module.exports = function validate(config) {
try {
return validator.validateSync(config, { abortEarly: false });
} catch (e) {
if (process.env.ZIPLINE_DOCKER_BUILD) return {};
throw `${e.errors.length} errors occured\n${e.errors.map(x => '\t' + x).join('\n')}`;
}
};

View File

@@ -8,7 +8,6 @@ import {
} from 'react-table';
import {
ActionIcon,
Checkbox,
createStyles,
Divider,
Group,

View File

@@ -122,6 +122,8 @@ export default function Layout({ children, user }) {
const openResetToken = () => modals.openConfirmModal({
title: 'Reset Token',
centered: true,
overlayBlur: 3,
children: (
<Text size='sm'>
Once you reset your token, you will have to update any uploaders to use this new token.
@@ -153,6 +155,8 @@ export default function Layout({ children, user }) {
const openCopyToken = () => modals.openConfirmModal({
title: 'Copy Token',
centered: true,
overlayBlur: 3,
children: (
<Text size='sm'>
Make sure you don&apos;t share this token with anyone as they will be able to upload files on your behalf.
@@ -179,7 +183,7 @@ export default function Layout({ children, user }) {
fixed
navbar={
<Navbar
padding='md'
p='md'
hiddenBreakpoint='sm'
hidden={!opened}
width={{ sm: 200, lg: 230 }}
@@ -245,7 +249,7 @@ export default function Layout({ children, user }) {
</Navbar>
}
header={
<Header height={70} padding='md'>
<Header height={70} p='md'>
<div style={{ display: 'flex', alignItems: 'center', height: '100%' }}>
<MediaQuery largerThan='sm' styles={{ display: 'none' }}>
<Burger
@@ -323,7 +327,7 @@ export default function Layout({ children, user }) {
</Header>
}
>
<Paper withBorder padding='md' shadow='xs'>{children}</Paper>
<Paper withBorder p='md' shadow='xs'>{children}</Paper>
</AppShell>
);
}
}

View File

@@ -0,0 +1,6 @@
import React from 'react';
import { Text } from '@mantine/core';
export default function StatText({ children }) {
return <Text color='gray' size='xl'>{children}</Text>;
}

View File

@@ -1,15 +1,16 @@
import React, { useEffect, useState } from 'react';
import Card from 'components/Card';
import Image from 'components/Image';
import ZiplineImage from 'components/Image';
import ImagesTable from 'components/ImagesTable';
import useFetch from 'lib/hooks/useFetch';
import { useStoreSelector } from 'lib/redux/store';
import { Box, Text, Table, Skeleton, Title, SimpleGrid } from '@mantine/core';
import { Text, Skeleton, Title, SimpleGrid } from '@mantine/core';
import { randomId, useClipboard } from '@mantine/hooks';
import Link from 'components/Link';
import { CopyIcon, Cross1Icon, TrashIcon } from '@modulz/radix-icons';
import { useNotifications } from '@mantine/notifications';
import StatText from 'components/StatText';
type Aligns = 'inherit' | 'right' | 'left' | 'center' | 'justify';
@@ -27,37 +28,6 @@ export function bytesToRead(bytes: number) {
return `${bytes.toFixed(1)} ${units[num]}`;
}
function StatText({ children }) {
return <Text color='gray' size='xl'>{children}</Text>;
}
function StatTable({ rows, columns }) {
return (
<Box sx={{ pt: 1 }}>
<Table highlightOnHover>
<thead>
<tr>
{columns.map(col => (
<th key={randomId()}>{col.name}</th>
))}
</tr>
</thead>
<tbody>
{rows.map(row => (
<tr key={randomId()}>
{columns.map(col => (
<td key={randomId()}>
{col.format ? col.format(row[col.id]) : row[col.id]}
</td>
))}
</tr>
))}
</tbody>
</Table>
</Box>
);
}
export default function Dashboard() {
const user = useStoreSelector(state => state.user);
@@ -128,8 +98,7 @@ export default function Dashboard() {
]}
>
{recent.length ? recent.map(image => (
// eslint-disable-next-line jsx-a11y/alt-text
<Image key={randomId()} image={image} updateImages={updateImages} />
<ZiplineImage key={randomId()} image={image} updateImages={updateImages} />
)) : [1,2,3,4].map(x => (
<div key={x}>
<Skeleton width='100%' height={220} sx={{ borderRadius: 1 }}/>

View File

@@ -1,12 +1,14 @@
import React from 'react';
import React, { useEffect, useState } from 'react';
import useFetch from 'hooks/useFetch';
import Link from 'components/Link';
import { useStoreDispatch, useStoreSelector } from 'lib/redux/store';
import { updateUser } from 'lib/redux/reducers/user';
import { useForm } from '@mantine/hooks';
import { Tooltip, TextInput, Button, Text, Title, Group, ColorInput } from '@mantine/core';
import { DownloadIcon } from '@modulz/radix-icons';
import { randomId, useForm, useInterval } from '@mantine/hooks';
import { Card, Tooltip, TextInput, Button, Text, Title, Group, ColorInput, MultiSelect, Space, Box, Table } from '@mantine/core';
import { DownloadIcon, Cross1Icon, TrashIcon } from '@modulz/radix-icons';
import { useNotifications } from '@mantine/notifications';
import { useModals } from '@mantine/modals';
function VarsTooltip({ children }) {
return (
@@ -24,9 +26,45 @@ function VarsTooltip({ children }) {
);
}
function ExportDataTooltip({ children }) {
return <Tooltip position='top' placement='center' color='' label='After clicking, if you have a lot of files the export can take a while to complete. A list of previous exports will be below to download.'>{children}</Tooltip>;
}
function ExportTable({ rows, columns }) {
return (
<Box sx={{ pt: 1 }} >
<Table highlightOnHover>
<thead>
<tr>
{columns.map(col => (
<th key={randomId()}>{col.name}</th>
))}
</tr>
</thead>
<tbody>
{rows.map(row => (
<tr key={randomId()}>
{columns.map(col => (
<td key={randomId()}>
{col.format ? col.format(row[col.id]) : row[col.id]}
</td>
))}
</tr>
))}
</tbody>
</Table>
</Box>
);
}
export default function Manage() {
const user = useStoreSelector(state => state.user);
const dispatch = useStoreDispatch();
const notif = useNotifications();
const modals = useModals();
const [exports, setExports] = useState([]);
const [domains, setDomains] = useState(user.domains ?? []);
const genShareX = (withEmbed: boolean = false, withZws: boolean = false) => {
const config = {
@@ -37,8 +75,8 @@ export default function Manage() {
RequestURL: `${window.location.protocol + '//' + window.location.hostname + (window.location.port ? ':' + window.location.port : '')}/api/upload`,
Headers: {
Authorization: user?.token,
...(withEmbed && {Embed: 'true'}),
...(withZws && {ZWS: 'true'}),
...(withEmbed && { Embed: 'true' }),
...(withZws && { ZWS: 'true' }),
},
URL: '$json:files[0]$',
Body: 'MultipartFormData',
@@ -61,6 +99,7 @@ export default function Manage() {
embedTitle: user.embedTitle ?? '',
embedColor: user.embedColor,
embedSiteName: user.embedSiteName ?? '',
domains: user.domains ?? [],
},
});
@@ -73,22 +112,127 @@ export default function Manage() {
if (cleanUsername === '') return form.setFieldError('username', 'Username can\'t be nothing');
const id = notif.showNotification({
title: 'Updating user...',
message: '',
loading: true,
autoClose: false,
});
const data = {
username: cleanUsername,
password: cleanPassword === '' ? null : cleanPassword,
embedTitle: cleanEmbedTitle === '' ? null : cleanEmbedTitle,
embedColor: cleanEmbedColor === '' ? null : cleanEmbedColor,
embedSiteName: cleanEmbedSiteName === '' ? null : cleanEmbedSiteName,
domains,
};
const newUser = await useFetch('/api/user', 'PATCH', data);
if (newUser.error) {
if (newUser.invalidDomains) {
notif.updateNotification(id, {
message: <>
<Text mt='xs'>The following domains are invalid:</Text>
{newUser.invalidDomains.map(err => (
<>
<Text color='gray' key={randomId()}>{err.domain}: {err.reason}</Text>
<Space h='md' />
</>
))}
</>,
color: 'red',
icon: <Cross1Icon />,
});
}
notif.updateNotification(id, {
title: 'Couldn\'t save user',
message: newUser.error,
color: 'red',
icon: <Cross1Icon />,
});
} else {
dispatch(updateUser(newUser));
notif.updateNotification(id, {
title: 'Saved User',
message: '',
});
}
};
const exportData = async () => {
const res = await useFetch('/api/user/export', 'POST');
if (res.url) {
notif.showNotification({
title: 'Export started...',
loading: true,
message: 'If you have a lot of files, the export may take a while. The list of exports will be updated every 30s.',
});
}
};
const getExports = async () => {
const res = await useFetch('/api/user/export');
setExports(res.exports.map(s => ({
date: new Date(Number(s.split('_')[3].slice(0, -4))),
full: s,
})).sort((a, b) => a.date.getTime() - b.date.getTime()));
};
const handleDelete = async () => {
const res = await useFetch('/api/user/files', 'DELETE', {
all: true,
});
if (!res.count) {
notif.showNotification({
title: 'Couldn\'t delete files',
message: res.error,
color: 'red',
icon: <Cross1Icon />,
});
} else {
notif.showNotification({
title: 'Deleted files',
message: `${res.count} files deleted`,
color: 'green',
icon: <TrashIcon />,
});
}
};
const openDeleteModal = () => modals.openConfirmModal({
title: 'Are you sure you want to delete all of your images?',
closeOnConfirm: false,
centered: true,
overlayBlur: 3,
labels: { confirm: 'Yes', cancel: 'No' },
onConfirm: () => {
modals.openConfirmModal({
title: 'Are you really sure?',
centered: true,
overlayBlur: 3,
labels: { confirm: 'Yes', cancel: 'No' },
onConfirm: () => {
handleDelete();
modals.closeAll();
},
onCancel: () => {
modals.closeAll();
},
});
},
});
const interval = useInterval(() => getExports(), 30000);
useEffect(() => {
getExports();
interval.start();
}, []);
return (
<>
<Title>Manage User</Title>
@@ -97,10 +241,23 @@ export default function Manage() {
</VarsTooltip>
<form onSubmit={form.onSubmit((v) => onSubmit(v))}>
<TextInput id='username' label='Username' {...form.getInputProps('username')} />
<TextInput id='password' label='Password'type='password' {...form.getInputProps('password')} />
<TextInput id='password' label='Password' type='password' {...form.getInputProps('password')} />
<TextInput id='embedTitle' label='Embed Title' {...form.getInputProps('embedTitle')} />
<ColorInput id='embedColor' label='Embed Color' {...form.getInputProps('embedColor')} />
<TextInput id='embedSiteName' label='Embed Site Name' {...form.getInputProps('embedSiteName')} />
<MultiSelect
id='domains'
label='Domains'
data={domains}
placeholder='Leave blank if you dont want random domain selection.'
creatable
searchable
clearable
getCreateLabel={query => `Add ${query}`}
onCreate={query => setDomains((current) => [...current, query])}
{...form.getInputProps('domains')}
/>
<Group position='right' sx={{ paddingTop: 12 }}>
<Button
type='submit'
@@ -108,6 +265,24 @@ export default function Manage() {
</Group>
</form>
<Title sx={{ paddingTop: 12 }}>Manage Data</Title>
<Text color='gray' sx={{ paddingBottom: 12 }}>Delete, or export your data into a zip file.</Text>
<Group>
<Button onClick={openDeleteModal} rightIcon={<TrashIcon />}>Delete All Data</Button>
<ExportDataTooltip><Button onClick={exportData} rightIcon={<DownloadIcon />}>Export Data</Button></ExportDataTooltip>
</Group>
<Card mt={22}>
<ExportTable
columns={[
{ id: 'name', name: 'Name' },
{ id: 'date', name: 'Date' },
]}
rows={exports ? exports.map((x, i) => ({
name: <Link href={'/api/user/export?name=' + x.full}>Export {i + 1}</Link>,
date: x.date.toLocaleString(),
})) : []} />
</Card>
<Title sx={{ paddingTop: 12, paddingBottom: 12 }}>ShareX Config</Title>
<Group>
<Button onClick={() => genShareX(false)} rightIcon={<DownloadIcon />}>ShareX Config</Button>
@@ -116,4 +291,4 @@ export default function Manage() {
</Group>
</>
);
}
}

View File

@@ -1,17 +1,11 @@
import React, { useEffect, useState } from 'react';
import Card from 'components/Card';
import Image from 'components/Image';
import ImagesTable from 'components/ImagesTable';
import StatText from 'components/StatText';
import useFetch from 'lib/hooks/useFetch';
import { useStoreSelector } from 'lib/redux/store';
import { Box, Text, Table, Skeleton, Title, SimpleGrid } from '@mantine/core';
import { randomId, useClipboard } from '@mantine/hooks';
import Link from 'components/Link';
import { CopyIcon, Cross1Icon, TrashIcon } from '@modulz/radix-icons';
import { useNotifications } from '@mantine/notifications';
type Aligns = 'inherit' | 'right' | 'left' | 'center' | 'justify';
import { randomId } from '@mantine/hooks';
export function bytesToRead(bytes: number) {
if (isNaN(bytes)) return '0.0 B';
@@ -27,10 +21,6 @@ export function bytesToRead(bytes: number) {
return `${bytes.toFixed(1)} ${units[num]}`;
}
// Uniform gray, extra-large text used for the stat values on this page.
function StatText({ children }) {
return <Text color='gray' size='xl'>{children}</Text>;
}
function StatTable({ rows, columns }) {
return (
<Box sx={{ pt: 1 }}>
@@ -58,9 +48,7 @@ function StatTable({ rows, columns }) {
);
}
export default function Dashboard() {
const user = useStoreSelector(state => state.user);
export default function Stats() {
const [stats, setStats] = useState(null);
const update = async () => {

View File

@@ -30,7 +30,7 @@ function getIconColor(status, theme) {
: theme.black;
}
export default function Upload({ route }) {
export default function Upload() {
const theme = useMantineTheme();
const notif = useNotifications();
const clipboard = useClipboard();
@@ -58,6 +58,7 @@ export default function Upload({ route }) {
title: 'Uploading Images...',
message: '',
loading: true,
autoClose: false,
});
const res = await fetch('/api/upload', {
@@ -89,10 +90,8 @@ export default function Upload({ route }) {
return (
<>
<Dropzone
onDrop={(f) => setFiles([...files, ...f])}
>
{(status) => (
<Dropzone onDrop={(f) => setFiles([...files, ...f])}>
{status => (
<>
<Group position='center' spacing='xl' style={{ minHeight: 220, pointerEvents: 'none' }}>
<ImageUploadIcon

View File

@@ -1,12 +1,12 @@
import React, { useState, useEffect } from 'react';
import { useStoreSelector } from 'lib/redux/store';
import useFetch from 'hooks/useFetch';
import { useRouter } from 'next/router';
import { useForm } from '@mantine/hooks';
import { Avatar, Modal, Title, TextInput, Group, Button, Card, Grid, ActionIcon, SimpleGrid, Switch, Skeleton } from '@mantine/core';
import { Avatar, Modal, Title, TextInput, Group, Button, Card, ActionIcon, SimpleGrid, Switch, Skeleton, Checkbox } from '@mantine/core';
import { Cross1Icon, PlusIcon, TrashIcon } from '@modulz/radix-icons';
import { useNotifications } from '@mantine/notifications';
import { useModals } from '@mantine/modals';
function CreateUserModal({ open, setOpen, updateUsers }) {
@@ -51,7 +51,7 @@ function CreateUserModal({ open, setOpen, updateUsers }) {
updateUsers();
};
return (
<Modal
opened={open}
@@ -76,22 +76,15 @@ export default function Users() {
const user = useStoreSelector(state => state.user);
const router = useRouter();
const notif = useNotifications();
const modals = useModals();
const [users, setUsers] = useState([]);
const [open, setOpen] = useState(false);
const updateUsers = async () => {
const us = await useFetch('/api/users');
if (!us.error) {
setUsers(us);
} else {
router.push('/dashboard');
};
};
const handleDelete = async (user) => {
const handleDelete = async (user, delete_images) => {
const res = await useFetch('/api/users', 'DELETE', {
id: user.id,
delete_images,
});
if (res.error) {
notif.showNotification({
@@ -107,9 +100,42 @@ export default function Users() {
color: 'green',
icon: <TrashIcon />,
});
updateUsers();
}
};
updateUsers();
// 2-step modal for deleting user if they want to delete their images too.
const openDeleteModal = user => modals.openConfirmModal({
title: `Delete ${user.username}?`,
closeOnConfirm: false,
centered: true,
overlayBlur: 3,
labels: { confirm: 'Yes', cancel: 'No' },
onConfirm: () => {
modals.openConfirmModal({
title: `Delete ${user.username}'s images?`,
labels: { confirm: 'Yes', cancel: 'No' },
centered: true,
overlayBlur: 3,
onConfirm: () => {
handleDelete(user, true);
modals.closeAll();
},
onCancel: () => {
handleDelete(user, false);
modals.closeAll();
},
});
},
});
const updateUsers = async () => {
const us = await useFetch('/api/users');
if (!us.error) {
setUsers(us);
} else {
router.push('/dashboard');
};
};
useEffect(() => {
@@ -121,7 +147,7 @@ export default function Users() {
<CreateUserModal open={open} setOpen={setOpen} updateUsers={updateUsers} />
<Group>
<Title sx={{ marginBottom: 12 }}>Users</Title>
<ActionIcon variant='filled' color='primary' onClick={() => setOpen(true)}><PlusIcon/></ActionIcon>
<ActionIcon variant='filled' color='primary' onClick={() => setOpen(true)}><PlusIcon /></ActionIcon>
</Group>
<SimpleGrid
cols={3}
@@ -130,23 +156,23 @@ export default function Users() {
{ maxWidth: 'sm', cols: 1, spacing: 'sm' },
]}
>
{users.length ? users.filter(x => x.username !== user.username).map((user, i) => (
{users.length ? users.filter(x => x.username !== user.username).map(user => (
<Card key={user.id} sx={{ maxWidth: '100%' }}>
<Group position='apart'>
<Group position='left'>
<Avatar color={user.administrator ? 'primary' : 'dark'}>{user.username[0]}</Avatar>
<Avatar color={user.administrator ? 'primary' : 'dark'}>{user.username[0]}</Avatar>
<Title>{user.username}</Title>
</Group>
<Group position='right'>
<ActionIcon aria-label='delete' onClick={() => handleDelete(user)}>
<ActionIcon aria-label='delete' onClick={() => openDeleteModal(user)}>
<TrashIcon />
</ActionIcon>
</Group>
</Group>
</Card>
)): [1,2,3,4].map(x => (
)) : [1, 2, 3, 4].map(x => (
<div key={x}>
<Skeleton width='100%' height={220} sx={{ borderRadius: 1 }}/>
<Skeleton width='100%' height={220} sx={{ borderRadius: 1 }} />
</div>
))}
</SimpleGrid>

View File

@@ -1,7 +1,6 @@
import type { Config } from './types';
import readConfig from './readConfig';
import validateConfig from '../../server/validateConfig';
import validateConfig from '../server/validateConfig';
if (!global.config) global.config = validateConfig(readConfig()) as unknown as Config;
if (!global.config) global.config = validateConfig(readConfig());
export default global.config;

View File

@@ -0,0 +1,43 @@
import { createReadStream, existsSync, ReadStream } from 'fs';
import { readdir, rm, stat, writeFile } from 'fs/promises';
import { join } from 'path';
import { Datasource } from './datasource';
/**
 * Local-filesystem datasource. `path` is resolved relative to the process
 * working directory for every operation.
 */
export class Local extends Datasource {
  public name: string = 'local';

  public constructor(public path: string) {
    super();
  }

  // Resolve a stored file name to its absolute on-disk path. Centralizing
  // this keeps every method anchored to the same cwd-relative root.
  private fullPath(file: string): string {
    return join(process.cwd(), this.path, file);
  }

  /** Write `data` to disk under `file`. */
  public async save(file: string, data: Buffer): Promise<void> {
    await writeFile(this.fullPath(file), data);
  }

  /** Remove `file` from disk; rejects if it does not exist. */
  public async delete(file: string): Promise<void> {
    await rm(this.fullPath(file));
  }

  /** Open a read stream for `file`, or null when it is missing/unreadable. */
  public get(file: string): ReadStream {
    const full = this.fullPath(file);
    if (!existsSync(full)) return null;
    try {
      return createReadStream(full);
    } catch (e) {
      return null;
    }
  }

  /** Total size in bytes of all files in the datasource directory. */
  public async size(): Promise<number> {
    // Anchor to cwd like save/delete/get do; the original read `this.path`
    // directly, which only agreed with the other methods when the process
    // cwd happened to be the resolution base.
    const root = join(process.cwd(), this.path);
    const files = await readdir(root);

    let size = 0;
    for (const file of files) {
      const sta = await stat(join(root, file));
      size += sta.size;
    }

    return size;
  }
}

75
src/lib/datasource/S3.ts Normal file
View File

@@ -0,0 +1,75 @@
import { Datasource } from './datasource';
import AWS from 'aws-sdk';
import { Readable } from 'stream';
import { ConfigS3Datasource } from 'lib/types';
/**
 * S3 (or S3-compatible) datasource backed by aws-sdk v2. Object keys are the
 * stored file names; all objects live in the configured bucket.
 */
export class S3 extends Datasource {
  public name: string = 'S3';
  public s3: AWS.S3;

  public constructor(
    public config: ConfigS3Datasource,
  ) {
    super();
    this.s3 = new AWS.S3({
      accessKeyId: config.access_key_id,
      // Custom endpoint enables S3-compatible providers (MinIO, etc.).
      endpoint: config.endpoint || null,
      s3ForcePathStyle: config.force_s3_path,
      secretAccessKey: config.secret_access_key,
    });
  }

  /** Upload `data` under the object key `file`. */
  public async save(file: string, data: Buffer): Promise<void> {
    return new Promise((resolve, reject) => {
      this.s3.upload({
        Bucket: this.config.bucket,
        Key: file,
        Body: data,
      }, err => {
        if (err) {
          reject(err);
        } else {
          resolve();
        }
      });
    });
  }

  /** Delete the object with key `file`. */
  public async delete(file: string): Promise<void> {
    return new Promise((resolve, reject) => {
      this.s3.deleteObject({
        Bucket: this.config.bucket,
        Key: file,
      }, err => {
        if (err) {
          reject(err);
        } else {
          resolve();
        }
      });
    });
  }

  /** Stream the object with key `file`. */
  public get(file: string): Readable {
    // NOTE: aws-sdk v2 buffers the whole object in memory despite exposing
    // a stream interface.
    return this.s3.getObject({
      Bucket: this.config.bucket,
      Key: file,
    }).createReadStream();
  }

  /** Total size in bytes of every object in the bucket. */
  public async size(): Promise<number> {
    // listObjects returns at most 1000 keys per call; follow the
    // IsTruncated/NextMarker pagination so larger buckets are fully counted
    // (a single call, as before, silently undercounts past 1000 objects).
    let size = 0;
    let marker: string = undefined;

    for (;;) {
      const data = await new Promise<AWS.S3.ListObjectsOutput>((resolve, reject) => {
        this.s3.listObjects({
          Bucket: this.config.bucket,
          Marker: marker,
        }, (err, d) => {
          if (err) {
            reject(err);
          } else {
            resolve(d);
          }
        });
      });

      size += data.Contents.reduce((acc, cur) => acc + cur.Size, 0);

      if (!data.IsTruncated || data.Contents.length === 0) break;
      // NextMarker is only set when Delimiter is used; otherwise the last
      // returned key is the marker for the next page.
      marker = data.NextMarker ?? data.Contents[data.Contents.length - 1].Key;
    }

    return size;
  }
}

View File

@@ -0,0 +1,10 @@
import { Readable } from 'stream';
// Storage-backend contract implemented by Local and S3: persist uploaded
// files by name and expose them back as readable streams.
export abstract class Datasource {
// Human-readable backend identifier (e.g. 'local', 'S3').
public name: string;
// Persist `data` under the key/filename `file`.
public abstract save(file: string, data: Buffer): Promise<void>;
// Remove the stored file; implementations may reject if it is missing.
public abstract delete(file: string): Promise<void>;
// Stream the stored file's contents (implementations return null when missing).
public abstract get(file: string): Readable;
// Total size in bytes of everything stored in this datasource.
public abstract size(): Promise<number>;
}

View File

@@ -0,0 +1,4 @@
// Barrel re-exports for the datasource module.
export { Datasource } from './datasource';
export { Local } from './Local';
export { S3 } from './S3';

20
src/lib/ds.ts Normal file
View File

@@ -0,0 +1,20 @@
import config from './config';
import { S3, Local } from './datasource';
import Logger from './logger';
// Instantiate the configured storage backend exactly once per process,
// caching it on `global` so Next.js hot reloads reuse the same instance.
if (!global.datasource) {
  const type = config.datasource.type;

  if (type === 's3') {
    Logger.get('datasource').info(`Using S3(${config.datasource.s3.bucket}) datasource`);
    global.datasource = new S3(config.datasource.s3);
  } else if (type === 'local') {
    Logger.get('datasource').info(`Using local(${config.datasource.local.directory}) datasource`);
    global.datasource = new Local(config.datasource.local.directory);
  } else {
    throw new Error('Invalid datasource type');
  }
}

export default global.datasource;

View File

@@ -1,38 +0,0 @@
const { format } = require('fecha');
const { blueBright, red, cyan } = require('colorette');
// Minimal console logger producing timestamped, colorized lines of the form:
//   YYYY-MM-DD hh:mm:ss,SSS A LEVEL [name] message
module.exports = class Logger {
// Factory: accepts a class constructor (its `name` is used) or a plain string tag.
static get(clas) {
if (typeof clas !== 'function') if (typeof clas !== 'string') throw new Error('not string/function');
const name = clas.name ?? clas;
return new Logger(name);
}
constructor (name) {
this.name = name;
}
info(message) {
console.log(this.formatMessage('INFO', this.name, message));
}
// Logs the stack trace when given an Error, the value itself otherwise.
error(error) {
console.log(this.formatMessage('ERROR', this.name, error.stack ?? error));
}
formatMessage(level, name, message) {
const time = format(new Date(), 'YYYY-MM-DD hh:mm:ss,SSS A');
return `${time} ${this.formatLevel(level)} [${blueBright(name)}] ${message}`;
}
// Maps a level string to its padded, colorized label.
formatLevel(level) {
switch (level) {
case 'INFO':
return cyan('INFO ');
case 'ERROR':
return red('ERROR');
}
}
};

45
src/lib/logger.ts Normal file
View File

@@ -0,0 +1,45 @@
import { format } from 'fecha';
import { blueBright, red, cyan } from 'colorette';
export enum LoggerLevel {
ERROR,
INFO,
}
/**
 * Minimal console logger producing timestamped, colorized lines of the form:
 *   YYYY-MM-DD hh:mm:ss,SSS A LEVEL [name] message
 */
export default class Logger {
  public name: string;

  /**
   * Factory: accepts a class constructor (its `name` is used) or a plain
   * string tag; throws for anything else.
   */
  static get(clas: any) {
    if (typeof clas !== 'function') if (typeof clas !== 'string') throw new Error('not string/function');
    const name = clas.name ?? clas;
    return new Logger(name);
  }

  constructor(name: string) {
    this.name = name;
  }

  // Joins all arguments with spaces into one message line.
  info(...args: any[]) {
    console.log(this.formatMessage(LoggerLevel.INFO, this.name, args.join(' ')));
  }

  // Prefers the stack trace for Error arguments, the raw value otherwise.
  error(...args: any[]) {
    console.log(this.formatMessage(LoggerLevel.ERROR, this.name, args.map(error => error.stack ?? error).join(' ')));
  }

  formatMessage(level: LoggerLevel, name: string, message: string) {
    const time = format(new Date(), 'YYYY-MM-DD hh:mm:ss,SSS A');
    return `${time} ${this.formatLevel(level)} [${blueBright(name)}] ${message}`;
  }

  // Maps a level to its padded, colorized label.
  formatLevel(level: LoggerLevel) {
    switch (level) {
      case LoggerLevel.INFO:
        return cyan('INFO ');
      case LoggerLevel.ERROR:
        return red('ERROR');
    }
  }
}

View File

@@ -11,7 +11,7 @@ export interface NextApiFile {
originalname: string;
encoding: string;
mimetype: string;
buffer: string;
buffer: Buffer;
size: number;
}
@@ -25,6 +25,7 @@ export type NextApiReq = NextApiRequest & {
administrator: boolean;
id: number;
password: string;
domains: string[];
} | null | void>;
getCookie: (name: string) => string | null;
cleanCookie: (name: string) => void;
@@ -33,7 +34,7 @@ export type NextApiReq = NextApiRequest & {
export type NextApiRes = NextApiResponse & {
error: (message: string) => void;
forbid: (message: string) => void;
forbid: (message: string, extra?: any) => void;
bad: (message: string) => void;
json: (json: any) => void;
ratelimited: () => void;
@@ -52,11 +53,12 @@ export const withZipline = (handler: (req: NextApiRequest, res: NextApiResponse)
});
};
res.forbid = (message: string) => {
res.forbid = (message: string, extra: any = {}) => {
res.setHeader('Content-Type', 'application/json');
res.status(403);
res.json({
error: '403: ' + message,
...extra,
});
};
@@ -93,6 +95,7 @@ export const withZipline = (handler: (req: NextApiRequest, res: NextApiResponse)
maxAge: undefined,
}));
};
req.user = async () => {
try {
const userId = req.getCookie('user');
@@ -111,6 +114,7 @@ export const withZipline = (handler: (req: NextApiRequest, res: NextApiResponse)
systemTheme: true,
token: true,
username: true,
domains: true,
},
});

View File

@@ -1,7 +1,8 @@
const { existsSync, readFileSync } = require('fs');
const { join } = require('path');
const parse = require('@iarna/toml/parse-string.js');
const Logger = require('./logger.js');
import { existsSync, readFileSync } from 'fs';
import { join } from 'path';
import parse from '@iarna/toml/parse-string';
import Logger from './logger';
import { Config } from './types';
const e = (val, type, fn) => ({ val, type, fn });
@@ -14,12 +15,19 @@ const envValues = [
e('LOGGER', 'boolean', (c, v) => c.core.logger = v ?? true),
e('STATS_INTERVAL', 'number', (c, v) => c.core.stats_interval = v),
e('DATASOURCE_TYPE', 'string', (c, v) => c.datasource.type = v),
e('DATASOURCE_LOCAL_DIRECTORY', 'string', (c, v) => c.datasource.local.directory = v),
e('DATASOURCE_S3_ACCESS_KEY_ID', 'string', (c, v) => c.datasource.s3.access_key_id = v ),
e('DATASOURCE_S3_SECRET_ACCESS_KEY', 'string', (c, v) => c.datasource.s3.secret_access_key = v),
e('DATASOURCE_S3_ENDPOINT', 'string', (c, v) => c.datasource.s3.endpoint = v ?? null),
e('DATASOURCE_S3_FORCE_S3_PATH', 'string', (c, v) => c.datasource.s3.force_s3_path = v ?? false),
e('DATASOURCE_S3_BUCKET', 'string', (c, v) => c.datasource.s3.bucket = v),
e('UPLOADER_ROUTE', 'string', (c, v) => c.uploader.route = v),
e('UPLOADER_LENGTH', 'number', (c, v) => c.uploader.length = v),
e('UPLOADER_DIRECTORY', 'string', (c, v) => c.uploader.directory = v),
e('UPLOADER_ADMIN_LIMIT', 'number', (c, v) => c.uploader.admin_limit = v),
e('UPLOADER_USER_LIMIT', 'number', (c, v) => c.uploader.user_limit = v),
e('UPLOADER_DISABLED_EXTS', 'array', (c, v) => v ? c.uploader.disabled_extentions = v : c.uploader.disabled_extentions = []),
e('UPLOADER_DISABLED_EXTS', 'array', (c, v) => v ? c.uploader.disabled_extensions = v : c.uploader.disabled_extensions = []),
e('URLS_ROUTE', 'string', (c, v) => c.urls.route = v),
e('URLS_LENGTH', 'number', (c, v) => c.urls.length = v),
@@ -28,7 +36,7 @@ const envValues = [
e('RATELIMIT_ADMIN', 'number', (c, v) => c.ratelimit.user = v ?? 0),
];
module.exports = function readConfig() {
export default function readConfig(): Config {
if (!existsSync(join(process.cwd(), 'config.toml'))) {
if (!process.env.ZIPLINE_DOCKER_BUILD) Logger.get('config').info('reading environment');
return tryReadEnv();
@@ -43,7 +51,7 @@ module.exports = function readConfig() {
}
};
function tryReadEnv() {
function tryReadEnv(): Config {
const config = {
core: {
secure: undefined,
@@ -54,13 +62,25 @@ function tryReadEnv() {
logger: undefined,
stats_interval: undefined,
},
datasource: {
type: undefined,
local: {
directory: undefined,
},
s3: {
access_key_id: undefined,
secret_access_key: undefined,
endpoint: undefined,
bucket: undefined,
force_s3_path: undefined,
},
},
uploader: {
route: undefined,
length: undefined,
directory: undefined,
admin_limit: undefined,
user_limit: undefined,
disabled_extentions: undefined,
disabled_extensions: undefined,
},
urls: {
route: undefined,
@@ -74,7 +94,7 @@ function tryReadEnv() {
for (let i = 0, L = envValues.length; i !== L; ++i) {
const envValue = envValues[i];
let value = process.env[envValue.val];
let value: any = process.env[envValue.val];
if (!value) {
envValues[i].fn(config, undefined);
@@ -104,4 +124,4 @@ function parseToBoolean(value) {
function parseToArray(value) {
return value.split(',');
}
}

View File

@@ -7,6 +7,7 @@ export interface User {
embedColor: string;
embedSiteName: string;
systemTheme: string;
domains: string[];
}
const initialState: User = null;

View File

@@ -1,54 +1,13 @@
// https://github.com/mikecao/umami/blob/master/redux/store.js
import { useMemo } from 'react';
import { Action, CombinedState, configureStore, EnhancedStore } from '@reduxjs/toolkit';
import thunk, { ThunkAction } from 'redux-thunk';
import { configureStore } from '@reduxjs/toolkit';
import rootReducer from './reducers';
import { User } from './reducers/user';
import { useDispatch, TypedUseSelectorHook, useSelector } from 'react-redux';
let store: EnhancedStore<CombinedState<{
user: User;
}>>;
export function getStore(preloadedState) {
return configureStore({
reducer: rootReducer,
middleware: [thunk],
preloadedState,
});
}
export const initializeStore = preloadedState => {
let _store = store ?? getStore(preloadedState);
if (preloadedState && store) {
_store = getStore({
...store.getState(),
...preloadedState,
});
store = undefined;
}
if (typeof window === 'undefined') return _store;
if (!store) store = _store;
return _store;
};
export function useStore(initialState?: User) {
return useMemo(() => initializeStore(initialState), [initialState]);
}
// Single app-wide Redux store (replaces the previous per-request store
// factory; Next.js client-side state only).
export const store = configureStore({
reducer: rootReducer,
});
// Types inferred from the store for use by the typed hooks below.
export type AppState = ReturnType<typeof store.getState>
export type AppDispatch = typeof store.dispatch
export type AppThunk<ReturnType = void> = ThunkAction<
ReturnType,
AppState,
unknown,
Action<User>
>
// Typed wrappers around react-redux's useDispatch/useSelector.
export const useStoreDispatch = () => useDispatch<AppDispatch>();
export const useStoreSelector: TypedUseSelectorHook<AppState> = useSelector;

View File

@@ -21,6 +21,28 @@ export interface ConfigCore {
stats_interval: number;
}
export interface ConfigDatasource {
// The type of datasource
type: 'local' | 's3';
// The local datasource
local: ConfigLocalDatasource;
s3?: ConfigS3Datasource;
}
export interface ConfigLocalDatasource {
// The directory to store files in
directory: string;
}
export interface ConfigS3Datasource {
access_key_id: string;
secret_access_key: string;
endpoint?: string;
bucket: string;
force_s3_path: boolean;
}
export interface ConfigUploader {
// The route uploads will be served on
route: string;
@@ -28,9 +50,6 @@ export interface ConfigUploader {
// Length of random chars to generate for file names
length: number;
// Where uploads are stored
directory: string;
// Admin file upload limit
admin_limit: number;
@@ -38,7 +57,7 @@ export interface ConfigUploader {
user_limit: number;
// Disabled extensions to block from uploading
disabled_extentions: string[];
disabled_extensions: string[];
}
export interface ConfigUrls {
@@ -63,4 +82,5 @@ export interface Config {
uploader: ConfigUploader;
urls: ConfigUrls;
ratelimit: ConfigRatelimit;
datasource: ConfigDatasource;
}

View File

@@ -1,47 +1,85 @@
import React, { useEffect } from 'react';
import React, { useEffect, useState } from 'react';
import Head from 'next/head';
import { GetServerSideProps } from 'next';
import { Box, useMantineTheme } from '@mantine/core';
import { Box, Button, Modal, PasswordInput } from '@mantine/core';
import config from 'lib/config';
import prisma from 'lib/prisma';
import { getFile } from '../../server/util';
import { parse } from 'lib/clientUtils';
import * as exts from '../../scripts/exts';
import { Prism } from '@mantine/prism';
import ZiplineTheming from 'components/Theming';
export default function EmbeddedImage({ image, user }) {
export default function EmbeddedImage({ image, user, pass }) {
const dataURL = (route: string) => `${route}/${image.file}`;
const [opened, setOpened] = useState(pass);
const [password, setPassword] = useState('');
const [error, setError] = useState('');
// reapply date from workaround
image.created_at = new Date(image.created_at);
const updateImage = () => {
const check = async () => {
const res = await fetch(`/api/auth/image?id=${image.id}&password=${password}`);
if (res.ok) {
setError('');
updateImage(`/api/auth/image?id=${image.id}&password=${password}`);
setOpened(false);
} else {
setError('Invalid password');
}
};
const updateImage = async (url?: string) => {
const imageEl = document.getElementById('image_content') as HTMLImageElement;
const original = new Image;
original.src = dataURL('/r');
const img = new Image();
img.addEventListener('load', function () {
if (this.naturalWidth > innerWidth) imageEl.width = Math.floor(this.naturalWidth * Math.min((innerHeight / this.naturalHeight), (innerWidth / this.naturalWidth)));
else imageEl.width = this.naturalWidth;
});
if (original.width > innerWidth) imageEl.width = Math.floor(original.width * Math.min((innerHeight / original.height), (innerWidth / original.width)));
else imageEl.width = original.width;
img.src = url || dataURL('/r');
if (url) {
imageEl.src = url;
};
};
useEffect(() => updateImage(), []);
useEffect(() => {
if (pass) {
setOpened(true);
} else {
updateImage();
}
}, []);
return (
<>
<Head>
{image.embed && (
<>
{user.embedSiteName && (<meta property='og:site_name' content={parse(user.embedSiteName, image, user)} />)}
{user.embedTitle && (<meta property='og:title' content={parse(user.embedTitle, image, user)} />)}
<meta property='theme-color' content={user.embedColor}/>
{user.embedSiteName && <meta property='og:site_name' content={parse(user.embedSiteName, image, user)} />}
{user.embedTitle && <meta property='og:title' content={parse(user.embedTitle, image, user)} />}
<meta property='theme-color' content={user.embedColor} />
</>
)}
<meta property='og:image' content={dataURL('/r')} />
<meta property='twitter:card' content='summary_large_image' />
<title>{image.file}</title>
</Head>
<Modal
opened={opened}
onClose={() => setOpened(false)}
title='Password Protected'
centered={true}
withCloseButton={true}
closeOnEscape={false}
closeOnClickOutside={false}
>
<PasswordInput label='Password' placeholder='Password' error={error} value={password} onChange={e => setPassword(e.target.value)} />
<Button fullWidth onClick={() => check()} mt='md'>
Submit
</Button>
</Modal>
<Box
sx={{
display: 'flex',
@@ -57,11 +95,13 @@ export default function EmbeddedImage({ image, user }) {
}
export const getServerSideProps: GetServerSideProps = async (context) => {
const id = context.params.id[1];
const route = context.params.id[0];
const routes = [config.uploader.route.substring(1), config.urls.route.substring(1)];
if (!routes.includes(route)) return { notFound: true };
if (route === routes[1]) {
const serve_on_root = /(^[^\\.]+\.[^\\.]+)/.test(route);
const id = serve_on_root ? route : context.params.id[1];
const uploader_route = config.uploader.route.substring(1);
if (route === config.urls.route.substring(1)) {
const url = await prisma.url.findFirst({
where: {
OR: [
@@ -80,10 +120,10 @@ export const getServerSideProps: GetServerSideProps = async (context) => {
props: {},
redirect: {
destination: url.destination,
},
},
};
} else {
} else if (uploader_route === '' ? /(^[^\\.]+\.[^\\.]+)/.test(route) : route === uploader_route) {
const image = await prisma.image.findFirst({
where: {
OR: [
@@ -99,6 +139,7 @@ export const getServerSideProps: GetServerSideProps = async (context) => {
userId: true,
embed: true,
created_at: true,
password: true,
},
});
if (!image) return { notFound: true };
@@ -120,8 +161,6 @@ export const getServerSideProps: GetServerSideProps = async (context) => {
image.created_at = image.created_at.toString();
const prismRender = Object.keys(exts).includes(image.file.split('.').pop());
// let prismRenderCode;/
// if (prismRender) prismRenderCode = (await getFile(config.uploader.directory, id)).toString();
if (prismRender) return {
redirect: {
destination: `/code/${image.file}`,
@@ -130,18 +169,24 @@ export const getServerSideProps: GetServerSideProps = async (context) => {
};
if (!image.mimetype.startsWith('image')) {
const data = await getFile(config.uploader.directory, id);
const { default: datasource } = await import('lib/ds');
const data = datasource.get(image.file);
if (!data) return { notFound: true };
context.res.end(data);
data.pipe(context.res);
return { props: {} };
}
const pass = image.password ? true : false;
delete image.password;
return {
props: {
image,
user,
pass,
},
};
} else {
return { notFound: true };
}
};

View File

@@ -1,12 +1,10 @@
import React from 'react';
import { Provider } from 'react-redux';
import Head from 'next/head';
import { useStore } from 'lib/redux/store';
import { store } from 'lib/redux/store';
import ZiplineTheming from 'components/Theming';
export default function MyApp({ Component, pageProps }) {
const store = useStore();
return (
<Provider store={store}>
<Head>

View File

@@ -0,0 +1,33 @@
import prisma from 'lib/prisma';
import { NextApiReq, NextApiRes, withZipline } from 'middleware/withZipline';
import { checkPassword } from 'lib/util';
import datasource from 'lib/ds';
import mimes from '../../../../scripts/mimes';
import { extname } from 'path';
/**
 * Serves a password-protected image: GET /api/auth/image?id=...&password=...
 * Validates the supplied password against the stored hash, then streams the
 * file from the active datasource.
 */
async function handler(req: NextApiReq, res: NextApiRes) {
  const { id, password } = req.query;

  const image = await prisma.image.findFirst({
    where: {
      id: Number(id),
    },
  });

  if (!image) return res.status(404).end(JSON.stringify({ error: 'Image not found' }));

  // Guard: an image without a stored password hash must not reach
  // checkPassword (comparing against a null hash).
  if (!image.password) return res.forbid('Image is not password protected');
  if (!password) return res.forbid('No password provided');

  const valid = await checkPassword(password as string, image.password);
  if (!valid) return res.forbid('Wrong password');

  const data = datasource.get(image.file);
  if (!data) return res.error('Image not found');

  // Fall back to a generic binary type when the extension is unknown.
  const mimetype = mimes[extname(image.file)] ?? 'application/octet-stream';
  res.setHeader('Content-Type', mimetype);

  data.pipe(res);
  data.on('error', () => res.error('Image not found'));
  data.on('end', () => res.end());
}
export default withZipline(handler);

View File

@@ -2,22 +2,19 @@ import multer from 'multer';
import prisma from 'lib/prisma';
import zconfig from 'lib/config';
import { NextApiReq, NextApiRes, withZipline } from 'lib/middleware/withZipline';
import { createInvisImage, randomChars } from 'lib/util';
import { writeFile } from 'fs/promises';
import { join } from 'path';
import { createInvisImage, randomChars, hashPassword } from 'lib/util';
import Logger from 'lib/logger';
import { ImageFormat, InvisibleImage } from '@prisma/client';
import { format as formatDate } from 'fecha';
import { v4 } from 'uuid';
import datasource from 'lib/ds';
const uploader = multer({
storage: multer.memoryStorage(),
});
const uploader = multer();
async function handler(req: NextApiReq, res: NextApiRes) {
if (req.method !== 'POST') return res.forbid('invalid method');
if (!req.headers.authorization) return res.forbid('no authorization');
const user = await prisma.user.findFirst({
where: {
token: req.headers.authorization,
@@ -26,19 +23,22 @@ async function handler(req: NextApiReq, res: NextApiRes) {
if (!user) return res.forbid('authorization incorect');
if (user.ratelimited) return res.ratelimited();
await run(uploader.array('file'))(req, res);
if (!req.files) return res.error('no files');
if (req.files && req.files.length === 0) return res.error('no files');
const rawFormat = ((req.headers.format || '') as string).toUpperCase() || 'RANDOM';
const format: ImageFormat = Object.keys(ImageFormat).includes(rawFormat) && ImageFormat[rawFormat];
const files = [];
for (let i = 0; i !== req.files.length; ++i) {
const file = req.files[i];
if (file.size > zconfig.uploader[user.administrator ? 'admin_limit' : 'user_limit']) return res.error(`file[${i}] size too big`);
const ext = file.originalname.split('.').pop();
if (zconfig.uploader.disabled_extentions.includes(ext)) return res.error('disabled extension recieved: ' + ext);
if (zconfig.uploader.disabled_extensions.includes(ext)) return res.error('disabled extension recieved: ' + ext);
let fileName: string;
switch (format) {
@@ -56,6 +56,10 @@ async function handler(req: NextApiReq, res: NextApiRes) {
break;
}
let password = null;
if (req.headers.password) {
password = await hashPassword(req.headers.password as string);
}
let invis: InvisibleImage;
const image = await prisma.image.create({
@@ -65,14 +69,20 @@ async function handler(req: NextApiReq, res: NextApiRes) {
userId: user.id,
embed: !!req.headers.embed,
format,
password,
},
});
if (req.headers.zws) invis = await createInvisImage(zconfig.uploader.length, image.id);
await writeFile(join(process.cwd(), zconfig.uploader.directory, image.file), file.buffer);
Logger.get('image').info(`User ${user.username} (${user.id}) uploaded an image ${image.file} (${image.id})`);
files.push(`${zconfig.core.secure ? 'https' : 'http'}://${req.headers.host}${zconfig.uploader.route}/${invis ? invis.invis : image.file}`);
await datasource.save(image.file, file.buffer);
Logger.get('image').info(`User ${user.username} (${user.id}) uploaded an image ${image.file} (${image.id})`);
if (user.domains.length) {
const domain = user.domains[Math.floor(Math.random() * user.domains.length)];
files.push(`${domain}${zconfig.uploader.route === '/' ? '' : zconfig.uploader.route}/${invis ? invis.invis : image.file}`);
} else {
files.push(`${zconfig.core.secure ? 'https' : 'http'}://${req.headers.host}${zconfig.uploader.route === '/' ? '' : zconfig.uploader.route}/${invis ? invis.invis : image.file}`);
}
}
if (user.administrator && zconfig.ratelimit.admin !== 0) {
@@ -113,8 +123,6 @@ function run(middleware: any) {
}
export default async function handlers(req, res) {
await run(uploader.array('file'))(req, res);
return withZipline(handler)(req, res);
};

View File

@@ -0,0 +1,134 @@
import { NextApiReq, NextApiRes, withZipline } from 'middleware/withZipline';
import prisma from 'lib/prisma';
import Logger from 'lib/logger';
import { Zip, ZipPassThrough } from 'fflate';
import datasource from 'lib/ds';
import { readdir } from 'fs/promises';
import { createReadStream, createWriteStream } from 'fs';
async function handler(req: NextApiReq, res: NextApiRes) {
  const user = await req.user();
  if (!user) return res.forbid('not logged in');

  if (req.method === 'POST') {
    // Collect every image owned by the requesting user for the export.
    const files = await prisma.image.findMany({
      where: {
        userId: user.id,
      },
    });

    const zip = new Zip();
    const export_name = `zipline_export_${user.id}_${Date.now()}.zip`;
    const write_stream = createWriteStream(`/tmp/${export_name}`);

    // Wire backpressure between an fflate stream and the source feeding it so
    // a slow deflater pauses reads instead of buffering everything in memory.
    // (Adapted from the fflate streaming example.)
    const onBackpressure = (stream, outputStream, cb) => {
      const runCb = () => {
        // Pause if either output or internal backpressure should be applied
        cb(applyOutputBackpressure || backpressureBytes > backpressureThreshold);
      };

      // Internal backpressure (for when AsyncZipDeflate is slow)
      const backpressureThreshold = 65536;
      let backpressure = [];
      let backpressureBytes = 0;
      const push = stream.push;
      stream.push = (dat, final) => {
        backpressure.push(dat.length);
        backpressureBytes += dat.length;
        runCb();
        push.call(stream, dat, final);
      };
      let ondata = stream.ondata;
      const ondataPatched = (err, dat, final) => {
        ondata.call(stream, err, dat, final);
        backpressureBytes -= backpressure.shift();
        runCb();
      };
      if (ondata) {
        stream.ondata = ondataPatched;
      } else {
        // stream.ondata may be assigned after this call; intercept the setter
        // so the patched wrapper stays in place.
        Object.defineProperty(stream, 'ondata', {
          get: () => ondataPatched,
          set: cb => ondata = cb,
        });
      }

      // Output backpressure (for when outputStream is slow)
      // NOTE(review): outputStream here is the datasource *read* stream, which
      // has no write()/drain of its own — this half is effectively inert for
      // that caller; kept verbatim from the upstream example. Confirm intent.
      let applyOutputBackpressure = false;
      const write = outputStream.write;
      outputStream.write = (data) => {
        const outputNotFull = write.call(outputStream, data);
        applyOutputBackpressure = !outputNotFull;
        runCb();
      };
      outputStream.on('drain', () => {
        applyOutputBackpressure = false;
        runCb();
      });
    };

    // Stream zip chunks straight to the temp file; close it on the final
    // chunk or on any deflate error.
    zip.ondata = async (err, data, final) => {
      if (!err) {
        write_stream.write(data);
        if (final) {
          write_stream.close();
          Logger.get('user').info(`Export for ${user.username} (${user.id}) has completed and is available at ${export_name}`);
        }
      } else {
        write_stream.close();
        Logger.get('user').error(`Export for ${user.username} (${user.id}) has failed\n${err}`);
      }
    };

    Logger.get('user').info(`Export for ${user.username} (${user.id}) has started`);
    for (let i = 0; i !== files.length; ++i) {
      const file = files[i];
      const stream = datasource.get(file.file);

      // Files missing from the datasource are silently skipped so one lost
      // file doesn't abort the whole export.
      if (stream) {
        const def = new ZipPassThrough(file.file);
        zip.add(def);

        onBackpressure(def, stream, shouldApplyBackpressure => {
          if (shouldApplyBackpressure) {
            stream.pause();
          } else if (stream.isPaused()) {
            stream.resume();
          }
        });

        stream.on('data', c => def.push(c));
        stream.on('end', () => def.push(new Uint8Array(0), true));
      }
    }

    zip.end();

    res.json({
      url: '/api/user/export?name=' + export_name,
    });
  } else {
    const export_name = req.query.name as string;
    if (export_name) {
      // Only serve names this handler generated itself. The strict pattern
      // rejects anything containing path separators and closes a /tmp path
      // traversal via the user-controlled query parameter.
      if (!/^zipline_export_\d+_\d+\.zip$/.test(export_name)) return res.forbid('invalid export name');

      // parts: ['zipline', 'export', '<userId>', '<timestamp>.zip']
      const parts = export_name.split('_');
      if (Number(parts[2]) !== user.id) return res.forbid('cannot access export');

      const stream = createReadStream(`/tmp/${export_name}`);
      // A missing/expired export file emits 'error'; without this handler the
      // unhandled event would crash the process.
      stream.on('error', () => res.error('export not found'));

      res.setHeader('Content-Type', 'application/zip');
      res.setHeader('Content-Disposition', `attachment; filename="${export_name}"`);
      stream.pipe(res);
    } else {
      // List only this user's exports — the previous prefix-only filter
      // leaked every user's export names.
      const files = await readdir('/tmp');
      const exports = files.filter(f => f.startsWith(`zipline_export_${user.id}_`));
      res.json({
        exports,
      });
    }
  }
}

export default withZipline(handler);

View File

@@ -1,29 +1,49 @@
import { NextApiReq, NextApiRes, withZipline } from 'middleware/withZipline';
import prisma from 'lib/prisma';
import config from 'lib/config';
import { chunk } from 'lib/util';
import { rm } from 'fs/promises';
import { join } from 'path';
import Logger from 'lib/logger';
import datasource from 'lib/ds';
async function handler(req: NextApiReq, res: NextApiRes) {
const user = await req.user();
if (!user) return res.forbid('not logged in');
if (req.method === 'DELETE') {
if (!req.body.id) return res.error('no file id');
if (req.body.all) {
const files = await prisma.image.findMany({
where: {
userId: user.id,
},
});
const image = await prisma.image.delete({
where: {
id: req.body.id,
},
});
for (let i = 0; i !== files.length; ++i) {
await datasource.delete(files[i].file);
}
await rm(join(process.cwd(), config.uploader.directory, image.file));
const { count } = await prisma.image.deleteMany({
where: {
userId: user.id,
},
});
Logger.get('image').info(`User ${user.username} (${user.id}) deleted ${count} images.`);
Logger.get('image').info(`User ${user.username} (${user.id}) deleted an image ${image.file} (${image.id})`);
return res.json({ count });
} else {
if (!req.body.id) return res.error('no file id');
return res.json(image);
const image = await prisma.image.delete({
where: {
id: req.body.id,
},
});
await datasource.delete(image.file);
Logger.get('image').info(`User ${user.username} (${user.id}) deleted an image ${image.file} (${image.id})`);
delete image.password;
return res.json(image);
}
} else if (req.method === 'PATCH') {
if (!req.body.id) return res.error('no file id');
@@ -36,6 +56,7 @@ async function handler(req: NextApiReq, res: NextApiRes) {
},
});
delete image.password;
return res.json(image);
} else {
let images = await prisma.image.findMany({
@@ -43,6 +64,9 @@ async function handler(req: NextApiReq, res: NextApiRes) {
userId: user.id,
favorite: !!req.query.favorite,
},
orderBy: {
created_at: 'desc',
},
select: {
created_at: true,
file: true,

View File

@@ -2,6 +2,7 @@ import prisma from 'lib/prisma';
import { hashPassword } from 'lib/util';
import { NextApiReq, NextApiRes, withZipline } from 'middleware/withZipline';
import Logger from 'lib/logger';
import pkg from '../../../../package.json';
async function handler(req: NextApiReq, res: NextApiRes) {
const user = await req.user();
@@ -51,6 +52,36 @@ async function handler(req: NextApiReq, res: NextApiRes) {
data: { systemTheme: req.body.systemTheme },
});
if (req.body.domains) {
if (!req.body.domains) await prisma.user.update({
where: { id: user.id },
data: { domains: [] },
});
const invalidDomains = [];
for (const domain of req.body.domains) {
try {
const url = new URL(domain);
url.pathname = '/api/version';
const res = await fetch(url.toString());
if (!res.ok) invalidDomains.push({ domain, reason: 'Got a non OK response' });
else {
const body = await res.json();
if (body?.local !== pkg.version) invalidDomains.push({ domain, reason: 'Version mismatch' });
else await prisma.user.update({
where: { id: user.id },
data: { domains: { push: url.origin } },
});
}
} catch (e) {
invalidDomains.push({ domain, reason: e.message });
}
}
if (invalidDomains.length) return res.forbid('Invalid domains', { invalidDomains });
}
const newUser = await prisma.user.findFirst({
where: {
id: Number(user.id),
@@ -66,6 +97,7 @@ async function handler(req: NextApiReq, res: NextApiRes) {
systemTheme: true,
token: true,
username: true,
domains: true,
},
});

View File

@@ -1,5 +1,7 @@
import { NextApiReq, NextApiRes, withZipline } from 'middleware/withZipline';
import prisma from 'lib/prisma';
import Logger from 'lib/logger';
import datasource from 'lib/ds';
async function handler(req: NextApiReq, res: NextApiRes) {
const user = await req.user();
@@ -16,6 +18,25 @@ async function handler(req: NextApiReq, res: NextApiRes) {
});
if (!deleteUser) return res.forbid('user doesn\'t exist');
if (req.body.delete_images) {
const files = await prisma.image.findMany({
where: {
userId: deleteUser.id,
},
});
for (let i = 0; i !== files.length; ++i) {
await datasource.delete(files[i].file);
}
const { count } = await prisma.image.deleteMany({
where: {
userId: deleteUser.id,
},
});
Logger.get('image').info(`User ${user.username} (${user.id}) deleted ${count} images of user ${deleteUser.username} (${deleteUser.id})`);
}
await prisma.user.delete({
where: {
id: deleteUser.id,

View File

@@ -9,7 +9,6 @@ import { Cross1Icon, DownloadIcon } from '@modulz/radix-icons';
export default function Login() {
const router = useRouter();
const notif = useNotifications();
const [versions, setVersions] = React.useState<{ upstream: string, local: string }>(null);
const form = useForm({
initialValues: {
@@ -36,25 +35,14 @@ export default function Login() {
icon: <Cross1Icon />,
});
} else {
router.push(router.query.url as string || '/dashboard');
await router.push(router.query.url as string || '/dashboard');
}
};
useEffect(() => {
(async () => {
const a = await fetch('/api/user');
if (a.ok) router.push('/dashboard');
else {
const v = await useFetch('/api/version');
setVersions(v);
if (v.local !== v.upstream) {
notif.showNotification({
title: 'Update available',
message: `A new version of Zipline is available. You are running ${v.local} and the latest version is ${v.upstream}.`,
icon: <DownloadIcon />,
});
}
}
if (a.ok) await router.push('/dashboard');
})();
}, []);
@@ -71,27 +59,6 @@ export default function Login() {
</form>
</div>
</Center>
<Box
sx={{
zIndex: 99,
position: 'fixed',
bottom: '10px',
right: '20px',
}}
>
{versions && (
<Tooltip
wrapLines
width={220}
transition='rotate-left'
transitionDuration={200}
label={versions.local !== versions.upstream ? 'Looks like you are running an outdated version of Zipline. Please update to the latest version.' : 'You are running the latest version of Zipline.'}
>
<Badge radius='md' size='lg' variant='dot' color={versions.local !== versions.upstream ? 'red' : 'primary'}>{versions.local}</Badge>
</Tooltip>
)}
</Box>
</>
);
}

View File

@@ -4,8 +4,6 @@ import { LoadingOverlay } from '@mantine/core';
export default function Logout() {
const router = useRouter();
const [visible, setVisible] = useState(true);
useEffect(() => {
(async () => {
@@ -20,7 +18,7 @@ export default function Logout() {
}, []);
return (
<LoadingOverlay visible={visible} />
<LoadingOverlay visible={true} />
);
}

View File

@@ -11,7 +11,7 @@ export default function Code() {
useEffect(() => {
(async () => {
const res = await fetch('/r/' + id);
if (id && !res.ok) router.push('/404');
if (id && !res.ok) await router.push('/404');
const data = await res.text();
if (id) setPrismRenderCode(data);
})();

View File

@@ -1,9 +1,7 @@
import React from 'react';
import { GetStaticProps } from 'next';
import useLogin from 'hooks/useLogin';
import Layout from 'components/Layout';
import Upload from 'components/pages/Upload';
import config from 'lib/config';
export default function UploadPage({ route }) {
const { user, loading } = useLogin();
@@ -14,17 +12,9 @@ export default function UploadPage({ route }) {
<Layout
user={user}
>
<Upload route={route}/>
<Upload/>
</Layout>
);
}
export const getStaticProps: GetStaticProps = async () => {
return {
props: {
route: process.env.ZIPLINE_DOCKER_BUILD === '1' ? '/u' : config.uploader.route,
},
};
};
UploadPage.title = 'Zipline - Upload';

7
src/server/index.ts Normal file
View File

@@ -0,0 +1,7 @@
import { version } from '../../package.json';
import Logger from '../lib/logger';

// Entry point: print the startup banner, then boot the HTTP/Next server.
// NOTE(review): the Server import is placed after the log call, presumably so
// the banner prints before server.ts's module side effects run — that ordering
// only holds when compiled to CommonJS (require runs in statement order);
// native ES-module imports are hoisted. Confirm the build target.
Logger.get('server').info(`starting zipline@${version} server`);

import Server from './server';

// Constructing Server kicks off its async start() sequence (migrations,
// Next.js prepare, HTTP listen) from the constructor.
new Server();

183
src/server/server.ts Normal file
View File

@@ -0,0 +1,183 @@
import Router from 'find-my-way';
import { NextServer, RequestHandler } from 'next/dist/server/next';
import { Image, PrismaClient } from '@prisma/client';
import { createServer, IncomingMessage, OutgoingMessage, Server as HttpServer, ServerResponse } from 'http';
import next from 'next';
import config from '../lib/config';
import datasource from '../lib/ds';
import { getStats, log, migrations } from './util';
import { mkdir } from 'fs/promises';
import Logger from '../lib/logger';
import mimes from '../../scripts/mimes';
import { extname } from 'path';
import exts from '../../scripts/exts';
const serverLog = Logger.get('server');
// Custom HTTP front-end: routes /u/:id and /r/:id straight to the datasource
// (bypassing Next.js for plain file serving) and hands everything else to the
// Next.js request handler.
export default class Server {
  public router: Router.Instance<Router.HTTPVersion.V1>;
  public nextServer: NextServer;
  public handle: RequestHandler;
  public prisma: PrismaClient;

  private http: HttpServer;

  public constructor() {
    // start() is async; the constructor intentionally fires and forgets it.
    this.start();
  }

  private async start() {
    // Refuse to run with the default secret — sessions signed with a known
    // secret leave the instance trivially forgeable.
    if (config.core.secret === 'changethis') {
      serverLog.error('Secret is not set!');
      serverLog.error('Running Zipline as is, without a randomized secret is not recommended and leaves your instance at risk!');
      serverLog.error('Please change your secret in the config file or environment variables.');
      serverLog.error('The config file is located at `config.toml`, or if using docker-compose you can change the variables in the `docker-compose.yml` file.');
      serverLog.error('It is recomended to use a secret that is alphanumeric and randomized. A way you can generate this is through a password manager you may have.');
      process.exit(1);
    }

    const dev = process.env.NODE_ENV === 'development';

    // Prisma reads the connection string from the environment.
    process.env.DATABASE_URL = config.core.database_url;

    await migrations();
    this.prisma = new PrismaClient();

    // The local datasource needs its upload directory to exist up front.
    if (config.datasource.type === 'local') {
      await mkdir(config.datasource.local.directory, { recursive: true });
    }

    this.nextServer = next({
      dir: '.',
      dev,
      quiet: !dev,
      hostname: config.core.host,
      port: config.core.port,
    });

    this.handle = this.nextServer.getRequestHandler();
    this.router = Router({
      defaultRoute: (req, res) => {
        this.handle(req, res);
      },
    });

    // When the uploader route is '/', match root-level "name.ext" paths only;
    // otherwise match under the configured route.
    this.router.on('GET', config.uploader.route === '/' ? '/:id(^[^\\.]+\\.[^\\.]+)' : `${config.uploader.route}/:id`, async (req, res, params) => {
      const image = await this.prisma.image.findFirst({
        where: {
          OR: [
            { file: params.id },
            { invisible: { invis: decodeURI(params.id) } },
          ],
        },
      });

      // No DB record: serve the raw file from the datasource. The early
      // return is required — falling through would dereference a null image.
      if (!image) return this.rawFile(req, res, params.id);

      // Password-protected and embedded images go through Next.js pages.
      if (image.password) await this.handle(req, res);
      else if (image.embed) await this.handle(req, res);
      else await this.fileDb(req, res, image);
    });

    this.router.on('GET', '/r/:id', async (req, res, params) => {
      const image = await this.prisma.image.findFirst({
        where: {
          OR: [
            { file: params.id },
            { invisible: { invis: decodeURI(params.id) } },
          ],
        },
      });

      if (!image) return this.rawFile(req, res, params.id);

      if (image.password) await this.handle(req, res);
      else await this.rawFileDb(req, res, image);
    });

    await this.nextServer.prepare();

    this.http = createServer((req, res) => {
      this.router.lookup(req, res);
      if (config.core.logger) log(req.url);
    });

    this.http.on('error', (e) => {
      serverLog.error(e);
      process.exit(1);
    });

    this.http.on('listening', () => {
      serverLog.info(`listening on ${config.core.host}:${config.core.port}`);
    });

    this.http.listen(config.core.port, config.core.host ?? '0.0.0.0');

    this.stats();
  }

  // Serve a file that has no DB record, guessing the mimetype from its
  // extension.
  private async rawFile(req: IncomingMessage, res: OutgoingMessage, id: string) {
    const data = datasource.get(id);
    if (!data) return this.nextServer.render404(req, res as ServerResponse);

    const mimetype = mimes[extname(id)] ?? 'application/octet-stream';
    res.setHeader('Content-Type', mimetype);

    data.pipe(res);
    data.on('error', () => this.nextServer.render404(req, res as ServerResponse));
    data.on('end', () => res.end());
  }

  // Serve a DB-backed file as-is and count the view.
  private async rawFileDb(req: IncomingMessage, res: OutgoingMessage, image: Image) {
    const data = datasource.get(image.file);
    if (!data) return this.nextServer.render404(req, res as ServerResponse);

    res.setHeader('Content-Type', image.mimetype);

    data.pipe(res);
    data.on('error', () => this.nextServer.render404(req, res as ServerResponse));
    data.on('end', () => res.end());

    await this.prisma.image.update({
      where: { id: image.id },
      data: { views: { increment: 1 } },
    });
  }

  // Serve a DB-backed file unless its extension is one Next.js renders as a
  // code page; count the view when served directly.
  private async fileDb(req: IncomingMessage, res: OutgoingMessage, image: Image) {
    const ext = image.file.split('.').pop();
    if (Object.keys(exts).includes(ext)) return this.handle(req, res as ServerResponse);

    const data = datasource.get(image.file);
    if (!data) return this.nextServer.render404(req, res as ServerResponse);

    res.setHeader('Content-Type', image.mimetype);

    data.pipe(res);
    data.on('error', () => this.nextServer.render404(req, res as ServerResponse));
    data.on('end', () => res.end());

    await this.prisma.image.update({
      where: { id: image.id },
      data: { views: { increment: 1 } },
    });
  }

  // Record a stats snapshot immediately, then on the configured interval.
  private async stats() {
    const stats = await getStats(this.prisma, datasource);
    await this.prisma.stats.create({
      data: {
        data: stats,
      },
    });

    setInterval(async () => {
      const stats = await getStats(this.prisma, datasource);
      await this.prisma.stats.create({
        data: {
          data: stats,
        },
      });
      if (config.core.logger) serverLog.info('stats updated');
    }, config.core.stats_interval * 1000);
  }
}

View File

@@ -1,60 +1,34 @@
const { readFile, readdir, stat } = require('fs/promises');
const { join } = require('path');
const { Migrate } = require('@prisma/migrate/dist/Migrate.js');
const Logger = require('../src/lib/logger.js');
import { Migrate } from '@prisma/migrate/dist/Migrate';
import { ensureDatabaseExists } from '@prisma/migrate/dist/utils/ensureDatabaseExists';
import Logger from '../lib/logger';
import { Datasource } from 'lib/datasource';
import { PrismaClient } from '@prisma/client';
async function migrations() {
export async function migrations() {
const migrate = new Migrate('./prisma/schema.prisma');
await ensureDatabaseExists('apply', true, './prisma/schema.prisma');
const diagnose = await migrate.diagnoseMigrationHistory({
optInToShadowDatabase: false,
});
if (diagnose.history?.diagnostic === 'databaseIsBehind') {
Logger.get('database').info('migrating database');
await migrate.applyMigrations();
Logger.get('database').info('finished migrating database');
try {
Logger.get('database').info('migrating database');
await migrate.applyMigrations();
} finally {
migrate.stop();
Logger.get('database').info('finished migrating database');
}
}
migrate.stop();
}
function log(url) {
export function log(url: string) {
if (url.startsWith('/_next') || url.startsWith('/__nextjs')) return;
return Logger.get('url').info(url);
}
function shouldUseYarn() {
try {
execSync('yarnpkg --version', { stdio: 'ignore' });
return true;
} catch (e) {
return false;
}
}
async function getFile(dir, file) {
try {
const data = await readFile(join(process.cwd(), dir, file));
return data;
} catch (e) {
return null;
}
}
async function sizeOfDir(directory) {
const files = await readdir(directory);
let size = 0;
for (let i = 0, L = files.length; i !== L; ++i) {
const sta = await stat(join(directory, files[i]));
size += sta.size;
}
return size;
}
function bytesToRead(bytes) {
export function bytesToRead(bytes: number) {
const units = ['B', 'kB', 'MB', 'GB', 'TB', 'PB'];
let num = 0;
@@ -67,8 +41,8 @@ function bytesToRead(bytes) {
}
async function getStats(prisma, config) {
const size = await sizeOfDir(join(process.cwd(), config.uploader.directory));
export async function getStats(prisma: PrismaClient, datasource: Datasource) {
const size = await datasource.size();
const byUser = await prisma.image.groupBy({
by: ['userId'],
_count: {
@@ -112,19 +86,9 @@ async function getStats(prisma, config) {
size: bytesToRead(size),
size_num: size,
count,
count_by_user: count_by_user.sort((a,b) => b.count-a.count),
count_by_user: count_by_user.sort((a, b) => b.count - a.count),
count_users,
views_count: (viewsCount[0]?._sum?.views ?? 0),
types_count: types_count.sort((a,b) => b.count-a.count),
types_count: types_count.sort((a, b) => b.count - a.count),
};
}
module.exports = {
migrations,
bytesToRead,
getFile,
getStats,
log,
sizeOfDir,
shouldUseYarn,
};
}

View File

@@ -0,0 +1,62 @@
import { Config } from 'lib/types';
import { object, bool, string, number, boolean, array } from 'yup';
// Yup schema describing the full Zipline config. Unset keys fall back to the
// defaults declared here; failures surface through validate() below.
// Uses boolean() consistently (the original mixed the bool()/boolean() aliases).
const validator = object({
  core: object({
    secure: boolean().default(false),
    // The "changethis" placeholder is additionally rejected at server startup.
    secret: string().min(8).required(),
    host: string().default('0.0.0.0'),
    port: number().default(3000),
    database_url: string().required(),
    logger: boolean().default(false),
    stats_interval: number().default(1800),
  }).required(),
  datasource: object({
    type: string().default('local'),
    local: object({
      directory: string().default('./uploads'),
    }),
    // s3 credentials cannot be required() here — they are optional for the
    // local datasource; validate() enforces them when type === 's3'.
    s3: object({
      access_key_id: string(),
      secret_access_key: string(),
      endpoint: string().notRequired().nullable(),
      bucket: string(),
      force_s3_path: boolean().default(false),
    }).notRequired(),
  }).required(),
  uploader: object({
    route: string().default('/u'),
    embed_route: string().default('/a'),
    length: number().default(6),
    admin_limit: number().default(104900000),
    user_limit: number().default(104900000),
    disabled_extensions: array().default([]),
  }).required(),
  urls: object({
    route: string().default('/go'),
    length: number().default(6),
  }).required(),
  ratelimit: object({
    user: number().default(0),
    admin: number().default(0),
  }),
});
// Validate the parsed config against the schema above.
// Returns the typed Config on success; returns null during docker image
// builds (no real config present); otherwise throws a human-readable string
// listing every validation error.
export default function validate(config): Config {
  try {
    const validated = validator.validateSync(config, { abortEarly: false });

    // s3 credentials are conditionally required: only when the datasource
    // type is 's3' (the schema itself must accept a local-only config).
    if (validated.datasource.type === 's3') {
      const errors = [];
      if (!validated.datasource.s3.access_key_id) errors.push('datasource.s3.access_key_id is a required field');
      if (!validated.datasource.s3.secret_access_key) errors.push('datasource.s3.secret_access_key is a required field');
      if (!validated.datasource.s3.bucket) errors.push('datasource.s3.bucket is a required field');
      if (errors.length) throw { errors };
    }

    return validated as unknown as Config;
  } catch (e) {
    if (process.env.ZIPLINE_DOCKER_BUILD) return null;

    // Guard: non-yup failures (e.g. a TypeError above) carry no .errors
    // array and would otherwise crash this reporter itself.
    const errors = e?.errors ?? [String(e)];
    throw `${errors.length} errors occured\n${errors.map(x => '\t' + x).join('\n')}`;
  }
};

View File

@@ -20,11 +20,20 @@
"noEmit": true,
"baseUrl": "src",
"paths": {
"components/*": ["components/*"],
"hooks/*": ["lib/hooks/*"],
"middleware/*": ["lib/middleware/*"],
"lib/*": ["lib/*"]
}
"components/*": [
"components/*"
],
"hooks/*": [
"lib/hooks/*"
],
"middleware/*": [
"lib/middleware/*"
],
"lib/*": [
"lib/*"
]
},
"incremental": true
},
"include": [
"next-env.d.ts",

13301
yarn.lock

File diff suppressed because it is too large Load Diff

4
zip-env.d.ts vendored
View File

@@ -1,11 +1,13 @@
import type { PrismaClient } from '@prisma/client';
import type { Config } from './src/lib/types';
import type { Datasource } from 'lib/datasource';
import type { Config } from '.lib/types';
declare global {
namespace NodeJS {
interface Global {
prisma: PrismaClient;
config: Config;
datasource: Datasource
}
interface ProcessEnv {