0.10.0 - Merge pull request #125 from tycrek/fixes

Refs: pull/126/head, releases/0.10.0
Josh Moore, 2 years ago, committed by GitHub
commit 89956a28cf
GPG Key ID: 4AEE18F83AFDEB23

.github/README.md (vendored, 4 changed lines)

@ -339,7 +339,9 @@ For hosts who are looking for a reliable, always available storage solution with
[Amazon S3]: https://en.wikipedia.org/wiki/Amazon_S3
[Skynet Labs]: https://github.com/SkynetLabs
## Custom frontends
## Custom frontends - OUTDATED
**Please be aware that this section is outdated (marked as of 2022-04-15). It will be updated when I overhaul the frontend system.**
ass is intended to provide a strong backend for developers to build their own frontends around. [Git Submodules] make it easy to create custom frontends. Submodules are their own projects, which means you are free to build the router however you wish, as long as it exports the required items. A custom frontend is really just an [Express.js router].
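As a rough illustration only (the export names below are assumptions, not the exact contract ass expects, since this section predates the frontend overhaul), a custom frontend submodule boils down to an Express router plus a little metadata:

```ts
import express, { Router } from 'express';

// Hypothetical exports: check the current ass source for the real required items
export const brand = 'my-frontend';   // name shown in the startup log (assumed)
export const endpoint = '/dashboard'; // path ass should mount the router at (assumed)

export const router: Router = express.Router();

// Serve a simple landing page for the custom frontend
router.get('/', (_req, res) => res.type('html').send('<h1>My custom frontend</h1>'));
```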

package-lock.json (generated, 4298 changed lines)

File diff suppressed because it is too large.

@ -1,6 +1,6 @@
{
"name": "ass",
"version": "0.9.1",
"version": "0.10.0",
"description": "The superior self-hosted ShareX server",
"main": "ass.js",
"engines": {
@ -45,31 +45,30 @@
"@tycrek/log": "^0.5.9",
"@tycrek/papito": "^0.3.4",
"any-shell-escape": "^0.1.1",
"autoprefixer": "^10.3.7",
"aws-sdk": "^2.1008.0",
"check-node-version": "^4.1.0",
"autoprefixer": "^10.4.4",
"aws-sdk": "^2.1115.0",
"check-node-version": "^4.2.1",
"crypto-random-string": "3.3.1",
"cssnano": "^5.0.8",
"cssnano": "^5.1.7",
"discord-webhook-node": "^1.1.8",
"escape-html": "^1.0.3",
"express": "^4.17.1",
"express-busboy": "^8.0.0",
"express-rate-limit": "^5.5.0",
"express": "^4.17.3",
"express-busboy": "^8.0.2",
"ffmpeg-static": "^4.4.0",
"fs-extra": "^10.0.0",
"fs-extra": "^10.0.1",
"helmet": "^4.6.0",
"jimp": "^0.16.1",
"luxon": "^2.0.2",
"luxon": "^2.3.1",
"node-fetch": "^2.6.7",
"node-vibrant": "^3.1.6",
"postcss-font-magician": "^3.0.0",
"prompt": "^1.2.0",
"prompt": "^1.3.0",
"pug": "^3.0.2",
"sanitize-filename": "^1.6.3",
"sharp": "^0.30.3",
"stream-to-array": "^2.3.0",
"submodule": "^1.2.1",
"tailwindcss": "^3.0.23",
"typescript": "^4.4.4",
"tailwindcss": "^3.0.24",
"typescript": "^4.6.3",
"uuid": "^8.3.2"
},
"devDependencies": {
@ -82,9 +81,10 @@
"@types/marked": "^3.0.0",
"@types/node": "^16.9.0",
"@types/node-fetch": "^2.5.12",
"@types/sharp": "^0.30.2",
"@types/stream-to-array": "^2.3.0",
"@types/tailwindcss": "^3.0.9",
"@types/uuid": "^8.3.1",
"@types/ws": "^7.4.7"
}
}
}

@ -1,39 +1,39 @@
import { AssRequest, AssResponse, ErrWrap } from './definitions';
let doSetup = null;
try {
// Check if config.json exists
require('../config.json');
} catch (err) {
doSetup = require('./setup').doSetup;
}
import { ErrWrap } from './types/definitions';
import { Config, MagicNumbers, Package } from 'ass-json';
//#region Imports
import fs from 'fs-extra';
import express, { Request, Response } from 'express';
import nofavicon from '@tycrek/express-nofavicon';
import epcss from '@tycrek/express-postcss';
import tailwindcss from 'tailwindcss';
import helmet from 'helmet';
// Run first time setup if using Docker (pseudo-process, setup will be run with docker exec)
if (doSetup) {
import { path, log, getTrueHttp, getTrueDomain } from './utils';
//#endregion
//#region Setup - Run first time setup if using Docker (pseudo-process, setup will be run with docker exec)
import { doSetup } from './setup';
const configPath = path('config.json');
if (!fs.existsSync(configPath)) {
doSetup();
// @ts-ignore
return;
}
//#endregion
// Load the config
const { host, port, useSsl, isProxied, s3enabled, frontendName, indexFile, useSia } = require('../config.json');
// Load the JSON
const { host, port, useSsl, isProxied, s3enabled, frontendName, indexFile, useSia }: Config = fs.readJsonSync(path('config.json'));
const { CODE_INTERNAL_SERVER_ERROR }: MagicNumbers = fs.readJsonSync(path('MagicNumbers.json'));
const { name, version, homepage }: Package = fs.readJsonSync(path('package.json'));
//#region Imports
import fs from 'fs-extra';
import express from 'express';
const nofavicon = require('@tycrek/express-nofavicon');
const epcss = require('@tycrek/express-postcss');
import tailwindcss from 'tailwindcss';
import helmet from 'helmet';
//#region Local imports
import uploadRouter from './routers/upload';
import resourceRouter from './routers/resource';
import { path, log, getTrueHttp, getTrueDomain } from './utils';
const { CODE_INTERNAL_SERVER_ERROR } = require('../MagicNumbers.json');
const { name: ASS_NAME, version: ASS_VERSION, homepage } = require('../package.json');
//#endregion
// Welcome :D
log.blank().info(`* ${ASS_NAME} v${ASS_VERSION} *`).blank();
log.blank().info(`* ${name} v${version} *`).blank();
//#region Variables, module setup
const app = express();
@ -96,17 +96,19 @@ app.use('/css', epcss({
}));
// '/:resourceId' always needs to be LAST since it's a catch-all route
app.use('/:resourceId', (req: AssRequest, _res, next) => (req.resourceId = req.params.resourceId, next()), ROUTERS.resource); // skipcq: JS-0086, JS-0090
app.use('/:resourceId', (req, _res, next) => (req.resourceId = req.params.resourceId, next()), ROUTERS.resource); // skipcq: JS-0086, JS-0090
// Error handler
app.use((err: ErrWrap, _req: AssRequest, res: AssResponse, _next: Function) => log.error(err).err(err).callback(() => res.sendStatus(CODE_INTERNAL_SERVER_ERROR))); // skipcq: JS-0128
// Host the server
log
.info('Users', `${Object.keys(users).length}`)
.info('Files', `${data.size}`)
.info('Data engine', data.name, data.type)
.info('Frontend', ASS_FRONTEND.enabled ? ASS_FRONTEND.brand : 'disabled', `${ASS_FRONTEND.enabled ? `${getTrueHttp()}${getTrueDomain()}${ASS_FRONTEND.endpoint}` : ''}`)
.info('Custom index', ASS_INDEX_ENABLED ? `enabled` : 'disabled')
.blank()
.express().Host(app, port, host, () => log.success('Ready for uploads', `Storing resources ${s3enabled ? 'in S3' : useSia ? 'on Sia blockchain' : 'on disk'}`));
app.use((err: ErrWrap, _req: Request, res: Response) => log.error(err).err(err).callback(() => res.sendStatus(CODE_INTERNAL_SERVER_ERROR))); // skipcq: JS-0128
(function start() {
if (data() == null) setTimeout(start, 100);
else log
.info('Users', `${Object.keys(users).length}`)
.info('Files', `${data().size}`)
.info('Data engine', data().name, data().type)
.info('Frontend', ASS_FRONTEND.enabled ? ASS_FRONTEND.brand : 'disabled', `${ASS_FRONTEND.enabled ? `${getTrueHttp()}${getTrueDomain()}${ASS_FRONTEND.endpoint}` : ''}`)
.info('Custom index', ASS_INDEX_ENABLED ? `enabled` : 'disabled')
.blank()
.express().Host(app, port, host, () => log.success('Ready for uploads', `Storing resources ${s3enabled ? 'in S3' : useSia ? 'on Sia blockchain' : 'on disk'}`));
})();

@ -2,11 +2,17 @@
* Used for global data management
*/
// Old data
const { JsonDataEngine } = require('@tycrek/papito');
import fs from 'fs-extra';
import { Config } from 'ass-json';
import { JsonDataEngine } from '@tycrek/papito'
let theData: any;
// Actual data engine
const { dataEngine } = require('../config.json');
const { _ENGINE_ } = require(dataEngine);
const { dataEngine }: Config = fs.readJsonSync('config.json');
import(dataEngine)
.then(({ _ENGINE_ }) => theData = _ENGINE_(new JsonDataEngine()))
.catch(err => console.error(err));
export const data = _ENGINE_(new JsonDataEngine());
// Export an accessor function that returns the loaded data engine
export const data = ((): any => theData);
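Because the engine is now loaded through a dynamic `import()`, `data` is exported as an accessor function rather than a ready object: callers invoke `data()` and must tolerate it being `undefined` briefly after startup (ass.ts handles this with a short retry loop). A minimal caller-side sketch, with the import path, resource ID, and polling interval assumed for illustration:

```ts
import { data } from './data'; // assumed relative path; the routers in this repo use '../data'

// Poll until the dynamically-imported engine is ready, then hand it to the caller
const withEngine = (): Promise<any> =>
	new Promise((resolve) => {
		const check = () => (data() != null ? resolve(data()) : setTimeout(check, 100));
		check();
	});

// Example: check whether a (hypothetical) resource ID exists
withEngine()
	.then((engine) => engine.has('someResourceId'))
	.then((exists) => console.log('exists?', exists));
```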

@ -1,4 +1,4 @@
import { FileData } from './definitions';
import { FileData } from './types/definitions';
import fs from 'fs-extra';
import crypto from 'crypto';
import toArray from 'stream-to-array';
@ -6,8 +6,6 @@ import { log } from './utils';
/**
* Generates a SHA1 hash for the provided file
* @param {*} file The file to hash
* @returns The SHA1 hash
*/
export default (file: FileData): Promise<string> =>
new Promise((resolve, reject) =>

@ -1,13 +1,15 @@
import { FileData, IsPossible, AssRequest, AssResponse } from '../definitions';
import { FileData, IsPossible } from '../types/definitions';
import { Config, MagicNumbers } from 'ass-json';
import fs from 'fs-extra';
import escape from 'escape-html';
import fetch, { Response } from 'node-fetch';
import fetch, { Response as FetchResponse } from 'node-fetch';
import { Request, Response } from 'express';
import { deleteS3 } from '../storage';
import { SkynetDelete, SkynetDownload } from '../skynet';
const { diskFilePath, s3enabled, viewDirect, useSia } = require('../../config.json');
import { path, log, getTrueHttp, getTrueDomain, formatBytes, formatTimestamp, getS3url, getDirectUrl, getResourceColor, replaceholder } from '../utils';
const { CODE_UNAUTHORIZED, CODE_NOT_FOUND, } = require('../../MagicNumbers.json');
const { diskFilePath, s3enabled, viewDirect, useSia }: Config = fs.readJsonSync(path('config.json'));
const { CODE_UNAUTHORIZED, CODE_NOT_FOUND, }: MagicNumbers = fs.readJsonSync(path('MagicNumbers.json'));
import { data } from '../data';
import { users } from '../auth';
@ -15,19 +17,19 @@ import express from 'express';
const router = express.Router();
// Middleware for parsing the resource ID and handling 404
router.use((req: AssRequest, res: AssResponse, next) => {
router.use((req: Request, res: Response, next) => {
// Parse the resource ID
req.ass = { resourceId: escape(req.resourceId || '').split('.')[0] };
// If the ID is invalid, return 404. Otherwise, continue normally
data.has(req.ass.resourceId)
data().has(req.ass.resourceId)
.then((has: boolean) => has ? next() : res.sendStatus(CODE_NOT_FOUND)) // skipcq: JS-0229
.catch(next);
});
// View file
router.get('/', (req: AssRequest, res: AssResponse, next) => data.get(req.ass?.resourceId).then((fileData: FileData) => {
const resourceId = req.ass!.resourceId;
router.get('/', (req: Request, res: Response, next) => data().get(req.ass.resourceId).then((fileData: FileData) => {
const resourceId = req.ass.resourceId;
// Build OpenGraph meta tags
const og = fileData.opengraph, ogs = [''];
@ -60,14 +62,14 @@ router.get('/', (req: AssRequest, res: AssResponse, next) => data.get(req.ass?.r
}).catch(next));
// Direct resource
router.get('/direct*', (req: AssRequest, res: AssResponse, next) => data.get(req.ass?.resourceId).then((fileData: FileData) => {
router.get('/direct*', (req: Request, res: Response, next) => data().get(req.ass.resourceId).then((fileData: FileData) => {
// Send file as an attachment for downloads
if (req.query.download)
res.header('Content-Disposition', `attachment; filename="${fileData.originalname}"`);
// Return the file differently depending on what storage option was used
const uploaders = {
s3: () => fetch(getS3url(fileData.randomId, fileData.ext)).then((file: Response) => {
s3: () => fetch(getS3url(fileData.randomId, fileData.ext)).then((file: FetchResponse) => {
file.headers.forEach((value, header) => res.setHeader(header, value));
file.body?.pipe(res);
}),
@ -86,8 +88,8 @@ router.get('/direct*', (req: AssRequest, res: AssResponse, next) => data.get(req
}).catch(next));
// Thumbnail response
router.get('/thumbnail', (req: AssRequest, res: AssResponse, next) =>
data.get(req.ass?.resourceId)
router.get('/thumbnail', (req: Request, res: Response, next) =>
data().get(req.ass.resourceId)
.then(({ is, thumbnail }: { is: IsPossible, thumbnail: string }) => fs.readFile((!is || (is.image || is.video)) ? path(diskFilePath, 'thumbnails/', thumbnail) : is.audio ? 'views/ass-audio-icon.png' : 'views/ass-file-icon.png'))
.then((fileData: Buffer) => res.type('jpg').send(fileData))
.catch(next));
@ -95,8 +97,8 @@ router.get('/thumbnail', (req: AssRequest, res: AssResponse, next) =>
// oEmbed response for clickable authors/providers
// https://oembed.com/
// https://old.reddit.com/r/discordapp/comments/82p8i6/a_basic_tutorial_on_how_to_get_the_most_out_of/
router.get('/oembed', (req: AssRequest, res: AssResponse, next) =>
data.get(req.ass?.resourceId)
router.get('/oembed', (req: Request, res: Response, next) =>
data().get(req.ass.resourceId)
.then((fileData: FileData) =>
res.type('json').send({
version: '1.0',
@ -113,9 +115,9 @@ router.get('/oembed', (req: AssRequest, res: AssResponse, next) =>
.catch(next));
// Delete file
router.get('/delete/:deleteId', (req: AssRequest, res: AssResponse, next) => {
router.get('/delete/:deleteId', (req: Request, res: Response, next) => {
let oldName: string, oldType: string; // skipcq: JS-0119
data.get(req.ass?.resourceId)
data().get(req.ass.resourceId)
.then((fileData: FileData) => {
// Extract info for logs
oldName = fileData.originalname;
@ -133,7 +135,7 @@ router.get('/delete/:deleteId', (req: AssRequest, res: AssResponse, next) => {
(!fileData.is || (fileData.is.image || fileData.is.video)) && fs.existsSync(path(diskFilePath, 'thumbnails/', fileData.thumbnail))
? fs.rmSync(path(diskFilePath, 'thumbnails/', fileData.thumbnail)) : () => Promise.resolve()]);
})
.then(() => data.del(req.ass?.resourceId))
.then(() => data().del(req.ass.resourceId))
.then(() => (log.success('Deleted', oldName, oldType), res.type('text').send('File has been deleted!'))) // skipcq: JS-0090
.catch(next);
});

@ -1,4 +1,5 @@
import { FileData, AssRequest, AssResponse, ErrWrap, User } from "../definitions";
import { ErrWrap, User } from '../types/definitions';
import { Config, MagicNumbers } from 'ass-json';
import fs from 'fs-extra';
import bb from 'express-busboy';
@ -6,14 +7,14 @@ import bb from 'express-busboy';
import { DateTime } from 'luxon';
import { Webhook, MessageBuilder } from 'discord-webhook-node';
import { processUploaded } from '../storage';
const { maxUploadSize, resourceIdSize, gfyIdSize, resourceIdType, spaceReplace } = require('../../config.json');
import { path, log, verify, getTrueHttp, getTrueDomain, generateId, formatBytes } from '../utils';
const { CODE_UNAUTHORIZED, CODE_PAYLOAD_TOO_LARGE } = require('../../MagicNumbers.json');
import { data } from '../data';
import { users } from '../auth';
const { maxUploadSize, resourceIdSize, gfyIdSize, resourceIdType, spaceReplace }: Config = fs.readJsonSync(path('config.json'));
const { CODE_UNAUTHORIZED, CODE_PAYLOAD_TOO_LARGE }: MagicNumbers = fs.readJsonSync(path('MagicNumbers.json'));
const ASS_LOGO = 'https://cdn.discordapp.com/icons/848274994375294986/8d339d4a2f3f54b2295e5e0ff62bd9e6.png?size=1024';
import express from 'express';
import express, { Request, Response } from 'express';
const router = express.Router();
// Set up express-busboy
@ -31,7 +32,7 @@ bb.extend(router, {
})); */
// Block unauthorized requests and attempt token sanitization
router.post('/', (req: AssRequest, res: AssResponse, next: Function) => {
router.post('/', (req: Request, res: Response, next: Function) => {
req.headers.authorization = req.headers.authorization || '';
req.token = req.headers.authorization.replace(/[^\da-z]/gi, ''); // Strip anything that isn't a digit or ASCII letter
!verify(req, users) ? log.warn('Upload blocked', 'Unauthorized').callback(() => res.sendStatus(CODE_UNAUTHORIZED)) : next(); // skipcq: JS-0093
@ -41,28 +42,28 @@ router.post('/', (req: AssRequest, res: AssResponse, next: Function) => {
router.post('/', processUploaded);
// Max upload size error handling
router.use('/', (err: ErrWrap, _req: AssRequest, res: AssResponse, next: Function) => err.message === 'LIMIT_FILE_SIZE' ? log.warn('Upload blocked', 'File too large').callback(() => res.status(CODE_PAYLOAD_TOO_LARGE).send(`Max upload size: ${maxUploadSize}MB`)) : next(err)); // skipcq: JS-0229
router.use('/', (err: ErrWrap, _req: Request, res: Response, next: Function) => err.message === 'LIMIT_FILE_SIZE' ? log.warn('Upload blocked', 'File too large').callback(() => res.status(CODE_PAYLOAD_TOO_LARGE).send(`Max upload size: ${maxUploadSize}MB`)) : next(err)); // skipcq: JS-0229
// Process uploaded file
router.post('/', (req: AssRequest, res: AssResponse, next: Function) => {
router.post('/', (req: Request, res: Response, next: Function) => {
// Load overrides
const trueDomain = getTrueDomain(req.headers['x-ass-domain']);
const generator = req.headers['x-ass-access'] || resourceIdType;
const generator = req.headers['x-ass-access']?.toString() || resourceIdType;
// Save domain with file
req.file!.domain = `${getTrueHttp()}${trueDomain}`;
req.file.domain = `${getTrueHttp()}${trueDomain}`;
// Get the uploaded time in milliseconds
req.file!.timestamp = DateTime.now().toMillis();
req.file.timestamp = DateTime.now().toMillis();
// Save the timezone offset
req.file!.timeoffset = req.headers['x-ass-timeoffset']?.toString() || 'UTC+0';
// Keep track of the token that uploaded the resource
req.file!.token = req.token ?? '';
req.file.token = req.token ?? '';
// Attach any embed overrides, if necessary
req.file!.opengraph = {
req.file.opengraph = {
title: req.headers['x-ass-og-title'],
description: req.headers['x-ass-og-description'],
author: req.headers['x-ass-og-author'],
@ -73,13 +74,13 @@ router.post('/', (req: AssRequest, res: AssResponse, next: Function) => {
};
// Fix spaces in originalname
req.file!.originalname = req.file!.originalname.replace(/\s/g, spaceReplace === '!' ? '' : spaceReplace);
req.file!.originalname = req.file.originalname.replace(/\s/g, spaceReplace === '!' ? '' : spaceReplace);
// Generate a unique resource ID
let resourceId = '';
// Function to call to generate a fresh ID. Used for multiple attempts in case an ID is already taken
const gen = () => generateId(generator, resourceIdSize, req.headers['x-ass-gfycat'] || gfyIdSize, req.file!.originalname);
const gen = () => generateId(generator, resourceIdSize, parseInt(req.headers['x-ass-gfycat']?.toString() || gfyIdSize.toString()), req.file.originalname);
// Keeps track of the number of attempts in case all IDs are taken
const attempts = {
@ -91,7 +92,7 @@ router.post('/', (req: AssRequest, res: AssResponse, next: Function) => {
function genCheckId(resolve: Function, reject: Function) {
const uniqueId = gen();
attempts.count++;
data.has(uniqueId)
data().has(uniqueId)
.then((exists: boolean) => {
log.debug('ID check', exists ? 'Taken' : 'Available');
return attempts.count - 1 >= attempts.max ? reject(new Error('No ID\'s remaining')) : exists ? genCheckId(resolve, reject) : resolve(uniqueId);
@ -105,16 +106,16 @@ router.post('/', (req: AssRequest, res: AssResponse, next: Function) => {
resourceId = uniqueId;
log.debug('Saving data', data.name);
})
.then(() => data.put(resourceId.split('.')[0], req.file))
.then(() => data().put(resourceId.split('.')[0], req.file))
.then(() => {
// Log the upload
const logInfo = `${req.file!.originalname} (${req.file!.mimetype}, ${formatBytes(req.file!.size)})`;
const logInfo = `${req.file!.originalname} (${req.file!.mimetype}, ${formatBytes(req.file.size)})`;
log.success('File uploaded', logInfo, `uploaded by ${users[req.token ?? ''] ? users[req.token ?? ''].username : '<token-only>'}`);
// Build the URLs
const resourceUrl = `${getTrueHttp()}${trueDomain}/${resourceId}`;
const thumbnailUrl = `${getTrueHttp()}${trueDomain}/${resourceId}/thumbnail`;
const deleteUrl = `${getTrueHttp()}${trueDomain}/${resourceId}/delete/${req.file!.deleteId}`;
const deleteUrl = `${getTrueHttp()}${trueDomain}/${resourceId}/delete/${req.file.deleteId}`;
// Send the response
res.type('json').send({ resource: resourceUrl, thumbnail: thumbnailUrl, delete: deleteUrl })
@ -134,9 +135,9 @@ router.post('/', (req: AssRequest, res: AssResponse, next: Function) => {
.setTitle(logInfo)
//@ts-ignore
.setURL(resourceUrl)
.setDescription(`**Size:** \`${formatBytes(req.file!.size)}\`\n**[Delete](${deleteUrl})**`)
.setDescription(`**Size:** \`${formatBytes(req.file.size)}\`\n**[Delete](${deleteUrl})**`)
.setThumbnail(thumbnailUrl)
.setColor(req.file!.vibrant)
.setColor(req.file.vibrant)
.setTimestamp();
// Send the embed to the webhook, then delete the client after to free resources
@ -148,7 +149,7 @@ router.post('/', (req: AssRequest, res: AssResponse, next: Function) => {
// Also update the user's upload count
if (!users[req.token ?? '']) {
const generateUsername = () => generateId('random', 20, 0, req.file!.size.toString()); // skipcq: JS-0074
const generateUsername = () => generateId('random', 20, 0, req.file.size.toString()); // skipcq: JS-0074
let username: string = generateUsername();
// eslint-disable-next-line @typescript-eslint/ban-ts-comment

@ -1,7 +1,7 @@
import { FileData } from "./definitions";
import { FileData } from './types/definitions';
import fs, { ReadStream } from 'fs-extra';
import { path } from './utils';
const { SkynetClient } = require('@skynetlabs/skynet-nodejs');
import { SkynetClient } from '@skynetlabs/skynet-nodejs';
function getFullPath(fileData: FileData) {
return path('share', '.skynet', `${fileData.randomId}${fileData.ext}`.replace(/sia\:\/\//gi, ''));

@ -1,16 +1,18 @@
// https://docs.digitalocean.com/products/spaces/resources/s3-sdk-examples/
// https://www.digitalocean.com/community/tutorials/how-to-upload-a-file-to-object-storage-with-node-js
import { AssRequest, AssResponse, FileData } from './definitions';
import { FileData } from './types/definitions';
import { Config, MagicNumbers } from 'ass-json'
import fs, { Stats } from 'fs-extra';
import aws from 'aws-sdk';
import Thumbnail from './thumbnails';
import Vibrant from './vibrant';
import Hash from './hash';
import { generateId, log } from './utils';
import { path, generateId, log } from './utils';
import { SkynetUpload } from './skynet';
const { s3enabled, s3endpoint, s3bucket, s3usePathStyle, s3accessKey, s3secretKey, diskFilePath, saveAsOriginal, saveWithDate, mediaStrict, maxUploadSize, useSia } = require('../config.json');
const { CODE_UNSUPPORTED_MEDIA_TYPE } = require('../MagicNumbers.json');
import { Request, Response } from 'express';
const { s3enabled, s3endpoint, s3bucket, s3usePathStyle, s3accessKey, s3secretKey, diskFilePath, saveAsOriginal, saveWithDate, mediaStrict, maxUploadSize, useSia }: Config = fs.readJsonSync(path('config.json'));
const { CODE_UNSUPPORTED_MEDIA_TYPE }: MagicNumbers = fs.readJsonSync(path('MagicNumbers.json'));
const ID_GEN_LENGTH = 32;
const ALLOWED_MIMETYPES = /(image)|(video)|(audio)\//;
@ -31,23 +33,23 @@ function getDatedDirname() {
return `${diskFilePath}${diskFilePath.endsWith('/') ? '' : '/'}${year}-${`0${month}`.slice(-2)}`; // skipcq: JS-0074
}
function getLocalFilename(req: AssRequest) {
return `${getDatedDirname()}/${saveAsOriginal ? req.file!.originalname : req.file!.sha1}`;
function getLocalFilename(req: Request) {
return `${getDatedDirname()}/${saveAsOriginal ? req.file.originalname : req.file.sha1}`;
}
export function processUploaded(req: AssRequest, res: AssResponse, next: Function) { // skipcq: JS-0045
export function processUploaded(req: Request, res: Response, next: Function) { // skipcq: JS-0045
// Fix file object
req.file = req.files!.file;
req.file = req.files.file;
// Other fixes
req.file!.ext = '.'.concat((req.file!.filename ?? '').split('.').pop() ?? '');
req.file!.originalname = req.file!.filename ?? '';
req.file!.path = req.file!.file ?? '';
req.file!.randomId = generateId('random', ID_GEN_LENGTH, 0, '');
req.file!.deleteId = generateId('random', ID_GEN_LENGTH, 0, '');
req.file.ext = '.'.concat((req.file.filename ?? '').split('.').pop() ?? '');
req.file.originalname = req.file.filename ?? '';
req.file.path = req.file.file ?? '';
req.file.randomId = generateId('random', ID_GEN_LENGTH, 0, '');
req.file.deleteId = generateId('random', ID_GEN_LENGTH, 0, '');
// Set up types
req.file!.is = {
req.file.is = {
image: false,
video: false,
audio: false,
@ -55,16 +57,16 @@ export function processUploaded(req: AssRequest, res: AssResponse, next: Functio
};
// Specify correct type
const isType = req.file!.mimetype.includes('image') ? 'image' : req.file!.mimetype.includes('video') ? 'video' : req.file!.mimetype.includes('audio') ? 'audio' : 'other';
req.file!.is[isType] = true;
const isType = req.file!.mimetype.includes('image') ? 'image' : req.file.mimetype.includes('video') ? 'video' : req.file.mimetype.includes('audio') ? 'audio' : 'other';
req.file.is[isType] = true;
// Block the resource if the mimetype is not an image, video, or audio file
if (mediaStrict && !ALLOWED_MIMETYPES.test(req.file!.mimetype))
if (mediaStrict && !ALLOWED_MIMETYPES.test(req.file.mimetype))
return log
.warn('Upload blocked', req.file!.originalname, req.file!.mimetype)
.warn('Upload blocked', req.file.originalname, req.file.mimetype)
.warn('Strict media mode', 'only images, videos, & audio files are permitted')
.callback(() =>
fs.remove(req.file!.path)
fs.remove(req.file.path)
.then(() => log
.debug('Temp file', 'deleted')
.callback(() => res.sendStatus(CODE_UNSUPPORTED_MEDIA_TYPE)))
@ -73,29 +75,29 @@ export function processUploaded(req: AssRequest, res: AssResponse, next: Functio
.callback(next, err)));
// Remove unwanted fields
delete req.file!.uuid;
delete req.file!.field;
delete req.file!.file;
delete req.file!.filename;
delete req.file!.truncated;
delete req.file!.done;
delete req.file.uuid;
delete req.file.field;
delete req.file.file;
delete req.file.filename;
delete req.file.truncated;
delete req.file.done;
// Operations
// @ts-ignore
Promise.all([Thumbnail(req.file), Vibrant(req.file), Hash(req.file), fs.stat(req.file!.path)])
Promise.all([Thumbnail(req.file), Vibrant(req.file), Hash(req.file), fs.stat(req.file.path)])
// skipcq: JS-0086
.then(([thumbnail, vibrant, sha1, stat]: [string, string, string, Stats]) => (
req.file!.thumbnail = thumbnail, // skipcq: JS-0090
req.file!.vibrant = vibrant, // skipcq: JS-0090
req.file!.sha1 = sha1, // skipcq: JS-0090
req.file!.size = stat.size // skipcq: JS-0090
req.file.thumbnail = thumbnail, // skipcq: JS-0090
req.file.vibrant = vibrant, // skipcq: JS-0090
req.file.sha1 = sha1, // skipcq: JS-0090
req.file.size = stat.size // skipcq: JS-0090
))
// Check if file size is too big
.then(() => { if (req.file!.size / Math.pow(1024, 2) > maxUploadSize) throw new Error('LIMIT_FILE_SIZE'); })
.then(() => { if (req.file.size / Math.pow(1024, 2) > maxUploadSize) throw new Error('LIMIT_FILE_SIZE'); })
// Save file
.then(() => log.debug('Saving file', req.file!.originalname, s3enabled ? 'in S3' : useSia ? 'on Sia blockchain' : 'on disk'))
.then(() => log.debug('Saving file', req.file.originalname, s3enabled ? 'in S3' : useSia ? 'on Sia blockchain' : 'on disk'))
.then(() =>
// skipcq: JS-0229
new Promise((resolve, reject) => {
@ -103,31 +105,31 @@ export function processUploaded(req: AssRequest, res: AssResponse, next: Functio
// Upload to Amazon S3
if (s3enabled) return s3.putObject({
Bucket: s3bucket,
Key: req.file!.randomId.concat(req.file!.ext),
Key: req.file.randomId.concat(req.file.ext),
ACL: 'public-read',
ContentType: req.file!.mimetype,
Body: fs.createReadStream(req.file!.path)
ContentType: req.file.mimetype,
Body: fs.createReadStream(req.file.path)
}).promise().then(resolve).catch(reject);
// Use Sia Skynet
else if (useSia) return SkynetUpload(req.file!.path)
.then((skylink) => req.file!.randomId = skylink)
else if (useSia) return SkynetUpload(req.file.path)
.then((skylink) => req.file.randomId = skylink)
.then(resolve).catch(reject);
// Save to local storage
else return fs.ensureDir(getDatedDirname())
.then(() => fs.copy(req.file!.path, getLocalFilename(req), { preserveTimestamps: true }))
.then(() => fs.copy(req.file.path, getLocalFilename(req), { preserveTimestamps: true }))
.then(resolve).catch(reject);
}))
.then(() => log.debug('File saved', req.file!.originalname, s3enabled ? 'in S3' : useSia ? 'on Sia blockchain' : 'on disk'))
.then(() => log.debug('File saved', req.file.originalname, s3enabled ? 'in S3' : useSia ? 'on Sia blockchain' : 'on disk'))
.catch((err) => next(err))
// Delete the file
.then(() => fs.remove(req.file!.path))
.then(() => fs.remove(req.file.path))
.then(() => log.debug('Temp file', 'deleted'))
// Fix the file path
.then(() => !s3enabled && (req.file!.path = getLocalFilename(req))) // skipcq: JS-0090
.then(() => !s3enabled && (req.file.path = getLocalFilename(req))) // skipcq: JS-0090
.then(() => next())
.catch((err) => next(err));
}

@ -1,11 +1,14 @@
import { FileData } from "./definitions";
import { FileData } from './types/definitions';
import { Config } from 'ass-json';
import fs from 'fs-extra';
import ffmpeg from 'ffmpeg-static';
import Jimp from 'jimp';
import sharp from 'sharp';
// @ts-ignore
import shell from 'any-shell-escape';
import { exec } from 'child_process';
import { isProd, path } from './utils';
const { diskFilePath } = require('../config.json');
const { diskFilePath }: Config = fs.readJsonSync(path('config.json'));
// Thumbnail parameters
const THUMBNAIL = {
@ -16,9 +19,6 @@ const THUMBNAIL = {
/**
* Builds a safe escaped ffmpeg command
* @param {String} src Path to the input file
* @param {String} dest Path of the output file
* @returns {String} The command to execute
*/
function getCommand(src: String, dest: String) {
return shell([
@ -34,8 +34,6 @@ function getCommand(src: String, dest: String) {
/**
* Builds a thumbnail filename
* @param {String} oldName The original filename
* @returns {String} The filename for the thumbnail
*/
function getNewName(oldName: String) {
return oldName.concat('.thumbnail.jpg');
@ -43,8 +41,6 @@ function getNewName(oldName: String) {
/**
* Builds a path to the thumbnails
* @param {String} oldName The original filename
* @returns {String} The path to the thumbnail
*/
function getNewNamePath(oldName: String) {
return path(diskFilePath, 'thumbnails/', getNewName(oldName));
@ -52,7 +48,6 @@ function getNewNamePath(oldName: String) {
/**
* Extracts an image from a video file to use as a thumbnail, using ffmpeg
* @param {*} file The video file to pull a frame from
*/
function getVideoThumbnail(file: FileData) {
return new Promise((resolve: Function, reject: Function) => exec(
@ -64,23 +59,19 @@ function getVideoThumbnail(file: FileData) {
/**
* Generates a thumbnail for the provided image
* @param {*} file The file to generate a thumbnail for
*/
function getImageThumbnail(file: FileData) {
return new Promise((resolve, reject) =>
Jimp.read(file.path)
.then((image) => image
.quality(THUMBNAIL.QUALITY)
.resize(THUMBNAIL.WIDTH, THUMBNAIL.HEIGHT, Jimp.RESIZE_BICUBIC)
.write(getNewNamePath(file.randomId)))
sharp(file.path)
.resize(THUMBNAIL.WIDTH, THUMBNAIL.HEIGHT, { kernel: 'cubic' })
.jpeg({ quality: THUMBNAIL.QUALITY })
.toFile(getNewNamePath(file.randomId))
.then(resolve)
.catch(reject));
}
/**
* Generates a thumbnail
* @param {*} file The file to generate a thumbnail for
* @returns The thumbnail filename (NOT the path)
*/
export default (file: FileData): Promise<string> =>
new Promise((resolve, reject) =>

@ -1,4 +1,16 @@
import { Request, Response } from "express";
import { Request, Response } from 'express';
declare global {
namespace Express {
interface Request {
resourceId: string
ass: { resourceId: string }
token: string
file: FileData
files: { [key: string]: any }
}
}
}
export interface User {
token: string
@ -55,18 +67,6 @@ export interface OpenGraphData {
color?: string | string[]
}
export interface AssRequest extends Request {
resourceId?: string
ass?: { resourceId: string }
token?: string
file?: FileData
files?: { [key: string]: any }
}
export interface AssResponse extends Response {
}
export interface ErrWrap extends Error {
code?: number | string
}
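With the custom fields declared on Express's own `Request` through the global augmentation above, handlers elsewhere in the codebase can use the stock `express` types instead of `AssRequest`/`AssResponse`. A minimal sketch of the resulting pattern (the route path and response shape are illustrative, and the inline augmentation is only a stand-in subset of the real one in types/definitions):

```ts
import express, { Request, Response } from 'express';

// Stand-in for the global augmentation shipped in types/definitions (assumed subset)
declare global {
	namespace Express {
		interface Request { ass: { resourceId: string }, token: string }
	}
}

const router = express.Router();

// Plain Request/Response now carry the custom fields; no AssRequest cast needed
router.get('/whoami', (req: Request, res: Response) =>
	res.type('json').send({ resourceId: req.ass?.resourceId ?? null, token: req.token || null }));

export default router;
```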

@ -0,0 +1,49 @@
declare module 'ass-json' {
interface Config {
host: string
port: number
domain: string
maxUploadSize: number
isProxied: boolean
useSsl: boolean
resourceIdSize: number
resourceIdType: string
spaceReplace: string
gfyIdSize: number
mediaStrict: boolean
viewDirect: boolean
dataEngine: string
frontendName: string
indexFile: string
useSia: boolean
s3enabled: boolean
s3endpoint: string
s3bucket: string
s3usePathStyle: boolean
s3accessKey: string
s3secretKey: string
__WARNING__: string
diskFilePath: string
saveWithDate: boolean
saveAsOriginal: boolean
}
interface MagicNumbers {
HTTP: number
HTTPS: number
CODE_OK: number
CODE_NO_CONTENT: number
CODE_UNAUTHORIZED: number
CODE_NOT_FOUND: number
CODE_PAYLOAD_TOO_LARGE: number
CODE_UNSUPPORTED_MEDIA_TYPE: number
CODE_INTERNAL_SERVER_ERROR: number
KILOBYTES: number
}
interface Package {
name: string
version: string
homepage: string
}
}

@ -0,0 +1,7 @@
declare module './setup' {
export function doSetup(): void;
}
declare module '@tycrek/express-postcss';
declare module '@tycrek/express-nofavicon';
declare module '@tycrek/papito';
declare module '@skynetlabs/skynet-nodejs';

@ -1,4 +1,4 @@
import { AssRequest, FileData } from './definitions';
import { FileData } from './types/definitions';
import fs from 'fs-extra';
import Path from 'path';
import fetch from 'node-fetch';
@ -9,10 +9,12 @@ import zwsGen from './generators/zws';
import randomGen from './generators/random';
import gfyGen from './generators/gfycat';
import logger from './logger';
import { Request } from 'express';
const { HTTP, HTTPS, KILOBYTES } = require('../MagicNumbers.json');
// Catch config.json not existing when running setup script
try {
// todo: fix this
var { useSsl, port, domain, isProxied, diskFilePath, s3bucket, s3endpoint, s3usePathStyle } = require('../config.json'); // skipcq: JS-0239, JS-0102
} catch (ex) {
// @ts-ignore
@ -69,28 +71,26 @@ export function arrayEquals(arr1: any[], arr2: any[]) {
return arr1.length === arr2.length && arr1.slice().sort().every((value: string, index: number) => value === arr2.slice().sort()[index])
};
export function verify(req: AssRequest, users: JSON) {
export function verify(req: Request, users: JSON) {
return req.headers.authorization && Object.prototype.hasOwnProperty.call(users, req.headers.authorization);
}
export function generateId(mode: string, length: number, gfyLength: number, originalName: string) {
return (GENERATORS.has(mode) ? GENERATORS.get(mode)({ length, gfyLength }) : originalName);
}
// Set up pathing
export const path = (...paths: string[]) => Path.join(process.cwd(), ...paths);
const idModes = {
zws: 'zws', // Zero-width spaces (see: https://zws.im/)
og: 'original', // Use original uploaded filename
r: 'random', // Use a randomly generated ID with a mixed-case alphanumeric character set
gfy: 'gfycat' // Gfycat-style ID's (https://gfycat.com/unsungdiscretegrub)
};
const GENERATORS = new Map();
GENERATORS.set(idModes.zws, zwsGen);
GENERATORS.set(idModes.r, randomGen);
GENERATORS.set(idModes.gfy, gfyGen);
export function generateId(mode: string, length: number, gfyLength: number, originalName: string) {
return (GENERATORS.has(mode) ? GENERATORS.get(mode)({ length, gfyLength }) : originalName);
}
// Set up pathing
export const path = (...paths: string[]) => Path.join(process.cwd(), ...paths);
export const isProd = require('@tycrek/isprod')();
module.exports = {
@ -106,11 +106,11 @@ module.exports = {
randomHexColour,
sanitize,
verify,
renameFile: (req: AssRequest, newName: string) => new Promise((resolve: Function, reject) => {
renameFile: (req: Request, newName: string) => new Promise((resolve: Function, reject) => {
try {
const paths = [req.file!.destination, newName];
fs.rename(path(req.file!.path), path(...paths));
req.file!.path = Path.join(...paths);
const paths = [req.file.destination, newName];
fs.rename(path(req.file.path), path(...paths));
req.file.path = Path.join(...paths);
resolve();
} catch (err) {
reject(err);

@ -1,5 +1,6 @@
import { FileData } from './definitions';
import { FileData } from './types/definitions';
import Vibrant from 'node-vibrant';
import sharp from 'sharp';
import { randomHexColour } from './utils';
// Vibrant parameters
@ -8,22 +9,18 @@ const QUALITY = 3;
/**
* Extracts a prominent colour from the provided image file
* @param {*} file The image to extract a colour from
* @param {*} resolve Runs if Promise was successful
* @param {*} reject Runs if Promise failed
*/
function getVibrant(file: FileData, resolve: Function, reject: Function) {
Vibrant.from(file.path)
.maxColorCount(COLOR_COUNT)
.quality(QUALITY)
.getPalette()
sharp(file.path).png().toBuffer()
.then((data) => Vibrant.from(data)
.maxColorCount(COLOR_COUNT)
.quality(QUALITY)
.getPalette())
.then((palettes) => resolve(palettes[Object.keys(palettes).sort((a, b) => palettes[b]!.population - palettes[a]!.population)[0]]!.hex))
.catch((err) => reject(err));
}
/**
* Extracts a colour from an image file. Returns a random Hex value if provided file is a video
* @param {*} file The file to get a colour from
* @returns The Vibrant colour as a Hex value (or random Hex value for videos)
*/
export default (file: FileData): Promise<string> => new Promise((resolve, reject) => (!file.is.image || file.mimetype.includes('webp')) ? resolve(randomHexColour()) : getVibrant(file, resolve, reject)); // skipcq: JS-0229

@ -2,8 +2,9 @@
"extends": "@tsconfig/node16/tsconfig.json",
"compilerOptions": {
"outDir": "./dist",
"target": "ES2021",
"lib": [
"ES2020",
"ES2021",
"DOM"
],
"allowJs": true,
