Switch codebase to TypeScript (#62)

It's about time.
pull/93/head releases/0.9.0
Josh Moore 3 years ago committed by tycrek
commit 1c4da1a849
No known key found for this signature in database
GPG Key ID: 25D74F3943625263

8
.github/README.md vendored

@ -351,14 +351,16 @@ ass has a number of pre-made npm scripts for you to use. **All** of these script
| Script | Description |
| ------ | ----------- |
| **`start`** | Starts the ass server. This is the default script & is run with **`npm start`**. |
| `build` | Compiles the TypeScript files into JavaScript. |
| `dev` | Chains the `build` and `start` scripts together. |
| `setup` | Starts the easy setup process. Should be run after any updates that introduce new config options. |
| `metrics` | Runs the metrics script. This is a simple script that outputs basic resource statistics. |
| `new-token` | Generates a new API token. Accepts one parameter for specifying a username, like `npm run new-token <username>`. ass automatically detects the new token & reloads it, so there's no need to restart the server. |
| `restart` | Restarts the ass server using `systemctl`. More info soon (should work fine if you have an existing `ass.service` file) |
| `engine-check` | Ensures your environment meets the minimum Node & npm version requirements. |
| `logs` | Uses the [tlog Socket plugin] to stream logs from the ass server to your terminal, with full colour support (Remember to set [`FORCE_COLOR`] if you're using Systemd) |
| `docker-logs` | Alias for `docker-compose logs -f --tail=50 --no-log-prefix ass` |
| `docker-update` | Alias for `git pull && docker-compose up --force-recreate --build -d && docker image prune -f` |
| `docker-update` | Calls `git pull` then runs the `docker-uplite` script. |
| `docker-uplite` | Alias for `docker-compose up --force-recreate --build -d && docker image prune -f` |
| `docker-upfull` | Alias for `npm run docker-update && npm run docker-resetup` |
| `docker-resetup` | Alias for `docker-compose exec ass npm run setup && docker-compose restart` |
[tlog Socket plugin]: https://github.com/tycrek/tlog#socket

@ -1,4 +1,4 @@
# ass Dockerfile v0.1.0
# ass Dockerfile v0.2.0
# authors:
# - tycrek <t@tycrek.com> (https://tycrek.com/)
# - Zusier <zusier@pm.me> (https://github.com/Zusier)
@ -12,11 +12,11 @@ WORKDIR /opt/ass/
# Copy directory files (config.json, source files etc.)
COPY . ./
# Update npm to at least v7.x.x
RUN npm i -g npm@>=7
# Install dependencies
RUN npm i
# Update npm to at least v7.x.x,
# then install dependencies
RUN npm i -g npm@>=7 typescript && \
npm i --save-dev && \
npm run build
# Ensure these directories & files exist for compose volumes
RUN mkdir -p /opt/ass/uploads/thumbnails/ && \

@ -1 +1 @@
Subproject commit ac4fbe6d850919a4c347cf3bb69c8d2cb60cd87d
Subproject commit 2b143138ef2cb7790117709882e429f5e7680d75

@ -1,4 +1,4 @@
# ass Docker compose.yaml v0.1.0
# ass Docker compose.yaml v0.1.1
# authors:
# - tycrek <t@tycrek.com> (https://tycrek.com/)
# - Zusier <zusier@pm.me> (https://github.com/Zusier)
@ -11,7 +11,6 @@ services:
restart: unless-stopped
ports:
- "40115:40115"
- "45375:45375"
volumes:
- ./uploads:/opt/ass/uploads
- ./share:/opt/ass/share

@ -1,20 +0,0 @@
// Gfycat-style ID generator (pre-TypeScript version): builds IDs like
// "adjective-adjective-...-animal" from the bundled word lists.
const fs = require('fs-extra');
// Word lists are read once at module load; one word per line
const adjectives = fs.readFileSync('./generators/gfycat/adjectives.txt').toString().split('\n');
const animals = fs.readFileSync('./generators/gfycat/animals.txt').toString().split('\n');
// Don't trigger circular dependency during setup
if (!require.main.filename.includes('setup.js'))
var MIN_LENGTH = require('../setup').gfyIdSize; // skipcq: JS-0239, JS-0102
// Pick one random word from the list and append the delimiter
function getWord(list, delim = '') {
return list[Math.floor(Math.random() * list.length)].concat(delim);
}
// Concatenate hyphen-delimited adjectives (at least MIN_LENGTH of them), then one animal.
// NOTE(review): when run via setup.js, MIN_LENGTH is undefined and both comparisons
// against it are false, so the loop runs `count` times (or zero if count is also
// undefined) — confirm this is acceptable on the setup path.
function genString(count = MIN_LENGTH) {
let gfycat = '';
for (let i = 0; i < (count < MIN_LENGTH ? MIN_LENGTH : count); i++)
gfycat += getWord(adjectives, '-');
return gfycat.concat(getWord(animals));
};
module.exports = ({ gfyLength }) => genString(gfyLength);

@ -1,2 +0,0 @@
// Maps `length` cryptographically random bytes onto `charset`
// (each byte selects charset[byte % charset.length]).
const { randomBytes } = require('crypto');
// NOTE(review): .slice(1) drops the first random character and charset[0] is
// appended, so the final character is always charset[0] — confirm intentional.
module.exports = (length, charset) => [...randomBytes(length)].map((byte) => charset[Number(byte) % charset.length]).join('').slice(1).concat(charset[0]);

@ -1,2 +0,0 @@
// Random alphanumeric ID generator: delegates to the crypto-random-string package.
const cryptoRandomString = require('crypto-random-string');
module.exports = ({ length }) => cryptoRandomString({ length, type: 'alphanumeric' });

@ -1,3 +0,0 @@
// Zero-width ID generator: builds invisible IDs from zero-width Unicode characters.
const lengthGen = require('./lengthGen');
// U+200B zero-width space, U+200C zero-width non-joiner, U+200D zero-width joiner, U+2060 word joiner
const zeroWidthChars = ['\u200B', '\u200C', '\u200D', '\u2060'];
module.exports = ({ length }) => lengthGen(length, zeroWidthChars);

@ -1,17 +0,0 @@
const fs = require('fs-extra');
const crypto = require('crypto');
const toArray = require('stream-to-array');
const { log } = require('./utils');
/**
 * Generates a SHA1 hash for the provided file.
 * Streams the file from disk, concatenates the stream chunks into a single
 * Buffer, hashes it, logs the digest, then resolves with it.
 * @param {*} file The file to hash (only file.path and file.originalname are used)
 * @returns The SHA1 hash as a lowercase hex string
 */
module.exports = (file) =>
new Promise((resolve, reject) =>
toArray((fs.createReadStream(file.path)))
.then((parts) => Buffer.concat(parts.map((part) => (Buffer.isBuffer(part) ? part : Buffer.from(part)))))
.then((buf) => crypto.createHash('sha1').update(buf).digest('hex')) // skipcq: JS-D003
.then((hash) => log.debug(`Hash for ${file.originalname}`, hash, 'SHA1, hex').callback(resolve, hash))
.catch(reject));

2056
package-lock.json generated

File diff suppressed because it is too large Load Diff

@ -1,6 +1,6 @@
{
"name": "ass",
"version": "0.8.7",
"version": "0.9.0",
"description": "The superior self-hosted ShareX server",
"main": "ass.js",
"engines": {
@ -8,17 +8,19 @@
"npm": ">=7.x.x"
},
"scripts": {
"start": "node ass.js",
"setup": "node setup.js",
"metrics": "node metrics.js",
"new-token": "node generators/token.js",
"restart": "systemctl restart ass.service",
"engine-check": "node checkEngine.js",
"dev": "npm run build && npm start",
"build": "ts",
"start": "node dist/ass.js",
"setup": "node dist/setup.js",
"metrics": "node dist/metrics.js",
"new-token": "node dist/generators/token.js",
"engine-check": "node dist/checkEngine.js",
"prestart": "npm run engine-check",
"presetup": "npm run engine-check",
"logs": "node ./node_modules/@tycrek/log/socketClient.js",
"docker-logs": "docker-compose logs -f --tail=50 --no-log-prefix ass",
"docker-update": "git pull && docker-compose up --force-recreate --build -d && docker image prune -f",
"docker-update": "git pull && npm run docker-uplite",
"docker-uplite": "docker-compose up --force-recreate --build -d && docker image prune -f",
"docker-upfull": "npm run docker-update && npm run docker-resetup",
"docker-resetup": "docker-compose exec ass npm run setup && docker-compose restart"
},
"repository": "github:tycrek/ass",
@ -35,6 +37,7 @@
"url": "https://patreon.com/tycrek"
},
"dependencies": {
"@tsconfig/node14": "^1.0.1",
"@tycrek/express-nofavicon": "^1.0.3",
"@tycrek/isprod": "^2.0.2",
"@tycrek/log": "^0.5.9",
@ -54,13 +57,28 @@
"jimp": "^0.16.1",
"luxon": "^1.26.0",
"marked": "^2.0.7",
"node-fetch": "^2.6.1",
"node-fetch": "^2.6.2",
"node-vibrant": "*",
"prompt": "^1.1.0",
"pug": "^3.0.2",
"sanitize-filename": "^1.6.3",
"stream-to-array": "^2.3.0",
"submodule": "^1.2.1",
"ts": "^0.2.2",
"uuid": "^8.3.2"
},
"devDependencies": {
"@types/escape-html": "^1.0.1",
"@types/express": "^4.17.13",
"@types/express-busboy": "^8.0.0",
"@types/ffmpeg-static": "^3.0.0",
"@types/fs-extra": "^9.0.12",
"@types/luxon": "^2.0.3",
"@types/marked": "^3.0.0",
"@types/node": "^16.9.0",
"@types/node-fetch": "^2.5.12",
"@types/stream-to-array": "^2.3.0",
"@types/uuid": "^8.3.1",
"@types/ws": "^7.4.7"
}
}

@ -1,7 +1,9 @@
import { AssRequest, AssResponse, ErrWrap } from './definitions';
let doSetup = null;
try {
// Check if config.json exists
require('./config.json');
require('../config.json');
} catch (err) {
doSetup = require('./setup').doSetup;
}
@ -9,23 +11,24 @@ try {
// Run first time setup if using Docker (pseudo-process, setup will be run with docker exec)
if (doSetup) {
doSetup();
// @ts-ignore
return;
}
// Load the config
const { host, port, useSsl, isProxied, s3enabled, frontendName, indexFile } = require('./config.json');
const { host, port, useSsl, isProxied, s3enabled, frontendName, indexFile } = require('../config.json');
//#region Imports
const fs = require('fs-extra');
const express = require('express');
import fs from 'fs-extra';
import express from 'express';
const nofavicon = require('@tycrek/express-nofavicon');
const helmet = require('helmet');
const marked = require('marked');
const uploadRouter = require('./routers/upload');
const resourceRouter = require('./routers/resource');
const { path, log, getTrueHttp, getTrueDomain } = require('./utils');
const { CODE_INTERNAL_SERVER_ERROR } = require('./MagicNumbers.json');
const { name: ASS_NAME, version: ASS_VERSION } = require('./package.json');
import helmet from 'helmet';
import marked from 'marked';
import uploadRouter from './routers/upload';
import resourceRouter from './routers/resource';
import { path, log, getTrueHttp, getTrueDomain } from './utils';
const { CODE_INTERNAL_SERVER_ERROR } = require('../MagicNumbers.json');
const { name: ASS_NAME, version: ASS_VERSION } = require('../package.json');
//#endregion
// Welcome :D
@ -39,8 +42,8 @@ const ROUTERS = {
};
// Read users and data
const users = require('./auth');
const data = require('./data');
import { users } from './auth';
import { data } from './data';
//#endregion
// Enable/disable Express features
@ -66,28 +69,28 @@ useSsl && app.use(helmet.hsts({ preload: true })); // skipcq: JS-0093
app.use(nofavicon);
// Use custom index, otherwise render README.md
const ASS_INDEX = indexFile !== '' && fs.existsSync(path('share', indexFile)) && require(`./share/${indexFile}`);
const ASS_INDEX = indexFile !== '' && fs.existsSync(path('share', indexFile)) && require(`../share/${indexFile}`);
const ASS_INDEX_ENABLED = typeof ASS_INDEX === typeof Function;
app.get('/', (req, res, next) => ASS_INDEX_ENABLED // skipcq: JS-0229
? ASS_INDEX(req, res, next)
: fs.readFile(path('.github', 'README.md'))
.then((bytes) => bytes.toString())
.then(marked)
.then((data) => marked(data))
.then((d) => res.render('index', { data: d }))
.catch(next));
// Set up custom frontend
const ASS_FRONTEND = fs.existsSync(`./${frontendName}/package.json`) ? (require('submodule'), require(`./${frontendName}`)) : { enabled: false };
const ASS_FRONTEND = fs.existsSync(path(`./${frontendName}/package.json`)) ? (require('submodule'), require(`../${frontendName}`)) : { enabled: false }; // todo: update with src/
ASS_FRONTEND.enabled && app.use(ASS_FRONTEND.endpoint, ASS_FRONTEND.router); // skipcq: JS-0093
// Upload router (has to come after custom frontends as express-busboy interferes with all POST calls)
app.use('/', ROUTERS.upload);
// '/:resouceId' always needs to be LAST since it's a catch-all route
app.use('/:resourceId', (req, _res, next) => (req.resourceId = req.params.resourceId, next()), ROUTERS.resource); // skipcq: JS-0086, JS-0090
app.use('/:resourceId', (req: AssRequest, _res, next) => (req.resourceId = req.params.resourceId, next()), ROUTERS.resource); // skipcq: JS-0086, JS-0090
// Error handler
app.use((err, _req, res, _next) => log.error(err).err(err).callback(() => res.sendStatus(CODE_INTERNAL_SERVER_ERROR))); // skipcq: JS-0128
app.use((err: ErrWrap, _req: AssRequest, res: AssResponse, _next: Function) => log.error(err).err(err).callback(() => res.sendStatus(CODE_INTERNAL_SERVER_ERROR))); // skipcq: JS-0128
// Host the server
log

@ -2,20 +2,19 @@
* Used for global auth management
*/
const fs = require('fs-extra');
const { log, path, arrayEquals } = require('./utils');
import fs from 'fs-extra';
import { log, path, arrayEquals } from './utils';
const users = require('./auth.json').users || {};
export const users = require('../auth.json').users || {};
// Monitor auth.json for changes (triggered by running 'npm run new-token')
fs.watch(path('auth.json'), { persistent: false },
(eventType) => eventType === 'change' && fs.readJson(path('auth.json'))
.then((json) => {
(eventType: String) => eventType === 'change' && fs.readJson(path('auth.json'))
.then((json: { users: JSON[] }) => {
if (!(arrayEquals(Object.keys(users), Object.keys(json.users)))) {
// @ts-ignore
Object.keys(json.users).forEach((token) => (!Object.prototype.hasOwnProperty.call(users, token)) && (users[token] = json.users[token]));
log.info('New token added', Object.keys(users)[Object.keys(users).length - 1] || 'No new token');
}
})
.catch(log.c.error));
module.exports = users;

@ -1,5 +1,5 @@
const check = require("check-node-version");
const ENGINES = require('./package.json').engines;
const ENGINES = require('../package.json').engines;
const TLog = require('@tycrek/log');
const logger = new TLog();

@ -6,7 +6,7 @@
const { JsonDataEngine } = require('@tycrek/papito');
// Actual data engine
const { dataEngine } = require('./config.json');
const { dataEngine } = require('../config.json');
const { _ENGINE_ } = require(dataEngine);
module.exports = _ENGINE_(new JsonDataEngine());
export const data = _ENGINE_(new JsonDataEngine());

@ -0,0 +1,63 @@
import { Request, Response } from "express";

/** An authorized API user, as stored in auth.json */
export interface User {
token: string
username: string
}

/** All metadata tracked for a single uploaded resource */
export interface FileData {
// Data from Multer file object
path: string
size: number
mimetype: string
ext: string
originalname: string
// Data from ass
randomId: string
deleteId: string
is: IsPossible
thumbnail: string
vibrant: string
sha1: string
domain: string
timestamp: number
timeoffset: string
token: string
opengraph: OpenGraphData
// NOTE(review): assigned somewhere in utils; presumably Multer's upload
// destination directory — confirm where this is set
destination: string
}

/** Media category flags for an uploaded resource */
export interface IsPossible {
image: boolean
video: boolean
audio: boolean
other: boolean
}

/** Embed overrides supplied via x-ass-og-* headers (raw header values may be string[]) */
export interface OpenGraphData {
title?: string | string[]
description?: string | string[]
author?: string | string[]
authorUrl?: string | string[]
provider?: string | string[]
providerUrl?: string | string[]
color?: string | string[]
}

/** Express Request extended with ass-specific fields attached by middleware */
export interface AssRequest extends Request {
resourceId?: string
ass?: { resourceId: string }
token?: string
file?: FileData
}

/** Alias for Express Response (no extra fields yet) */
export interface AssResponse extends Response {
}

/** Error that may carry an optional `code` property */
export interface ErrWrap extends Error {
code?: number | string
}

@ -0,0 +1,23 @@
import fs from 'fs-extra';

// Gfycat-style ID generator: builds IDs like "adjective-adjective-...-animal"
// from the bundled word lists (one word per line).

// Don't trigger circular dependency during setup
// (only read gfyIdSize from ../setup when the entry point is not setup.js)
if (require !== undefined && !require?.main?.filename.includes('setup.js'))
var MIN_LENGTH = require('../setup').gfyIdSize; // skipcq: JS-0239, JS-0102

// Pick one random word from the list and append the delimiter
function getWord(list: string[], delim = '') {
return list[Math.floor(Math.random() * list.length)].concat(delim);
}

// Concatenate hyphen-delimited adjectives (at least MIN_LENGTH of them), then one animal.
// NOTE(review): if MIN_LENGTH is undefined (setup path), both comparisons against it
// are false and the loop runs `count` times — confirm this is acceptable.
function genString(count = MIN_LENGTH) {
// For some reason these 3 lines MUST be inside the function
// (presumably defers loading ../utils until call time to avoid a circular
// require during module init — verify before hoisting)
const { path } = require('../utils');
const adjectives = fs.readFileSync(path('./gfycat/adjectives.txt')).toString().split('\n');
const animals = fs.readFileSync(path('./gfycat/animals.txt')).toString().split('\n');
let gfycat = '';
for (let i = 0; i < (count < MIN_LENGTH ? MIN_LENGTH : count); i++)
gfycat += getWord(adjectives, '-');
return gfycat.concat(getWord(animals));
};
export default ({ gfyLength }: { gfyLength: number }) => genString(gfyLength);

@ -0,0 +1,2 @@
import { randomBytes } from 'crypto';

/**
 * Generates a string of characters drawn from `charset`, using
 * cryptographically secure random bytes (each byte selects
 * `charset[byte % charset.length]`).
 *
 * NOTE(review): the first random pick is discarded and `charset[0]` is
 * appended, so the final character is always `charset[0]` — this preserves
 * the original `.slice(1).concat(charset[0])` behaviour; confirm intentional.
 *
 * @param length Number of random bytes to draw (output is `length` characters
 *               for `length >= 1`, or a single `charset[0]` for `length === 0`)
 * @param charset Characters to draw from (must be non-empty)
 * @returns The generated string
 */
export const lengthGen = (length: number, charset: string[]): string => {
	// Note: bytes from a Buffer are already numbers; no Number() conversion needed
	const picks = [...randomBytes(length)].map((byte) => charset[byte % charset.length]);
	picks.shift();
	picks.push(charset[0]);
	return picks.join('');
};

// Named export added for stack traces and testability; default export unchanged
export default lengthGen;

@ -0,0 +1,2 @@
import cryptoRandomString from 'crypto-random-string';

/** Generates a random alphanumeric resource ID of the requested length. */
const randomGen = ({ length }: { length: number }) => {
	return cryptoRandomString({ length, type: 'alphanumeric' });
};

export default randomGen;

@ -1,18 +1,19 @@
const uuid = require('uuid').v4;
const fs = require('fs-extra');
const path = require('path');
const randomGen = require('./random');
import { v4 as uuid } from 'uuid';
import fs from 'fs-extra';
import path from 'path';
import randomGen from './random';
const TLog = require('@tycrek/log');
const log = new TLog();
const MAX_USERNAME = 20;
export default () => uuid().replace(/-/g, '');
module.exports = () => uuid().replace(/-/g, '');
// If directly called on the command line, generate a new token
if (require.main === module) {
const token = module.exports();
const authPath = path.join(__dirname, '..', 'auth.json');
const authPath = path.join(process.cwd(), 'auth.json');
let name = '';
fs.readJson(authPath)
@ -20,7 +21,7 @@ if (require.main === module) {
// Generate the user
const username = process.argv[2] ? process.argv[2].replace(/[^\da-z_]/gi, '').substring(0, MAX_USERNAME) : randomGen({ length: 20 }); // skipcq: JS-0074
if (!auth.users) auth.users = {};
if (Object.values(auth.users).findIndex((user) => user.username === username) !== -1) {
if (Object.values(auth.users).findIndex((user: any) => user.username === username) !== -1) {
log.error('Username already exists', username);
process.exit(1);
}

@ -0,0 +1,3 @@
import lengthGen from './lengthGen';

// U+200B zero-width space, U+200C zero-width non-joiner,
// U+200D zero-width joiner, U+2060 word joiner
const ZERO_WIDTH_CHARS = ['\u200B', '\u200C', '\u200D', '\u2060'];

/** Generates an invisible resource ID built entirely from zero-width characters. */
export default ({ length }: { length: number }) => lengthGen(length, ZERO_WIDTH_CHARS);

@ -0,0 +1,18 @@
import { FileData } from './definitions';
import fs from 'fs-extra';
import crypto from 'crypto';
import toArray from 'stream-to-array';
import { log } from './utils';

/**
 * Generates a SHA1 hash for the provided file.
 * Streams the file from disk, concatenates the stream chunks into a single
 * Buffer, hashes it, logs the digest, then resolves with it.
 * @param file The file to hash (only `path` and `originalname` are used)
 * @returns Promise resolving to the SHA1 hash as a lowercase hex string
 */
export default (file: FileData): Promise<string> =>
	new Promise<string>((resolve, reject) =>
		toArray((fs.createReadStream(file.path)))
			.then((parts: any[]) => Buffer.concat(parts.map((part: any) => (Buffer.isBuffer(part) ? part : Buffer.from(part)))))
			.then((buf: Buffer) => crypto.createHash('sha1').update(buf).digest('hex')) // skipcq: JS-D003
			// primitive 'string', not the boxed 'String' wrapper type
			.then((hash: string) => log.debug(`Hash for ${file.originalname}`, hash, 'SHA1, hex').callback(resolve, hash))
			.catch(reject));

@ -0,0 +1,22 @@
const TLog = require('@tycrek/log');

// Log level: LOG_LEVEL env var wins; otherwise 'info' in production, 'debug' elsewhere
const level = process.env.LOG_LEVEL || (process.env.NODE_ENV === 'production' ? 'info' : 'debug');

// Shared logger instance with grey, medium-format timestamps
const logger = new TLog({
	level,
	timestamp: {
		enabled: true,
		colour: 'grey',
		preset: 'DATETIME_MED'
	},
});

// Enable the tlog Express plugin (its built-in 500 handling is disabled)
logger.enable.express({ handle500: false }).debug('Plugin enabled', 'Express');

export default logger;

@ -1,6 +1,6 @@
const fs = require('fs-extra');
const path = require('path');
const { s3enabled } = require('./config.json');
const { s3enabled } = require('../config.json');
const { formatBytes } = require('./utils');
const { bucketSize } = require('./storage');
@ -8,8 +8,8 @@ const TLog = require('@tycrek/log');
const log = new TLog({ level: 'debug', timestamp: { enabled: false } });
module.exports = () => {
const data = require('./data');
const { users } = fs.readJsonSync(path.join(__dirname, 'auth.json'));
const data = require('./data').data;
const { users } = fs.readJsonSync(path.join(process.cwd(), 'auth.json'));
Object.keys(users).forEach((token) => users[token].count = 0);
let totalSize = 0;

@ -1,35 +1,39 @@
const fs = require('fs-extra');
const escape = require('escape-html');
const fetch = require('node-fetch');
const { deleteS3 } = require('../storage');
const { diskFilePath, s3enabled, viewDirect } = require('../config.json');
const { path, log, getTrueHttp, getTrueDomain, formatBytes, formatTimestamp, getS3url, getDirectUrl, getResourceColor, replaceholder } = require('../utils');
const { CODE_UNAUTHORIZED, CODE_NOT_FOUND, } = require('../MagicNumbers.json');
const data = require('../data');
const users = require('../auth');
const express = require('express');
import { FileData, IsPossible, AssRequest, AssResponse } from '../definitions';
import fs from 'fs-extra';
import escape from 'escape-html';
import fetch, { Response } from 'node-fetch';
import { deleteS3 } from '../storage';
const { diskFilePath, s3enabled, viewDirect } = require('../../config.json');
import { path, log, getTrueHttp, getTrueDomain, formatBytes, formatTimestamp, getS3url, getDirectUrl, getResourceColor, replaceholder } from '../utils';
const { CODE_UNAUTHORIZED, CODE_NOT_FOUND, } = require('../../MagicNumbers.json');
import { data } from '../data';
import { users } from '../auth';
import express from 'express';
const router = express.Router();
// Middleware for parsing the resource ID and handling 404
router.use((req, res, next) => {
router.use((req: AssRequest, res: AssResponse, next) => {
// Parse the resource ID
req.ass = { resourceId: escape(req.resourceId || '').split('.')[0] };
// If the ID is invalid, return 404. Otherwise, continue normally
data.has(req.ass.resourceId)
.then((has) => has ? next() : res.sendStatus(CODE_NOT_FOUND)) // skipcq: JS-0229
.then((has: boolean) => has ? next() : res.sendStatus(CODE_NOT_FOUND)) // skipcq: JS-0229
.catch(next);
});
// View file
router.get('/', (req, res, next) => data.get(req.ass.resourceId).then((fileData) => {
const { resourceId } = req.ass;
router.get('/', (req: AssRequest, res: AssResponse, next) => data.get(req.ass?.resourceId).then((fileData: FileData) => {
const resourceId = req.ass!.resourceId;
// Build OpenGraph meta tags
const og = fileData.opengraph, ogs = [''];
og.title && (ogs.push(`<meta property="og:title" content="${og.title}">`)); // skipcq: JS-0093
og.description && (ogs.push(`<meta property="og:description" content="${og.description}">`)); // skipcq: JS-0093
// todo: figure out how to not ignore this
// @ts-ignore
og.color && (ogs.push(`<meta name="theme-color" content="${getResourceColor(og.color, fileData.vibrant)}">`)); // skipcq: JS-0093
!fileData.is.video && (ogs.push(`<meta name="twitter:card" content="summary_large_image">`)); // skipcq: JS-0093
@ -41,6 +45,8 @@ router.get('/', (req, res, next) => data.get(req.ass.resourceId).then((fileData)
uploader: users[fileData.token].username,
timestamp: formatTimestamp(fileData.timestamp, fileData.timeoffset),
size: formatBytes(fileData.size),
// todo: figure out how to not ignore this
// @ts-ignore
color: getResourceColor(fileData.opengraph.color || null, fileData.vibrant),
resourceAttr: { src: getDirectUrl(resourceId) },
discordUrl: `${getDirectUrl(resourceId)}${fileData.ext}`,
@ -53,19 +59,19 @@ router.get('/', (req, res, next) => data.get(req.ass.resourceId).then((fileData)
}).catch(next));
// Direct resource
router.get('/direct*', (req, res, next) => data.get(req.ass.resourceId).then((fileData) => {
router.get('/direct*', (req: AssRequest, res: AssResponse, next) => data.get(req.ass?.resourceId).then((fileData: FileData) => {
// Send file as an attachement for downloads
if (req.query.download)
res.header('Content-Disposition', `attachment; filename="${fileData.originalname}"`);
// Return the file differently depending on what storage option was used
const uploaders = {
s3: () => fetch(getS3url(fileData.randomId, fileData.ext)).then((file) => {
s3: () => fetch(getS3url(fileData.randomId, fileData.ext)).then((file: Response) => {
file.headers.forEach((value, header) => res.setHeader(header, value));
file.body.pipe(res);
file.body?.pipe(res);
}),
local: () => {
res.header('Accept-Ranges', 'bytes').header('Content-Length', fileData.size).type(fileData.mimetype);
res.header('Accept-Ranges', 'bytes').header('Content-Length', `${fileData.size}`).type(fileData.mimetype);
fs.createReadStream(fileData.path).pipe(res);
}
};
@ -74,33 +80,37 @@ router.get('/direct*', (req, res, next) => data.get(req.ass.resourceId).then((fi
}).catch(next));
// Thumbnail response
router.get('/thumbnail', (req, res, next) =>
data.get(req.ass.resourceId)
.then(({ is, thumbnail }) => fs.readFile((!is || (is.image || is.video)) ? path(diskFilePath, 'thumbnails/', thumbnail) : is.audio ? 'views/ass-audio-icon.png' : 'views/ass-file-icon.png'))
.then((fileData) => res.type('jpg').send(fileData))
router.get('/thumbnail', (req: AssRequest, res: AssResponse, next) =>
data.get(req.ass?.resourceId)
.then(({ is, thumbnail }: { is: IsPossible, thumbnail: string }) => fs.readFile((!is || (is.image || is.video)) ? path(diskFilePath, 'thumbnails/', thumbnail) : is.audio ? 'views/ass-audio-icon.png' : 'views/ass-file-icon.png'))
.then((fileData: Buffer) => res.type('jpg').send(fileData))
.catch(next));
// oEmbed response for clickable authors/providers
// https://oembed.com/
// https://old.reddit.com/r/discordapp/comments/82p8i6/a_basic_tutorial_on_how_to_get_the_most_out_of/
router.get('/oembed', (req, res, next) =>
data.get(req.ass.resourceId)
.then(({ opengraph, is, size, timestamp, timeoffset, originalname }) =>
router.get('/oembed', (req: AssRequest, res: AssResponse, next) =>
data.get(req.ass?.resourceId)
.then((fileData: FileData) =>
res.type('json').send({
version: '1.0',
type: is.video ? 'video' : is.image ? 'photo' : 'link',
author_url: opengraph.authorUrl,
provider_url: opengraph.providerUrl,
author_name: replaceholder(opengraph.author || '', size, timestamp, timeoffset, originalname),
provider_name: replaceholder(opengraph.provider || '', size, timestamp, timeoffset, originalname)
type: fileData.is.video ? 'video' : fileData.is.image ? 'photo' : 'link',
author_url: fileData.opengraph.authorUrl,
provider_url: fileData.opengraph.providerUrl,
// todo: figure out how to not ignore this
// @ts-ignore
author_name: replaceholder(fileData.opengraph.author || '', fileData.size, fileData.timestamp, fileData.timeoffset, fileData.originalname),
// todo: figure out how to not ignore this
// @ts-ignore
provider_name: replaceholder(fileData.opengraph.provider || '', fileData.size, fileData.timestamp, fileData.timeoffset, fileData.originalname)
}))
.catch(next));
// Delete file
router.get('/delete/:deleteId', (req, res, next) => {
let oldName, oldType; // skipcq: JS-0119
data.get(req.ass.resourceId)
.then((fileData) => {
router.get('/delete/:deleteId', (req: AssRequest, res: AssResponse, next) => {
let oldName: string, oldType: string; // skipcq: JS-0119
data.get(req.ass?.resourceId)
.then((fileData: FileData) => {
// Extract info for logs
oldName = fileData.originalname;
oldType = fileData.mimetype;
@ -117,9 +127,9 @@ router.get('/delete/:deleteId', (req, res, next) => {
(!fileData.is || (fileData.is.image || fileData.is.video)) && fs.existsSync(path(diskFilePath, 'thumbnails/', fileData.thumbnail))
? fs.rmSync(path(diskFilePath, 'thumbnails/', fileData.thumbnail)) : () => Promise.resolve()]);
})
.then(() => data.del(req.ass.resourceId))
.then(() => data.del(req.ass?.resourceId))
.then(() => (log.success('Deleted', oldName, oldType), res.type('text').send('File has been deleted!'))) // skipcq: JS-0090
.catch(next);
});
module.exports = router;
export default router;

@ -1,24 +1,27 @@
const fs = require('fs-extra');
const bb = require('express-busboy');
import { FileData, AssRequest, AssResponse, ErrWrap, User } from "../definitions";
import fs from 'fs-extra';
import bb from 'express-busboy';
//const rateLimit = require('express-rate-limit');
const { DateTime } = require('luxon');
const { Webhook, MessageBuilder } = require('discord-webhook-node');
const { processUploaded } = require('../storage');
const { maxUploadSize, resourceIdSize, gfyIdSize, resourceIdType, spaceReplace } = require('../config.json');
const { path, log, verify, getTrueHttp, getTrueDomain, generateId, formatBytes } = require('../utils');
const { CODE_UNAUTHORIZED, CODE_PAYLOAD_TOO_LARGE } = require('../MagicNumbers.json');
const data = require('../data');
const users = require('../auth');
import { DateTime } from 'luxon';
import { Webhook, MessageBuilder } from 'discord-webhook-node';
import { processUploaded } from '../storage';
const { maxUploadSize, resourceIdSize, gfyIdSize, resourceIdType, spaceReplace } = require('../../config.json');
import { path, log, verify, getTrueHttp, getTrueDomain, generateId, formatBytes } from '../utils';
const { CODE_UNAUTHORIZED, CODE_PAYLOAD_TOO_LARGE } = require('../../MagicNumbers.json');
import { data } from '../data';
import { users } from '../auth';
const ASS_LOGO = 'https://cdn.discordapp.com/icons/848274994375294986/8d339d4a2f3f54b2295e5e0ff62bd9e6.png?size=1024';
const express = require('express');
import express from 'express';
const router = express.Router();
// Set up express-busboy
// @ts-ignore
bb.extend(router, {
upload: true,
restrictMultiple: true,
allowedPath: (url) => url === '/',
allowedPath: (url: string) => url === '/',
});
// Rate limit middleware
@ -28,7 +31,7 @@ bb.extend(router, {
})); */
// Block unauthorized requests and attempt token sanitization
router.post('/', (req, res, next) => {
router.post('/', (req: AssRequest, res: AssResponse, next: Function) => {
req.headers.authorization = req.headers.authorization || '';
req.token = req.headers.authorization.replace(/[^\da-z]/gi, ''); // Strip anything that isn't a digit or ASCII letter
!verify(req, users) ? log.warn('Upload blocked', 'Unauthorized').callback(() => res.sendStatus(CODE_UNAUTHORIZED)) : next(); // skipcq: JS-0093
@ -38,28 +41,28 @@ router.post('/', (req, res, next) => {
router.post('/', processUploaded);
// Max upload size error handling
router.use('/', (err, _req, res, next) => err.message === 'LIMIT_FILE_SIZE' ? log.warn('Upload blocked', 'File too large').callback(() => res.status(CODE_PAYLOAD_TOO_LARGE).send(`Max upload size: ${maxUploadSize}MB`)) : next(err)); // skipcq: JS-0229
router.use('/', (err: ErrWrap, _req: AssRequest, res: AssResponse, next: Function) => err.message === 'LIMIT_FILE_SIZE' ? log.warn('Upload blocked', 'File too large').callback(() => res.status(CODE_PAYLOAD_TOO_LARGE).send(`Max upload size: ${maxUploadSize}MB`)) : next(err)); // skipcq: JS-0229
// Process uploaded file
router.post('/', (req, res, next) => {
router.post('/', (req: AssRequest, res: AssResponse, next: Function) => {
// Load overrides
const trueDomain = getTrueDomain(req.headers['x-ass-domain']);
const generator = req.headers['x-ass-access'] || resourceIdType;
// Save domain with file
req.file.domain = `${getTrueHttp()}${trueDomain}`;
req.file!.domain = `${getTrueHttp()}${trueDomain}`;
// Get the uploaded time in milliseconds
req.file.timestamp = DateTime.now().toMillis();
req.file!.timestamp = DateTime.now().toMillis();
// Save the timezone offset
req.file.timeoffset = req.headers['x-ass-timeoffset'] || 'UTC+0';
req.file!.timeoffset = req.headers['x-ass-timeoffset']?.toString() || 'UTC+0';
// Keep track of the token that uploaded the resource
req.file.token = req.token;
req.file!.token = req.token ?? '';
// Attach any embed overrides, if necessary
req.file.opengraph = {
req.file!.opengraph = {
title: req.headers['x-ass-og-title'],
description: req.headers['x-ass-og-description'],
author: req.headers['x-ass-og-author'],
@ -70,13 +73,13 @@ router.post('/', (req, res, next) => {
};
// Fix spaces in originalname
req.file.originalname = req.file.originalname.replace(/\s/g, spaceReplace === '!' ? '' : spaceReplace);
req.file!.originalname = req.file!.originalname.replace(/\s/g, spaceReplace === '!' ? '' : spaceReplace);
// Generate a unique resource ID
let resourceId = '';
// Function to call to generate a fresh ID. Used for multiple attempts in case an ID is already taken
const gen = () => generateId(generator, resourceIdSize, req.headers['x-ass-gfycat'] || gfyIdSize, req.file.originalname);
const gen = () => generateId(generator, resourceIdSize, req.headers['x-ass-gfycat'] || gfyIdSize, req.file!.originalname);
// Keeps track of the number of attempts in case all ID's are taken
const attempts = {
@ -85,11 +88,11 @@ router.post('/', (req, res, next) => {
};
// Called by a promise, this will recursively resolve itself until a unique ID is found
function genCheckId(resolve, reject) {
function genCheckId(resolve: Function, reject: Function) {
const uniqueId = gen();
attempts.count++;
data.has(uniqueId)
.then((exists) => {
.then((exists: boolean) => {
log.debug('ID check', exists ? 'Taken' : 'Available');
return attempts.count - 1 >= attempts.max ? reject(new Error('No ID\'s remaining')) : exists ? genCheckId(resolve, reject) : resolve(uniqueId);
})
@ -98,19 +101,20 @@ router.post('/', (req, res, next) => {
new Promise((resolve, reject) => genCheckId(resolve, reject))
.then((uniqueId) => {
//@ts-ignore
resourceId = uniqueId;
log.debug('Saving data', data.name);
})
.then(() => data.put(resourceId.split('.')[0], req.file))
.then(() => {
// Log the upload
const logInfo = `${req.file.originalname} (${req.file.mimetype}, ${formatBytes(req.file.size)})`;
log.success('File uploaded', logInfo, `uploaded by ${users[req.token] ? users[req.token].username : '<token-only>'}`);
const logInfo = `${req.file!.originalname} (${req.file!.mimetype}, ${formatBytes(req.file!.size)})`;
log.success('File uploaded', logInfo, `uploaded by ${users[req.token ?? ''] ? users[req.token ?? ''].username : '<token-only>'}`);
// Build the URLs
const resourceUrl = `${getTrueHttp()}${trueDomain}/${resourceId}`;
const thumbnailUrl = `${getTrueHttp()}${trueDomain}/${resourceId}/thumbnail`;
const deleteUrl = `${getTrueHttp()}${trueDomain}/${resourceId}/delete/${req.file.deleteId}`;
const deleteUrl = `${getTrueHttp()}${trueDomain}/${resourceId}/delete/${req.file!.deleteId}`;
// Send the response
res.type('json').send({ resource: resourceUrl, thumbnail: thumbnailUrl, delete: deleteUrl })
@ -121,17 +125,18 @@ router.post('/', (req, res, next) => {
if (req.headers['x-ass-webhook-url']) {
// Build the webhook
const hook = new Webhook(req.headers['x-ass-webhook-url']);
hook.setUsername(req.headers['x-ass-webhook-username'] || 'ass');
hook.setAvatar(req.headers['x-ass-webhook-avatar'] || ASS_LOGO);
const hook = new Webhook(req.headers['x-ass-webhook-url']?.toString());
hook.setUsername(req.headers['x-ass-webhook-username']?.toString() || 'ass');
hook.setAvatar(req.headers['x-ass-webhook-avatar']?.toString() || ASS_LOGO);
// Build the embed
const embed = new MessageBuilder()
.setTitle(logInfo)
//@ts-ignore
.setURL(resourceUrl)
.setDescription(`**Size:** \`${formatBytes(req.file.size)}\`\n**[Delete](${deleteUrl})**`)
.setDescription(`**Size:** \`${formatBytes(req.file!.size)}\`\n**[Delete](${deleteUrl})**`)
.setThumbnail(thumbnailUrl)
.setColor(req.file.vibrant)
.setColor(req.file!.vibrant)
.setTimestamp();
// Send the embed to the webhook, then delete the client after to free resources
@ -142,19 +147,25 @@ router.post('/', (req, res, next) => {
}
// Also update the users upload count
if (!users[req.token]) {
const generateUsername = () => generateId('random', 20, null); // skipcq: JS-0074
let username = generateUsername();
while (Object.values(users).findIndex((user) => user.username === username) !== -1) // skipcq: JS-0073
if (!users[req.token ?? '']) {
const generateUsername = () => generateId('random', 20, 0, req.file!.size.toString()); // skipcq: JS-0074
let username: string = generateUsername();
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore
while (Object.values(users).findIndex((user: User) => user.username === username) !== -1) // skipcq: JS-0073
username = generateUsername();
users[req.token] = { username, count: 0 };
users[req.token ?? ''] = { username, count: 0 };
}
users[req.token].count += 1;
users[req.token ?? ''].count += 1;
fs.writeJsonSync(path('auth.json'), { users }, { spaces: 4 });
log.debug('Upload request flow completed', '');
});
}).catch(next);
})
//@ts-ignore
.catch(next);
});
module.exports = router;
export default router;

@ -55,7 +55,7 @@ function getConfirmSchema(description) {
// If directly called on the command line, run setup script
function doSetup() {
const path = (...paths) => require('path').join(__dirname, ...paths);
const path = (...paths) => require('path').join(process.cwd(), ...paths);
const TLog = require('@tycrek/log');
const fs = require('fs-extra');
const prompt = require('prompt');
@ -66,7 +66,7 @@ function doSetup() {
// Override default configs with existing configs to allow migrating configs
// Now that's a lot of configs!
try {
const existingConfig = require('./config.json');
const existingConfig = require('../config.json');
Object.entries(existingConfig).forEach(([key, value]) => {
Object.prototype.hasOwnProperty.call(config, key) && (config[key] = value); // skipcq: JS-0093
Object.prototype.hasOwnProperty.call(s3config, key) && (s3config[key] = value); // skipcq: JS-0093
@ -278,7 +278,7 @@ function doSetup() {
// Complete & exit
.then(() => log.blank().success('Setup complete').callback(() => process.exit(0)))
.catch((err) => log.blank().error(err));
.catch((err) => log.blank().error(err).callback(() => process.exit(1)));
}
module.exports = {

@ -3,12 +3,12 @@
const fs = require('fs-extra');
const aws = require('aws-sdk');
const Thumbnail = require('./thumbnails');
const Vibrant = require('./vibrant');
const Hash = require('./hash');
const Thumbnail = require('./thumbnails').default;
const Vibrant = require('./vibrant').default;
const Hash = require('./hash').default;
const { getDatedDirname, generateId, log } = require('./utils');
const { s3enabled, s3endpoint, s3bucket, s3usePathStyle, s3accessKey, s3secretKey, saveAsOriginal, mediaStrict, maxUploadSize } = require('./config.json');
const { CODE_UNSUPPORTED_MEDIA_TYPE } = require('./MagicNumbers.json');
const { s3enabled, s3endpoint, s3bucket, s3usePathStyle, s3accessKey, s3secretKey, saveAsOriginal, mediaStrict, maxUploadSize } = require('../config.json');
const { CODE_UNSUPPORTED_MEDIA_TYPE } = require('../MagicNumbers.json');
const ID_GEN_LENGTH = 32;
const ALLOWED_MIMETYPES = /(image)|(video)|(audio)\//;

@ -1,9 +1,11 @@
const ffmpeg = require('ffmpeg-static');
const Jimp = require('jimp');
const shell = require('any-shell-escape');
const { exec } = require('child_process');
const { isProd, path } = require('./utils');
const { diskFilePath } = require('./config.json');
import { FileData } from "./definitions";
import ffmpeg from 'ffmpeg-static';
import Jimp from 'jimp';
// @ts-ignore
import shell from 'any-shell-escape';
import { exec } from 'child_process';
import { isProd, path } from './utils';
const { diskFilePath } = require('../config.json');
// Thumbnail parameters
const THUMBNAIL = {
@ -18,7 +20,7 @@ const THUMBNAIL = {
* @param {String} dest Path of the output file
* @returns {String} The command to execute
*/
function getCommand(src, dest) {
function getCommand(src: String, dest: String) {
return shell([
ffmpeg, '-y',
'-v', (isProd ? 'error' : 'debug'), // Log level
@ -35,7 +37,7 @@ function getCommand(src, dest) {
* @param {String} oldName The original filename
* @returns {String} The filename for the thumbnail
*/
function getNewName(oldName) {
function getNewName(oldName: String) {
return oldName.concat('.thumbnail.jpg');
}
@ -44,7 +46,7 @@ function getNewName(oldName) {
* @param {String} oldName The original filename
* @returns {String} The path to the thumbnail
*/
function getNewNamePath(oldName) {
function getNewNamePath(oldName: String) {
return path(diskFilePath, 'thumbnails/', getNewName(oldName));
}
@ -52,10 +54,11 @@ function getNewNamePath(oldName) {
* Extracts an image from a video file to use as a thumbnail, using ffmpeg
* @param {*} file The video file to pull a frame from
*/
function getVideoThumbnail(file) {
return new Promise((resolve, reject) => exec(
function getVideoThumbnail(file: FileData) {
return new Promise((resolve: Function, reject: Function) => exec(
getCommand(file.path, getNewNamePath(file.randomId)),
(err) => (err ? reject(err) : resolve())
// @ts-ignore
(err: Error) => (err ? reject(err) : resolve())
));
}
@ -63,7 +66,7 @@ function getVideoThumbnail(file) {
* Generates a thumbnail for the provided image
* @param {*} file The file to generate a thumbnail for
*/
function getImageThumbnail(file) {
function getImageThumbnail(file: FileData) {
return new Promise((resolve, reject) =>
Jimp.read(file.path)
.then((image) => image
@ -79,7 +82,7 @@ function getImageThumbnail(file) {
* @param {*} file The file to generate a thumbnail for
* @returns The thumbnail filename (NOT the path)
*/
module.exports = (file) =>
export default (file: FileData) =>
new Promise((resolve, reject) =>
(file.is.video ? getVideoThumbnail : file.is.image ? getImageThumbnail : () => Promise.resolve())(file)
.then(() => resolve((file.is.video || file.is.image) ? getNewName(file.randomId) : file.is.audio ? 'views/ass-audio-icon.png' : 'views/ass-file-icon.png'))

@ -1,39 +1,41 @@
const fs = require('fs-extra');
const Path = require('path');
const TLog = require('@tycrek/log');
const fetch = require('node-fetch');
const sanitize = require('sanitize-filename');
const { DateTime } = require('luxon');
const token = require('./generators/token');
const zwsGen = require('./generators/zws');
const randomGen = require('./generators/random');
const gfyGen = require('./generators/gfycat');
const { HTTP, HTTPS, KILOBYTES } = require('./MagicNumbers.json');
import { AssRequest, FileData } from './definitions';
import fs from 'fs-extra';
import Path from 'path';
import fetch from 'node-fetch';
import sanitize from 'sanitize-filename';
import { DateTime } from 'luxon';
import token from './generators/token';
import zwsGen from './generators/zws';
import randomGen from './generators/random';
import gfyGen from './generators/gfycat';
import logger from './logger';
const { HTTP, HTTPS, KILOBYTES } = require('../MagicNumbers.json');
// Catch config.json not existing when running setup script
try {
var { useSsl, port, domain, isProxied, diskFilePath, saveWithDate, s3bucket, s3endpoint, s3usePathStyle } = require('./config.json'); // skipcq: JS-0239, JS-0102
var { useSsl, port, domain, isProxied, diskFilePath, saveWithDate, s3bucket, s3endpoint, s3usePathStyle } = require('../config.json'); // skipcq: JS-0239, JS-0102
} catch (ex) {
// @ts-ignore
if (ex.code !== 'MODULE_NOT_FOUND') console.error(ex);
}
function getTrueHttp() {
export function getTrueHttp() {
return ('http').concat(useSsl ? 's' : '').concat('://');
}
function getTrueDomain(d = domain) {
export function getTrueDomain(d = domain) {
return d.concat((port === HTTP || port === HTTPS || isProxied) ? '' : `:${port}`);
}
function getS3url(s3key, ext) {
export function getS3url(s3key: string, ext: string) {
return `https://${s3usePathStyle ? `${s3endpoint}/${s3bucket}` : `${s3bucket}.${s3endpoint}`}/${s3key}${ext}`;
}
function getDirectUrl(resourceId) {
export function getDirectUrl(resourceId: string) {
return `${getTrueHttp()}${getTrueDomain()}/${resourceId}/direct`;
}
function randomHexColour() { // From: https://www.geeksforgeeks.org/javascript-generate-random-hex-codes-color/
export function randomHexColour() { // From: https://www.geeksforgeeks.org/javascript-generate-random-hex-codes-color/
const letters = '0123456789ABCDEF';
let colour = '#';
for (let i = 0; i < 6; i++) // skipcq: JS-0074
@ -41,29 +43,29 @@ function randomHexColour() { // From: https://www.geeksforgeeks.org/javascript-g
return colour;
}
function getResourceColor(colorValue, vibrantValue) {
export function getResourceColor(colorValue: string, vibrantValue: string) {
return colorValue === '&random' ? randomHexColour() : colorValue === '&vibrant' ? vibrantValue : colorValue;
}
function formatTimestamp(timestamp, timeoffset) {
export function formatTimestamp(timestamp: number, timeoffset: string) {
return DateTime.fromMillis(timestamp).setZone(timeoffset).toLocaleString(DateTime.DATETIME_MED);
}
function formatBytes(bytes, decimals = 2) { // skipcq: JS-0074
export function formatBytes(bytes: number, decimals = 2) { // skipcq: JS-0074
if (bytes === 0) return '0 Bytes';
const sizes = ['Bytes', 'KB', 'MB', 'GB', 'TB', 'PB', 'EB', 'ZB', 'YB'];
const i = Math.floor(Math.log(bytes) / Math.log(KILOBYTES));
return parseFloat((bytes / Math.pow(KILOBYTES, i)).toFixed(decimals < 0 ? 0 : decimals)).toString().concat(` ${sizes[i]}`);
}
function replaceholder(data, size, timestamp, timeoffset, originalname) {
export function replaceholder(data: string, size: number, timestamp: number, timeoffset: string, originalname: string) {
return data
.replace(/&size/g, formatBytes(size))
.replace(/&filename/g, originalname)
.replace(/&timestamp/g, formatTimestamp(timestamp, timeoffset));
}
function getDatedDirname() {
export function getDatedDirname() {
if (!saveWithDate) return diskFilePath;
// Get current month and year
@ -73,23 +75,20 @@ function getDatedDirname() {
return `${diskFilePath}${diskFilePath.endsWith('/') ? '' : '/'}${year}-${`0${month}`.slice(-2)}`; // skipcq: JS-0074
}
// Set up pathing & the logger
const path = (...paths) => Path.join(__dirname, ...paths);
const logger = new TLog({
level: process.env.LOG_LEVEL || (process.env.NODE_ENV === 'production' ? 'info' : 'debug'),
timestamp: {
enabled: true,
colour: 'grey',
preset: 'DATETIME_MED'
},
});
// Enable the Express logger
logger
.env('ASS_ENV')
//.enable.process({ uncaughtException: false }).debug('Plugin enabled', 'Process')
.enable.express({ handle500: false }).debug('Plugin enabled', 'Express')
.enable.socket().debug('Plugin enabled', 'Socket');
/**
 * Checks whether two arrays contain the same elements, ignoring order.
 * Neither input array is mutated (comparison happens on sorted copies).
 */
export function arrayEquals(arr1: any[], arr2: any[]) {
	if (arr1.length !== arr2.length) return false;
	// Sort each copy once up front, instead of re-sorting arr2 on every comparison
	const sorted1 = arr1.slice().sort();
	const sorted2 = arr2.slice().sort();
	return sorted1.every((value, index) => value === sorted2[index]);
}
/**
 * Truthy only when the request carries an Authorization header
 * that matches a known user token in the users object.
 */
export function verify(req: AssRequest, users: JSON) {
	const authHeader = req.headers.authorization;
	return authHeader && Object.prototype.hasOwnProperty.call(users, authHeader);
}
/**
 * Generates a resource ID using the generator registered for `mode`.
 * Falls back to the original filename when no generator matches.
 */
export function generateId(mode: string, length: number, gfyLength: number, originalName: string) {
	if (!GENERATORS.has(mode)) return originalName;
	return GENERATORS.get(mode)({ length, gfyLength });
}
// Set up pathing: resolve the given segments against the process working directory.
// NOTE(review): uses process.cwd(), not __dirname — assumes the server is always
// launched from the project root (so src/ and compiled dist/ resolve files identically).
export const path = (...paths: string[]) => Path.join(process.cwd(), ...paths);
const idModes = {
zws: 'zws', // Zero-width spaces (see: https://zws.im/)
@ -103,8 +102,8 @@ GENERATORS.set(idModes.zws, zwsGen);
GENERATORS.set(idModes.r, randomGen);
GENERATORS.set(idModes.gfy, gfyGen);
export const isProd = require('@tycrek/isprod')();
module.exports = {
isProd: require('@tycrek/isprod')(),
path,
getTrueHttp,
getTrueDomain,
@ -117,26 +116,27 @@ module.exports = {
getDatedDirname,
randomHexColour,
sanitize,
verify: (req, users) => req.headers.authorization && Object.prototype.hasOwnProperty.call(users, req.headers.authorization),
renameFile: (req, newName) => new Promise((resolve, reject) => {
verify,
renameFile: (req: AssRequest, newName: string) => new Promise((resolve: Function, reject) => {
try {
const paths = [req.file.destination, newName];
fs.rename(path(req.file.path), path(...paths));
req.file.path = Path.join(...paths);
const paths = [req.file!.destination, newName];
fs.rename(path(req.file!.path), path(...paths));
req.file!.path = Path.join(...paths);
resolve();
} catch (err) {
reject(err);
}
}),
generateToken: () => token(),
generateId: (mode, length, gfyLength, originalName) => (GENERATORS.has(mode) ? GENERATORS.get(mode)({ length, gfyLength }) : originalName),
arrayEquals: (arr1, arr2) => arr1.length === arr2.length && arr1.slice().sort().every((value, index) => value === arr2.slice().sort()[index]),
downloadTempS3: (file) => new Promise((resolve, reject) =>
generateId,
arrayEquals,
downloadTempS3: (file: FileData) => new Promise((resolve: Function, reject) =>
fetch(getS3url(file.randomId, file.ext))
.then((f2) => f2.body.pipe(fs.createWriteStream(Path.join(__dirname, diskFilePath, sanitize(file.originalname))).on('close', () => resolve())))
.catch(reject)),
}
export const log = logger;
/**
* @type {TLog}
*/

@ -1,5 +1,6 @@
const Vibrant = require('node-vibrant');
const { randomHexColour } = require('./utils');
import { FileData } from './definitions';
import Vibrant from 'node-vibrant';
import { randomHexColour } from './utils';
// Vibrant parameters
const COLOR_COUNT = 256;
@ -11,13 +12,13 @@ const QUALITY = 3;
* @param {*} resolve Runs if Promise was successful
* @param {*} reject Runs if Promise failed
*/
function getVibrant(file, resolve, reject) {
function getVibrant(file: FileData, resolve: Function, reject: Function) {
Vibrant.from(file.path)
.maxColorCount(COLOR_COUNT)
.quality(QUALITY)
.getPalette()
.then((palettes) => resolve(palettes[Object.keys(palettes).sort((a, b) => palettes[b].population - palettes[a].population)[0]].hex))
.catch(reject);
.then((palettes) => resolve(palettes[Object.keys(palettes).sort((a, b) => palettes[b]!.population - palettes[a]!.population)[0]]!.hex))
.catch((err) => reject(err));
}
/**
@ -25,4 +26,4 @@ function getVibrant(file, resolve, reject) {
* @param {*} file The file to get a colour from
* @returns The Vibrant colour as a Hex value (or random Hex value for videos)
*/
// Gets a colour to represent the provided file: the Vibrant palette colour for
// images, or a random hex colour for anything else (e.g. videos). Resolves with a hex string.
export default (file: FileData) => new Promise((resolve, reject) => !file.is.image ? resolve(randomHexColour()) : getVibrant(file, resolve, reject)); // skipcq: JS-0229

@ -0,0 +1,19 @@
{
"extends": "@tsconfig/node14/tsconfig.json",
"compilerOptions": {
"outDir": "./dist",
"lib": [
"ES2020",
"DOM"
],
"allowJs": true,
"downlevelIteration": true
},
"include": [
"src/**/*.js",
"src/**/*.ts"
],
"exclude": [
"ass-x"
]
}
Loading…
Cancel
Save