Switch codebase to TypeScript (#62)

It's about time.
pull/93/head releases/0.9.0
Josh Moore 3 years ago committed by tycrek
commit 1c4da1a849
No known key found for this signature in database
GPG Key ID: 25D74F3943625263

8
.github/README.md vendored

@ -351,14 +351,16 @@ ass has a number of pre-made npm scripts for you to use. **All** of these script
| Script | Description | | Script | Description |
| ------ | ----------- | | ------ | ----------- |
| **`start`** | Starts the ass server. This is the default script & is run with **`npm start`**. | | **`start`** | Starts the ass server. This is the default script & is run with **`npm start`**. |
| `build` | Compiles the TypeScript files into JavaScript. |
| `dev` | Chains the `build` and `compile` scripts together. |
| `setup` | Starts the easy setup process. Should be run after any updates that introduce new config options. | | `setup` | Starts the easy setup process. Should be run after any updates that introduce new config options. |
| `metrics` | Runs the metrics script. This is a simple script that outputs basic resource statistics. | | `metrics` | Runs the metrics script. This is a simple script that outputs basic resource statistics. |
| `new-token` | Generates a new API token. Accepts one parameter for specifying a username, like `npm run new-token <username>`. ass automatically detects the new token & reloads it, so there's no need to restart the server. | | `new-token` | Generates a new API token. Accepts one parameter for specifying a username, like `npm run new-token <username>`. ass automatically detects the new token & reloads it, so there's no need to restart the server. |
| `restart` | Restarts the ass server using `systemctl`. More info soon (should work fine if you have an existing `ass.service` file) |
| `engine-check` | Ensures your environment meets the minimum Node & npm version requirements. | | `engine-check` | Ensures your environment meets the minimum Node & npm version requirements. |
| `logs` | Uses the [tlog Socket plugin] to stream logs from the ass server to your terminal, with full colour support (Remember to set [`FORCE_COLOR`] if you're using Systemd) |
| `docker-logs` | Alias for `docker-compose logs -f --tail=50 --no-log-prefix ass` | | `docker-logs` | Alias for `docker-compose logs -f --tail=50 --no-log-prefix ass` |
| `docker-update` | Alias for `git pull && docker-compose up --force-recreate --build -d && docker image prune -f` | | `docker-update` | Calls `git pull` then runs the `docker-uplite` script. |
| `docker-uplite` | Alias for `docker-compose up --force-recreate --build -d && docker image prune -f` |
| `docker-upfull` | Alias for `npm run docker-update && npm run docker-resetup` |
| `docker-resetup` | Alias for `docker-compose exec ass npm run setup && docker-compose restart` | | `docker-resetup` | Alias for `docker-compose exec ass npm run setup && docker-compose restart` |
[tlog Socket plugin]: https://github.com/tycrek/tlog#socket [tlog Socket plugin]: https://github.com/tycrek/tlog#socket

@ -1,4 +1,4 @@
# ass Dockerfile v0.1.0 # ass Dockerfile v0.2.0
# authors: # authors:
# - tycrek <t@tycrek.com> (https://tycrek.com/) # - tycrek <t@tycrek.com> (https://tycrek.com/)
# - Zusier <zusier@pm.me> (https://github.com/Zusier) # - Zusier <zusier@pm.me> (https://github.com/Zusier)
@ -12,11 +12,11 @@ WORKDIR /opt/ass/
# Copy directory files (config.json, source files etc.) # Copy directory files (config.json, source files etc.)
COPY . ./ COPY . ./
# Update npm to at least v7.x.x # Update npm to at least v7.x.x,
RUN npm i -g npm@>=7 # then install dependencies
RUN npm i -g npm@>=7 typescript && \
# Install dependencies npm i --save-dev && \
RUN npm i npm run build
# Ensure these directories & files exist for compose volumes # Ensure these directories & files exist for compose volumes
RUN mkdir -p /opt/ass/uploads/thumbnails/ && \ RUN mkdir -p /opt/ass/uploads/thumbnails/ && \

@ -1 +1 @@
Subproject commit ac4fbe6d850919a4c347cf3bb69c8d2cb60cd87d Subproject commit 2b143138ef2cb7790117709882e429f5e7680d75

@ -1,4 +1,4 @@
# ass Docker compose.yaml v0.1.0 # ass Docker compose.yaml v0.1.1
# authors: # authors:
# - tycrek <t@tycrek.com> (https://tycrek.com/) # - tycrek <t@tycrek.com> (https://tycrek.com/)
# - Zusier <zusier@pm.me> (https://github.com/Zusier) # - Zusier <zusier@pm.me> (https://github.com/Zusier)
@ -11,7 +11,6 @@ services:
restart: unless-stopped restart: unless-stopped
ports: ports:
- "40115:40115" - "40115:40115"
- "45375:45375"
volumes: volumes:
- ./uploads:/opt/ass/uploads - ./uploads:/opt/ass/uploads
- ./share:/opt/ass/share - ./share:/opt/ass/share

@ -1,20 +0,0 @@
const fs = require('fs-extra');
const adjectives = fs.readFileSync('./generators/gfycat/adjectives.txt').toString().split('\n');
const animals = fs.readFileSync('./generators/gfycat/animals.txt').toString().split('\n');
// Don't trigger circular dependency during setup
if (!require.main.filename.includes('setup.js'))
var MIN_LENGTH = require('../setup').gfyIdSize; // skipcq: JS-0239, JS-0102
function getWord(list, delim = '') {
return list[Math.floor(Math.random() * list.length)].concat(delim);
}
function genString(count = MIN_LENGTH) {
let gfycat = '';
for (let i = 0; i < (count < MIN_LENGTH ? MIN_LENGTH : count); i++)
gfycat += getWord(adjectives, '-');
return gfycat.concat(getWord(animals));
};
module.exports = ({ gfyLength }) => genString(gfyLength);

@ -1,2 +0,0 @@
const { randomBytes } = require('crypto');
module.exports = (length, charset) => [...randomBytes(length)].map((byte) => charset[Number(byte) % charset.length]).join('').slice(1).concat(charset[0]);

@ -1,2 +0,0 @@
const cryptoRandomString = require('crypto-random-string');
module.exports = ({ length }) => cryptoRandomString({ length, type: 'alphanumeric' });

@ -1,3 +0,0 @@
const lengthGen = require('./lengthGen');
const zeroWidthChars = ['\u200B', '\u200C', '\u200D', '\u2060'];
module.exports = ({ length }) => lengthGen(length, zeroWidthChars);

@ -1,17 +0,0 @@
const fs = require('fs-extra');
const crypto = require('crypto');
const toArray = require('stream-to-array');
const { log } = require('./utils');
/**
* Generates a SHA1 hash for the provided file
* @param {*} file The file to hash
* @returns The SHA1 hash
*/
module.exports = (file) =>
new Promise((resolve, reject) =>
toArray((fs.createReadStream(file.path)))
.then((parts) => Buffer.concat(parts.map((part) => (Buffer.isBuffer(part) ? part : Buffer.from(part)))))
.then((buf) => crypto.createHash('sha1').update(buf).digest('hex')) // skipcq: JS-D003
.then((hash) => log.debug(`Hash for ${file.originalname}`, hash, 'SHA1, hex').callback(resolve, hash))
.catch(reject));

2056
package-lock.json generated

File diff suppressed because it is too large Load Diff

@ -1,6 +1,6 @@
{ {
"name": "ass", "name": "ass",
"version": "0.8.7", "version": "0.9.0",
"description": "The superior self-hosted ShareX server", "description": "The superior self-hosted ShareX server",
"main": "ass.js", "main": "ass.js",
"engines": { "engines": {
@ -8,17 +8,19 @@
"npm": ">=7.x.x" "npm": ">=7.x.x"
}, },
"scripts": { "scripts": {
"start": "node ass.js", "dev": "npm run build && npm start",
"setup": "node setup.js", "build": "ts",
"metrics": "node metrics.js", "start": "node dist/ass.js",
"new-token": "node generators/token.js", "setup": "node dist/setup.js",
"restart": "systemctl restart ass.service", "metrics": "node dist/metrics.js",
"engine-check": "node checkEngine.js", "new-token": "node dist/generators/token.js",
"engine-check": "node dist/checkEngine.js",
"prestart": "npm run engine-check", "prestart": "npm run engine-check",
"presetup": "npm run engine-check", "presetup": "npm run engine-check",
"logs": "node ./node_modules/@tycrek/log/socketClient.js",
"docker-logs": "docker-compose logs -f --tail=50 --no-log-prefix ass", "docker-logs": "docker-compose logs -f --tail=50 --no-log-prefix ass",
"docker-update": "git pull && docker-compose up --force-recreate --build -d && docker image prune -f", "docker-update": "git pull && npm run docker-uplite",
"docker-uplite": "docker-compose up --force-recreate --build -d && docker image prune -f",
"docker-upfull": "npm run docker-update && npm run docker-resetup",
"docker-resetup": "docker-compose exec ass npm run setup && docker-compose restart" "docker-resetup": "docker-compose exec ass npm run setup && docker-compose restart"
}, },
"repository": "github:tycrek/ass", "repository": "github:tycrek/ass",
@ -35,6 +37,7 @@
"url": "https://patreon.com/tycrek" "url": "https://patreon.com/tycrek"
}, },
"dependencies": { "dependencies": {
"@tsconfig/node14": "^1.0.1",
"@tycrek/express-nofavicon": "^1.0.3", "@tycrek/express-nofavicon": "^1.0.3",
"@tycrek/isprod": "^2.0.2", "@tycrek/isprod": "^2.0.2",
"@tycrek/log": "^0.5.9", "@tycrek/log": "^0.5.9",
@ -54,13 +57,28 @@
"jimp": "^0.16.1", "jimp": "^0.16.1",
"luxon": "^1.26.0", "luxon": "^1.26.0",
"marked": "^2.0.7", "marked": "^2.0.7",
"node-fetch": "^2.6.1", "node-fetch": "^2.6.2",
"node-vibrant": "*", "node-vibrant": "*",
"prompt": "^1.1.0", "prompt": "^1.1.0",
"pug": "^3.0.2", "pug": "^3.0.2",
"sanitize-filename": "^1.6.3", "sanitize-filename": "^1.6.3",
"stream-to-array": "^2.3.0", "stream-to-array": "^2.3.0",
"submodule": "^1.2.1", "submodule": "^1.2.1",
"ts": "^0.2.2",
"uuid": "^8.3.2" "uuid": "^8.3.2"
},
"devDependencies": {
"@types/escape-html": "^1.0.1",
"@types/express": "^4.17.13",
"@types/express-busboy": "^8.0.0",
"@types/ffmpeg-static": "^3.0.0",
"@types/fs-extra": "^9.0.12",
"@types/luxon": "^2.0.3",
"@types/marked": "^3.0.0",
"@types/node": "^16.9.0",
"@types/node-fetch": "^2.5.12",
"@types/stream-to-array": "^2.3.0",
"@types/uuid": "^8.3.1",
"@types/ws": "^7.4.7"
} }
} }

@ -1,7 +1,9 @@
import { AssRequest, AssResponse, ErrWrap } from './definitions';
let doSetup = null; let doSetup = null;
try { try {
// Check if config.json exists // Check if config.json exists
require('./config.json'); require('../config.json');
} catch (err) { } catch (err) {
doSetup = require('./setup').doSetup; doSetup = require('./setup').doSetup;
} }
@ -9,23 +11,24 @@ try {
// Run first time setup if using Docker (pseudo-process, setup will be run with docker exec) // Run first time setup if using Docker (pseudo-process, setup will be run with docker exec)
if (doSetup) { if (doSetup) {
doSetup(); doSetup();
// @ts-ignore
return; return;
} }
// Load the config // Load the config
const { host, port, useSsl, isProxied, s3enabled, frontendName, indexFile } = require('./config.json'); const { host, port, useSsl, isProxied, s3enabled, frontendName, indexFile } = require('../config.json');
//#region Imports //#region Imports
const fs = require('fs-extra'); import fs from 'fs-extra';
const express = require('express'); import express from 'express';
const nofavicon = require('@tycrek/express-nofavicon'); const nofavicon = require('@tycrek/express-nofavicon');
const helmet = require('helmet'); import helmet from 'helmet';
const marked = require('marked'); import marked from 'marked';
const uploadRouter = require('./routers/upload'); import uploadRouter from './routers/upload';
const resourceRouter = require('./routers/resource'); import resourceRouter from './routers/resource';
const { path, log, getTrueHttp, getTrueDomain } = require('./utils'); import { path, log, getTrueHttp, getTrueDomain } from './utils';
const { CODE_INTERNAL_SERVER_ERROR } = require('./MagicNumbers.json'); const { CODE_INTERNAL_SERVER_ERROR } = require('../MagicNumbers.json');
const { name: ASS_NAME, version: ASS_VERSION } = require('./package.json'); const { name: ASS_NAME, version: ASS_VERSION } = require('../package.json');
//#endregion //#endregion
// Welcome :D // Welcome :D
@ -39,8 +42,8 @@ const ROUTERS = {
}; };
// Read users and data // Read users and data
const users = require('./auth'); import { users } from './auth';
const data = require('./data'); import { data } from './data';
//#endregion //#endregion
// Enable/disable Express features // Enable/disable Express features
@ -66,28 +69,28 @@ useSsl && app.use(helmet.hsts({ preload: true })); // skipcq: JS-0093
app.use(nofavicon); app.use(nofavicon);
// Use custom index, otherwise render README.md // Use custom index, otherwise render README.md
const ASS_INDEX = indexFile !== '' && fs.existsSync(path('share', indexFile)) && require(`./share/${indexFile}`); const ASS_INDEX = indexFile !== '' && fs.existsSync(path('share', indexFile)) && require(`../share/${indexFile}`);
const ASS_INDEX_ENABLED = typeof ASS_INDEX === typeof Function; const ASS_INDEX_ENABLED = typeof ASS_INDEX === typeof Function;
app.get('/', (req, res, next) => ASS_INDEX_ENABLED // skipcq: JS-0229 app.get('/', (req, res, next) => ASS_INDEX_ENABLED // skipcq: JS-0229
? ASS_INDEX(req, res, next) ? ASS_INDEX(req, res, next)
: fs.readFile(path('.github', 'README.md')) : fs.readFile(path('.github', 'README.md'))
.then((bytes) => bytes.toString()) .then((bytes) => bytes.toString())
.then(marked) .then((data) => marked(data))
.then((d) => res.render('index', { data: d })) .then((d) => res.render('index', { data: d }))
.catch(next)); .catch(next));
// Set up custom frontend // Set up custom frontend
const ASS_FRONTEND = fs.existsSync(`./${frontendName}/package.json`) ? (require('submodule'), require(`./${frontendName}`)) : { enabled: false }; const ASS_FRONTEND = fs.existsSync(path(`./${frontendName}/package.json`)) ? (require('submodule'), require(`../${frontendName}`)) : { enabled: false }; // todo: update with src/
ASS_FRONTEND.enabled && app.use(ASS_FRONTEND.endpoint, ASS_FRONTEND.router); // skipcq: JS-0093 ASS_FRONTEND.enabled && app.use(ASS_FRONTEND.endpoint, ASS_FRONTEND.router); // skipcq: JS-0093
// Upload router (has to come after custom frontends as express-busboy interferes with all POST calls) // Upload router (has to come after custom frontends as express-busboy interferes with all POST calls)
app.use('/', ROUTERS.upload); app.use('/', ROUTERS.upload);
// '/:resouceId' always needs to be LAST since it's a catch-all route // '/:resouceId' always needs to be LAST since it's a catch-all route
app.use('/:resourceId', (req, _res, next) => (req.resourceId = req.params.resourceId, next()), ROUTERS.resource); // skipcq: JS-0086, JS-0090 app.use('/:resourceId', (req: AssRequest, _res, next) => (req.resourceId = req.params.resourceId, next()), ROUTERS.resource); // skipcq: JS-0086, JS-0090
// Error handler // Error handler
app.use((err, _req, res, _next) => log.error(err).err(err).callback(() => res.sendStatus(CODE_INTERNAL_SERVER_ERROR))); // skipcq: JS-0128 app.use((err: ErrWrap, _req: AssRequest, res: AssResponse, _next: Function) => log.error(err).err(err).callback(() => res.sendStatus(CODE_INTERNAL_SERVER_ERROR))); // skipcq: JS-0128
// Host the server // Host the server
log log

@ -2,20 +2,19 @@
* Used for global auth management * Used for global auth management
*/ */
const fs = require('fs-extra'); import fs from 'fs-extra';
const { log, path, arrayEquals } = require('./utils'); import { log, path, arrayEquals } from './utils';
const users = require('./auth.json').users || {}; export const users = require('../auth.json').users || {};
// Monitor auth.json for changes (triggered by running 'npm run new-token') // Monitor auth.json for changes (triggered by running 'npm run new-token')
fs.watch(path('auth.json'), { persistent: false }, fs.watch(path('auth.json'), { persistent: false },
(eventType) => eventType === 'change' && fs.readJson(path('auth.json')) (eventType: String) => eventType === 'change' && fs.readJson(path('auth.json'))
.then((json) => { .then((json: { users: JSON[] }) => {
if (!(arrayEquals(Object.keys(users), Object.keys(json.users)))) { if (!(arrayEquals(Object.keys(users), Object.keys(json.users)))) {
// @ts-ignore
Object.keys(json.users).forEach((token) => (!Object.prototype.hasOwnProperty.call(users, token)) && (users[token] = json.users[token])); Object.keys(json.users).forEach((token) => (!Object.prototype.hasOwnProperty.call(users, token)) && (users[token] = json.users[token]));
log.info('New token added', Object.keys(users)[Object.keys(users).length - 1] || 'No new token'); log.info('New token added', Object.keys(users)[Object.keys(users).length - 1] || 'No new token');
} }
}) })
.catch(log.c.error)); .catch(log.c.error));
module.exports = users;

@ -1,5 +1,5 @@
const check = require("check-node-version"); const check = require("check-node-version");
const ENGINES = require('./package.json').engines; const ENGINES = require('../package.json').engines;
const TLog = require('@tycrek/log'); const TLog = require('@tycrek/log');
const logger = new TLog(); const logger = new TLog();

@ -6,7 +6,7 @@
const { JsonDataEngine } = require('@tycrek/papito'); const { JsonDataEngine } = require('@tycrek/papito');
// Actual data engine // Actual data engine
const { dataEngine } = require('./config.json'); const { dataEngine } = require('../config.json');
const { _ENGINE_ } = require(dataEngine); const { _ENGINE_ } = require(dataEngine);
module.exports = _ENGINE_(new JsonDataEngine()); export const data = _ENGINE_(new JsonDataEngine());

@ -0,0 +1,63 @@
import { Request, Response } from "express";
/** A registered API user as stored in auth.json (token → user entry). */
export interface User {
token: string
username: string
}
/** Metadata record kept for every uploaded file. */
export interface FileData {
// Data from Multer file object
path: string
size: number
mimetype: string
ext: string
originalname: string
// Data from ass
randomId: string
deleteId: string
is: IsPossible
thumbnail: string
vibrant: string
sha1: string
domain: string
timestamp: number
timeoffset: string
token: string
opengraph: OpenGraphData
// NOTE(review): referenced from utils but its origin is unclear — confirm where this is set
destination: string
}
/** Coarse media classification for an upload (exactly one is expected to be true). */
export interface IsPossible {
image: boolean
video: boolean
audio: boolean
other: boolean
}
/** Optional OpenGraph metadata attached to an upload; values may be repeated (string[]). */
export interface OpenGraphData {
title?: string | string[]
description?: string | string[]
author?: string | string[]
authorUrl?: string | string[]
provider?: string | string[]
providerUrl?: string | string[]
color?: string | string[]
}
/** Express Request augmented with ass-specific fields set by middleware. */
export interface AssRequest extends Request {
resourceId?: string
ass?: { resourceId: string }
token?: string
file?: FileData
}
/** Express Response alias (no extra fields yet; exists for symmetry with AssRequest). */
export interface AssResponse extends Response {
}
/** Error with an optional code, as produced by Node APIs (string codes) or HTTP handling (numbers). */
export interface ErrWrap extends Error {
code?: number | string
}

@ -0,0 +1,23 @@
import fs from 'fs-extra';
// Don't trigger circular dependency during setup: setup.js requires this module,
// so only pull gfyIdSize from setup when we are NOT running under setup itself.
// 'var' is deliberate — it hoists MIN_LENGTH to module scope past the 'if'.
if (require !== undefined && !require?.main?.filename.includes('setup.js'))
var MIN_LENGTH = require('../setup').gfyIdSize; // skipcq: JS-0239, JS-0102
// Picks one random entry from list, with an optional trailing delimiter.
function getWord(list: string[], delim = '') {
return list[Math.floor(Math.random() * list.length)].concat(delim);
}
// Builds an ID of `count` hyphen-joined adjectives followed by one animal,
// enforcing at least MIN_LENGTH adjectives.
function genString(count = MIN_LENGTH) {
// For some reason these 3 lines MUST be inside the function
// (presumably to defer the utils require and file reads until first call — TODO confirm)
const { path } = require('../utils');
const adjectives = fs.readFileSync(path('./gfycat/adjectives.txt')).toString().split('\n');
const animals = fs.readFileSync(path('./gfycat/animals.txt')).toString().split('\n');
let gfycat = '';
for (let i = 0; i < (count < MIN_LENGTH ? MIN_LENGTH : count); i++)
gfycat += getWord(adjectives, '-');
return gfycat.concat(getWord(animals));
};
// Generates a gfycat-style resource ID (e.g. "spotted-hungry-walrus").
export default ({ gfyLength }: { gfyLength: number }) => genString(gfyLength);

@ -0,0 +1,2 @@
import { randomBytes } from 'crypto';

/**
 * Builds a random string of `length` characters drawn from `charset`.
 * Each random byte is mapped onto the charset by modulo; the first random
 * character is then dropped and `charset[0]` is appended, so the result keeps
 * its length and always ends with the charset's first entry.
 */
const lengthGen = (length: number, charset: string[]): string => {
	const picked: string[] = [];
	for (const byte of randomBytes(length))
		picked.push(charset[Number(byte) % charset.length]);
	return picked.join('').slice(1).concat(charset[0]);
};

export default lengthGen;

@ -0,0 +1,2 @@
import cryptoRandomString from 'crypto-random-string';

/**
 * Generates a random alphanumeric resource ID of the requested length.
 * Thin wrapper around crypto-random-string with type fixed to 'alphanumeric'.
 */
const nanoid = ({ length }: { length: number }): string =>
	cryptoRandomString({ length, type: 'alphanumeric' });

export default nanoid;

@ -1,18 +1,19 @@
const uuid = require('uuid').v4; import { v4 as uuid } from 'uuid';
const fs = require('fs-extra'); import fs from 'fs-extra';
const path = require('path'); import path from 'path';
const randomGen = require('./random'); import randomGen from './random';
const TLog = require('@tycrek/log'); const TLog = require('@tycrek/log');
const log = new TLog(); const log = new TLog();
const MAX_USERNAME = 20; const MAX_USERNAME = 20;
export default () => uuid().replace(/-/g, '');
module.exports = () => uuid().replace(/-/g, ''); module.exports = () => uuid().replace(/-/g, '');
// If directly called on the command line, generate a new token // If directly called on the command line, generate a new token
if (require.main === module) { if (require.main === module) {
const token = module.exports(); const token = module.exports();
const authPath = path.join(__dirname, '..', 'auth.json'); const authPath = path.join(process.cwd(), 'auth.json');
let name = ''; let name = '';
fs.readJson(authPath) fs.readJson(authPath)
@ -20,7 +21,7 @@ if (require.main === module) {
// Generate the user // Generate the user
const username = process.argv[2] ? process.argv[2].replace(/[^\da-z_]/gi, '').substring(0, MAX_USERNAME) : randomGen({ length: 20 }); // skipcq: JS-0074 const username = process.argv[2] ? process.argv[2].replace(/[^\da-z_]/gi, '').substring(0, MAX_USERNAME) : randomGen({ length: 20 }); // skipcq: JS-0074
if (!auth.users) auth.users = {}; if (!auth.users) auth.users = {};
if (Object.values(auth.users).findIndex((user) => user.username === username) !== -1) { if (Object.values(auth.users).findIndex((user: any) => user.username === username) !== -1) {
log.error('Username already exists', username); log.error('Username already exists', username);
process.exit(1); process.exit(1);
} }

@ -0,0 +1,3 @@
import lengthGen from './lengthGen';

// Invisible code points used to build zero-width ("hidden") resource IDs:
// ZWSP, ZWNJ, ZWJ, and WORD JOINER.
const zeroWidthChars = ['\u200B', '\u200C', '\u200D', '\u2060'];

/** Generates a string of `length` zero-width characters. */
const zwsGen = ({ length }: { length: number }) => lengthGen(length, zeroWidthChars);

export default zwsGen;

@ -0,0 +1,18 @@
import { FileData } from './definitions';
import fs from 'fs-extra';
import crypto from 'crypto';
import toArray from 'stream-to-array';
import { log } from './utils';
/**
 * Generates a SHA1 hash for the provided file.
 * Reads the on-disk contents at file.path as a stream, concatenates the
 * chunks, and digests them.
 * @param file The file to hash
 * @returns Promise resolving to the SHA1 hash, hex-encoded
 */
export default (file: FileData): Promise<string> =>
	new Promise((resolve, reject) =>
		toArray((fs.createReadStream(file.path)))
			// Stream chunks may be Buffers or plain arrays/strings — normalize to Buffers before concat
			.then((parts: any[]) => Buffer.concat(parts.map((part: any) => (Buffer.isBuffer(part) ? part : Buffer.from(part)))))
			.then((buf: Buffer) => crypto.createHash('sha1').update(buf).digest('hex')) // skipcq: JS-D003
			// Log the result, then resolve with the hash via tlog's callback chaining
			.then((hash: string) => log.debug(`Hash for ${file.originalname}`, hash, 'SHA1, hex').callback(resolve, hash))
			.catch(reject));

@ -0,0 +1,22 @@
const TLog = require('@tycrek/log');

// Log level: explicit LOG_LEVEL wins, otherwise 'info' in production, 'debug' elsewhere
const level = process.env.LOG_LEVEL || (process.env.NODE_ENV === 'production' ? 'info' : 'debug');

// Set up logging with grey medium-format timestamps
const logger = new TLog({
	level,
	timestamp: {
		enabled: true,
		colour: 'grey',
		preset: 'DATETIME_MED'
	},
});

// Enable the Express logger (500s are handled by ass's own error handler)
logger.enable.express({ handle500: false }).debug('Plugin enabled', 'Express');

export default logger;

@ -1,6 +1,6 @@
const fs = require('fs-extra'); const fs = require('fs-extra');
const path = require('path'); const path = require('path');
const { s3enabled } = require('./config.json'); const { s3enabled } = require('../config.json');
const { formatBytes } = require('./utils'); const { formatBytes } = require('./utils');
const { bucketSize } = require('./storage'); const { bucketSize } = require('./storage');
@ -8,8 +8,8 @@ const TLog = require('@tycrek/log');
const log = new TLog({ level: 'debug', timestamp: { enabled: false } }); const log = new TLog({ level: 'debug', timestamp: { enabled: false } });
module.exports = () => { module.exports = () => {
const data = require('./data'); const data = require('./data').data;
const { users } = fs.readJsonSync(path.join(__dirname, 'auth.json')); const { users } = fs.readJsonSync(path.join(process.cwd(), 'auth.json'));
Object.keys(users).forEach((token) => users[token].count = 0); Object.keys(users).forEach((token) => users[token].count = 0);
let totalSize = 0; let totalSize = 0;

@ -1,35 +1,39 @@
const fs = require('fs-extra'); import { FileData, IsPossible, AssRequest, AssResponse } from '../definitions';
const escape = require('escape-html');
const fetch = require('node-fetch'); import fs from 'fs-extra';
const { deleteS3 } = require('../storage'); import escape from 'escape-html';
const { diskFilePath, s3enabled, viewDirect } = require('../config.json'); import fetch, { Response } from 'node-fetch';
const { path, log, getTrueHttp, getTrueDomain, formatBytes, formatTimestamp, getS3url, getDirectUrl, getResourceColor, replaceholder } = require('../utils'); import { deleteS3 } from '../storage';
const { CODE_UNAUTHORIZED, CODE_NOT_FOUND, } = require('../MagicNumbers.json'); const { diskFilePath, s3enabled, viewDirect } = require('../../config.json');
const data = require('../data'); import { path, log, getTrueHttp, getTrueDomain, formatBytes, formatTimestamp, getS3url, getDirectUrl, getResourceColor, replaceholder } from '../utils';
const users = require('../auth'); const { CODE_UNAUTHORIZED, CODE_NOT_FOUND, } = require('../../MagicNumbers.json');
import { data } from '../data';
const express = require('express'); import { users } from '../auth';
import express from 'express';
const router = express.Router(); const router = express.Router();
// Middleware for parsing the resource ID and handling 404 // Middleware for parsing the resource ID and handling 404
router.use((req, res, next) => { router.use((req: AssRequest, res: AssResponse, next) => {
// Parse the resource ID // Parse the resource ID
req.ass = { resourceId: escape(req.resourceId || '').split('.')[0] }; req.ass = { resourceId: escape(req.resourceId || '').split('.')[0] };
// If the ID is invalid, return 404. Otherwise, continue normally // If the ID is invalid, return 404. Otherwise, continue normally
data.has(req.ass.resourceId) data.has(req.ass.resourceId)
.then((has) => has ? next() : res.sendStatus(CODE_NOT_FOUND)) // skipcq: JS-0229 .then((has: boolean) => has ? next() : res.sendStatus(CODE_NOT_FOUND)) // skipcq: JS-0229
.catch(next); .catch(next);
}); });
// View file // View file
router.get('/', (req, res, next) => data.get(req.ass.resourceId).then((fileData) => { router.get('/', (req: AssRequest, res: AssResponse, next) => data.get(req.ass?.resourceId).then((fileData: FileData) => {
const { resourceId } = req.ass; const resourceId = req.ass!.resourceId;
// Build OpenGraph meta tags // Build OpenGraph meta tags
const og = fileData.opengraph, ogs = ['']; const og = fileData.opengraph, ogs = [''];
og.title && (ogs.push(`<meta property="og:title" content="${og.title}">`)); // skipcq: JS-0093 og.title && (ogs.push(`<meta property="og:title" content="${og.title}">`)); // skipcq: JS-0093
og.description && (ogs.push(`<meta property="og:description" content="${og.description}">`)); // skipcq: JS-0093 og.description && (ogs.push(`<meta property="og:description" content="${og.description}">`)); // skipcq: JS-0093
// todo: figure out how to not ignore this
// @ts-ignore
og.color && (ogs.push(`<meta name="theme-color" content="${getResourceColor(og.color, fileData.vibrant)}">`)); // skipcq: JS-0093 og.color && (ogs.push(`<meta name="theme-color" content="${getResourceColor(og.color, fileData.vibrant)}">`)); // skipcq: JS-0093
!fileData.is.video && (ogs.push(`<meta name="twitter:card" content="summary_large_image">`)); // skipcq: JS-0093 !fileData.is.video && (ogs.push(`<meta name="twitter:card" content="summary_large_image">`)); // skipcq: JS-0093
@ -41,6 +45,8 @@ router.get('/', (req, res, next) => data.get(req.ass.resourceId).then((fileData)
uploader: users[fileData.token].username, uploader: users[fileData.token].username,
timestamp: formatTimestamp(fileData.timestamp, fileData.timeoffset), timestamp: formatTimestamp(fileData.timestamp, fileData.timeoffset),
size: formatBytes(fileData.size), size: formatBytes(fileData.size),
// todo: figure out how to not ignore this
// @ts-ignore
color: getResourceColor(fileData.opengraph.color || null, fileData.vibrant), color: getResourceColor(fileData.opengraph.color || null, fileData.vibrant),
resourceAttr: { src: getDirectUrl(resourceId) }, resourceAttr: { src: getDirectUrl(resourceId) },
discordUrl: `${getDirectUrl(resourceId)}${fileData.ext}`, discordUrl: `${getDirectUrl(resourceId)}${fileData.ext}`,
@ -53,19 +59,19 @@ router.get('/', (req, res, next) => data.get(req.ass.resourceId).then((fileData)
}).catch(next)); }).catch(next));
// Direct resource // Direct resource
router.get('/direct*', (req, res, next) => data.get(req.ass.resourceId).then((fileData) => { router.get('/direct*', (req: AssRequest, res: AssResponse, next) => data.get(req.ass?.resourceId).then((fileData: FileData) => {
// Send file as an attachement for downloads // Send file as an attachement for downloads
if (req.query.download) if (req.query.download)
res.header('Content-Disposition', `attachment; filename="${fileData.originalname}"`); res.header('Content-Disposition', `attachment; filename="${fileData.originalname}"`);
// Return the file differently depending on what storage option was used // Return the file differently depending on what storage option was used
const uploaders = { const uploaders = {
s3: () => fetch(getS3url(fileData.randomId, fileData.ext)).then((file) => { s3: () => fetch(getS3url(fileData.randomId, fileData.ext)).then((file: Response) => {
file.headers.forEach((value, header) => res.setHeader(header, value)); file.headers.forEach((value, header) => res.setHeader(header, value));
file.body.pipe(res); file.body?.pipe(res);
}), }),
local: () => { local: () => {
res.header('Accept-Ranges', 'bytes').header('Content-Length', fileData.size).type(fileData.mimetype); res.header('Accept-Ranges', 'bytes').header('Content-Length', `${fileData.size}`).type(fileData.mimetype);
fs.createReadStream(fileData.path).pipe(res); fs.createReadStream(fileData.path).pipe(res);
} }
}; };
@ -74,33 +80,37 @@ router.get('/direct*', (req, res, next) => data.get(req.ass.resourceId).then((fi
}).catch(next)); }).catch(next));
// Thumbnail response // Thumbnail response
router.get('/thumbnail', (req, res, next) => router.get('/thumbnail', (req: AssRequest, res: AssResponse, next) =>
data.get(req.ass.resourceId) data.get(req.ass?.resourceId)
.then(({ is, thumbnail }) => fs.readFile((!is || (is.image || is.video)) ? path(diskFilePath, 'thumbnails/', thumbnail) : is.audio ? 'views/ass-audio-icon.png' : 'views/ass-file-icon.png')) .then(({ is, thumbnail }: { is: IsPossible, thumbnail: string }) => fs.readFile((!is || (is.image || is.video)) ? path(diskFilePath, 'thumbnails/', thumbnail) : is.audio ? 'views/ass-audio-icon.png' : 'views/ass-file-icon.png'))
.then((fileData) => res.type('jpg').send(fileData)) .then((fileData: Buffer) => res.type('jpg').send(fileData))
.catch(next)); .catch(next));
// oEmbed response for clickable authors/providers // oEmbed response for clickable authors/providers
// https://oembed.com/ // https://oembed.com/
// https://old.reddit.com/r/discordapp/comments/82p8i6/a_basic_tutorial_on_how_to_get_the_most_out_of/ // https://old.reddit.com/r/discordapp/comments/82p8i6/a_basic_tutorial_on_how_to_get_the_most_out_of/
router.get('/oembed', (req, res, next) => router.get('/oembed', (req: AssRequest, res: AssResponse, next) =>
data.get(req.ass.resourceId) data.get(req.ass?.resourceId)
.then(({ opengraph, is, size, timestamp, timeoffset, originalname }) => .then((fileData: FileData) =>
res.type('json').send({ res.type('json').send({
version: '1.0', version: '1.0',
type: is.video ? 'video' : is.image ? 'photo' : 'link', type: fileData.is.video ? 'video' : fileData.is.image ? 'photo' : 'link',
author_url: opengraph.authorUrl, author_url: fileData.opengraph.authorUrl,
provider_url: opengraph.providerUrl, provider_url: fileData.opengraph.providerUrl,
author_name: replaceholder(opengraph.author || '', size, timestamp, timeoffset, originalname), // todo: figure out how to not ignore this
provider_name: replaceholder(opengraph.provider || '', size, timestamp, timeoffset, originalname) // @ts-ignore
author_name: replaceholder(fileData.opengraph.author || '', fileData.size, fileData.timestamp, fileData.timeoffset, fileData.originalname),
// todo: figure out how to not ignore this
// @ts-ignore
provider_name: replaceholder(fileData.opengraph.provider || '', fileData.size, fileData.timestamp, fileData.timeoffset, fileData.originalname)
})) }))
.catch(next)); .catch(next));
// Delete file // Delete file
router.get('/delete/:deleteId', (req, res, next) => { router.get('/delete/:deleteId', (req: AssRequest, res: AssResponse, next) => {
let oldName, oldType; // skipcq: JS-0119 let oldName: string, oldType: string; // skipcq: JS-0119
data.get(req.ass.resourceId) data.get(req.ass?.resourceId)
.then((fileData) => { .then((fileData: FileData) => {
// Extract info for logs // Extract info for logs
oldName = fileData.originalname; oldName = fileData.originalname;
oldType = fileData.mimetype; oldType = fileData.mimetype;
@ -117,9 +127,9 @@ router.get('/delete/:deleteId', (req, res, next) => {
(!fileData.is || (fileData.is.image || fileData.is.video)) && fs.existsSync(path(diskFilePath, 'thumbnails/', fileData.thumbnail)) (!fileData.is || (fileData.is.image || fileData.is.video)) && fs.existsSync(path(diskFilePath, 'thumbnails/', fileData.thumbnail))
? fs.rmSync(path(diskFilePath, 'thumbnails/', fileData.thumbnail)) : () => Promise.resolve()]); ? fs.rmSync(path(diskFilePath, 'thumbnails/', fileData.thumbnail)) : () => Promise.resolve()]);
}) })
.then(() => data.del(req.ass.resourceId)) .then(() => data.del(req.ass?.resourceId))
.then(() => (log.success('Deleted', oldName, oldType), res.type('text').send('File has been deleted!'))) // skipcq: JS-0090 .then(() => (log.success('Deleted', oldName, oldType), res.type('text').send('File has been deleted!'))) // skipcq: JS-0090
.catch(next); .catch(next);
}); });
module.exports = router; export default router;

@ -1,24 +1,27 @@
const fs = require('fs-extra'); import { FileData, AssRequest, AssResponse, ErrWrap, User } from "../definitions";
const bb = require('express-busboy');
import fs from 'fs-extra';
import bb from 'express-busboy';
//const rateLimit = require('express-rate-limit'); //const rateLimit = require('express-rate-limit');
const { DateTime } = require('luxon'); import { DateTime } from 'luxon';
const { Webhook, MessageBuilder } = require('discord-webhook-node'); import { Webhook, MessageBuilder } from 'discord-webhook-node';
const { processUploaded } = require('../storage'); import { processUploaded } from '../storage';
const { maxUploadSize, resourceIdSize, gfyIdSize, resourceIdType, spaceReplace } = require('../config.json'); const { maxUploadSize, resourceIdSize, gfyIdSize, resourceIdType, spaceReplace } = require('../../config.json');
const { path, log, verify, getTrueHttp, getTrueDomain, generateId, formatBytes } = require('../utils'); import { path, log, verify, getTrueHttp, getTrueDomain, generateId, formatBytes } from '../utils';
const { CODE_UNAUTHORIZED, CODE_PAYLOAD_TOO_LARGE } = require('../MagicNumbers.json'); const { CODE_UNAUTHORIZED, CODE_PAYLOAD_TOO_LARGE } = require('../../MagicNumbers.json');
const data = require('../data'); import { data } from '../data';
const users = require('../auth'); import { users } from '../auth';
const ASS_LOGO = 'https://cdn.discordapp.com/icons/848274994375294986/8d339d4a2f3f54b2295e5e0ff62bd9e6.png?size=1024'; const ASS_LOGO = 'https://cdn.discordapp.com/icons/848274994375294986/8d339d4a2f3f54b2295e5e0ff62bd9e6.png?size=1024';
const express = require('express'); import express from 'express';
const router = express.Router(); const router = express.Router();
// Set up express-busboy // Set up express-busboy
// @ts-ignore
bb.extend(router, { bb.extend(router, {
upload: true, upload: true,
restrictMultiple: true, restrictMultiple: true,
allowedPath: (url) => url === '/', allowedPath: (url: string) => url === '/',
}); });
// Rate limit middleware // Rate limit middleware
@ -28,7 +31,7 @@ bb.extend(router, {
})); */ })); */
// Block unauthorized requests and attempt token sanitization // Block unauthorized requests and attempt token sanitization
router.post('/', (req, res, next) => { router.post('/', (req: AssRequest, res: AssResponse, next: Function) => {
req.headers.authorization = req.headers.authorization || ''; req.headers.authorization = req.headers.authorization || '';
req.token = req.headers.authorization.replace(/[^\da-z]/gi, ''); // Strip anything that isn't a digit or ASCII letter req.token = req.headers.authorization.replace(/[^\da-z]/gi, ''); // Strip anything that isn't a digit or ASCII letter
!verify(req, users) ? log.warn('Upload blocked', 'Unauthorized').callback(() => res.sendStatus(CODE_UNAUTHORIZED)) : next(); // skipcq: JS-0093 !verify(req, users) ? log.warn('Upload blocked', 'Unauthorized').callback(() => res.sendStatus(CODE_UNAUTHORIZED)) : next(); // skipcq: JS-0093
@ -38,28 +41,28 @@ router.post('/', (req, res, next) => {
router.post('/', processUploaded); router.post('/', processUploaded);
// Max upload size error handling // Max upload size error handling
router.use('/', (err, _req, res, next) => err.message === 'LIMIT_FILE_SIZE' ? log.warn('Upload blocked', 'File too large').callback(() => res.status(CODE_PAYLOAD_TOO_LARGE).send(`Max upload size: ${maxUploadSize}MB`)) : next(err)); // skipcq: JS-0229 router.use('/', (err: ErrWrap, _req: AssRequest, res: AssResponse, next: Function) => err.message === 'LIMIT_FILE_SIZE' ? log.warn('Upload blocked', 'File too large').callback(() => res.status(CODE_PAYLOAD_TOO_LARGE).send(`Max upload size: ${maxUploadSize}MB`)) : next(err)); // skipcq: JS-0229
// Process uploaded file // Process uploaded file
router.post('/', (req, res, next) => { router.post('/', (req: AssRequest, res: AssResponse, next: Function) => {
// Load overrides // Load overrides
const trueDomain = getTrueDomain(req.headers['x-ass-domain']); const trueDomain = getTrueDomain(req.headers['x-ass-domain']);
const generator = req.headers['x-ass-access'] || resourceIdType; const generator = req.headers['x-ass-access'] || resourceIdType;
// Save domain with file // Save domain with file
req.file.domain = `${getTrueHttp()}${trueDomain}`; req.file!.domain = `${getTrueHttp()}${trueDomain}`;
// Get the uploaded time in milliseconds // Get the uploaded time in milliseconds
req.file.timestamp = DateTime.now().toMillis(); req.file!.timestamp = DateTime.now().toMillis();
// Save the timezone offset // Save the timezone offset
req.file.timeoffset = req.headers['x-ass-timeoffset'] || 'UTC+0'; req.file!.timeoffset = req.headers['x-ass-timeoffset']?.toString() || 'UTC+0';
// Keep track of the token that uploaded the resource // Keep track of the token that uploaded the resource
req.file.token = req.token; req.file!.token = req.token ?? '';
// Attach any embed overrides, if necessary // Attach any embed overrides, if necessary
req.file.opengraph = { req.file!.opengraph = {
title: req.headers['x-ass-og-title'], title: req.headers['x-ass-og-title'],
description: req.headers['x-ass-og-description'], description: req.headers['x-ass-og-description'],
author: req.headers['x-ass-og-author'], author: req.headers['x-ass-og-author'],
@ -70,13 +73,13 @@ router.post('/', (req, res, next) => {
}; };
// Fix spaces in originalname // Fix spaces in originalname
req.file.originalname = req.file.originalname.replace(/\s/g, spaceReplace === '!' ? '' : spaceReplace); req.file!.originalname = req.file!.originalname.replace(/\s/g, spaceReplace === '!' ? '' : spaceReplace);
// Generate a unique resource ID // Generate a unique resource ID
let resourceId = ''; let resourceId = '';
// Function to call to generate a fresh ID. Used for multiple attempts in case an ID is already taken // Function to call to generate a fresh ID. Used for multiple attempts in case an ID is already taken
const gen = () => generateId(generator, resourceIdSize, req.headers['x-ass-gfycat'] || gfyIdSize, req.file.originalname); const gen = () => generateId(generator, resourceIdSize, req.headers['x-ass-gfycat'] || gfyIdSize, req.file!.originalname);
// Keeps track of the number of attempts in case all ID's are taken // Keeps track of the number of attempts in case all ID's are taken
const attempts = { const attempts = {
@ -85,11 +88,11 @@ router.post('/', (req, res, next) => {
}; };
// Called by a promise, this will recursively resolve itself until a unique ID is found // Called by a promise, this will recursively resolve itself until a unique ID is found
function genCheckId(resolve, reject) { function genCheckId(resolve: Function, reject: Function) {
const uniqueId = gen(); const uniqueId = gen();
attempts.count++; attempts.count++;
data.has(uniqueId) data.has(uniqueId)
.then((exists) => { .then((exists: boolean) => {
log.debug('ID check', exists ? 'Taken' : 'Available'); log.debug('ID check', exists ? 'Taken' : 'Available');
return attempts.count - 1 >= attempts.max ? reject(new Error('No ID\'s remaining')) : exists ? genCheckId(resolve, reject) : resolve(uniqueId); return attempts.count - 1 >= attempts.max ? reject(new Error('No ID\'s remaining')) : exists ? genCheckId(resolve, reject) : resolve(uniqueId);
}) })
@ -98,19 +101,20 @@ router.post('/', (req, res, next) => {
new Promise((resolve, reject) => genCheckId(resolve, reject)) new Promise((resolve, reject) => genCheckId(resolve, reject))
.then((uniqueId) => { .then((uniqueId) => {
//@ts-ignore
resourceId = uniqueId; resourceId = uniqueId;
log.debug('Saving data', data.name); log.debug('Saving data', data.name);
}) })
.then(() => data.put(resourceId.split('.')[0], req.file)) .then(() => data.put(resourceId.split('.')[0], req.file))
.then(() => { .then(() => {
// Log the upload // Log the upload
const logInfo = `${req.file.originalname} (${req.file.mimetype}, ${formatBytes(req.file.size)})`; const logInfo = `${req.file!.originalname} (${req.file!.mimetype}, ${formatBytes(req.file!.size)})`;
log.success('File uploaded', logInfo, `uploaded by ${users[req.token] ? users[req.token].username : '<token-only>'}`); log.success('File uploaded', logInfo, `uploaded by ${users[req.token ?? ''] ? users[req.token ?? ''].username : '<token-only>'}`);
// Build the URLs // Build the URLs
const resourceUrl = `${getTrueHttp()}${trueDomain}/${resourceId}`; const resourceUrl = `${getTrueHttp()}${trueDomain}/${resourceId}`;
const thumbnailUrl = `${getTrueHttp()}${trueDomain}/${resourceId}/thumbnail`; const thumbnailUrl = `${getTrueHttp()}${trueDomain}/${resourceId}/thumbnail`;
const deleteUrl = `${getTrueHttp()}${trueDomain}/${resourceId}/delete/${req.file.deleteId}`; const deleteUrl = `${getTrueHttp()}${trueDomain}/${resourceId}/delete/${req.file!.deleteId}`;
// Send the response // Send the response
res.type('json').send({ resource: resourceUrl, thumbnail: thumbnailUrl, delete: deleteUrl }) res.type('json').send({ resource: resourceUrl, thumbnail: thumbnailUrl, delete: deleteUrl })
@ -121,17 +125,18 @@ router.post('/', (req, res, next) => {
if (req.headers['x-ass-webhook-url']) { if (req.headers['x-ass-webhook-url']) {
// Build the webhook // Build the webhook
const hook = new Webhook(req.headers['x-ass-webhook-url']); const hook = new Webhook(req.headers['x-ass-webhook-url']?.toString());
hook.setUsername(req.headers['x-ass-webhook-username'] || 'ass'); hook.setUsername(req.headers['x-ass-webhook-username']?.toString() || 'ass');
hook.setAvatar(req.headers['x-ass-webhook-avatar'] || ASS_LOGO); hook.setAvatar(req.headers['x-ass-webhook-avatar']?.toString() || ASS_LOGO);
// Build the embed // Build the embed
const embed = new MessageBuilder() const embed = new MessageBuilder()
.setTitle(logInfo) .setTitle(logInfo)
//@ts-ignore
.setURL(resourceUrl) .setURL(resourceUrl)
.setDescription(`**Size:** \`${formatBytes(req.file.size)}\`\n**[Delete](${deleteUrl})**`) .setDescription(`**Size:** \`${formatBytes(req.file!.size)}\`\n**[Delete](${deleteUrl})**`)
.setThumbnail(thumbnailUrl) .setThumbnail(thumbnailUrl)
.setColor(req.file.vibrant) .setColor(req.file!.vibrant)
.setTimestamp(); .setTimestamp();
// Send the embed to the webhook, then delete the client after to free resources // Send the embed to the webhook, then delete the client after to free resources
@ -142,19 +147,25 @@ router.post('/', (req, res, next) => {
} }
// Also update the users upload count // Also update the users upload count
if (!users[req.token]) { if (!users[req.token ?? '']) {
const generateUsername = () => generateId('random', 20, null); // skipcq: JS-0074 const generateUsername = () => generateId('random', 20, 0, req.file!.size.toString()); // skipcq: JS-0074
let username = generateUsername(); let username: string = generateUsername();
while (Object.values(users).findIndex((user) => user.username === username) !== -1) // skipcq: JS-0073
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore
while (Object.values(users).findIndex((user: User) => user.username === username) !== -1) // skipcq: JS-0073
username = generateUsername(); username = generateUsername();
users[req.token] = { username, count: 0 };
users[req.token ?? ''] = { username, count: 0 };
} }
users[req.token].count += 1; users[req.token ?? ''].count += 1;
fs.writeJsonSync(path('auth.json'), { users }, { spaces: 4 }); fs.writeJsonSync(path('auth.json'), { users }, { spaces: 4 });
log.debug('Upload request flow completed', ''); log.debug('Upload request flow completed', '');
}); });
}).catch(next); })
//@ts-ignore
.catch(next);
}); });
module.exports = router; export default router;

@ -55,7 +55,7 @@ function getConfirmSchema(description) {
// If directly called on the command line, run setup script // If directly called on the command line, run setup script
function doSetup() { function doSetup() {
const path = (...paths) => require('path').join(__dirname, ...paths); const path = (...paths) => require('path').join(process.cwd(), ...paths);
const TLog = require('@tycrek/log'); const TLog = require('@tycrek/log');
const fs = require('fs-extra'); const fs = require('fs-extra');
const prompt = require('prompt'); const prompt = require('prompt');
@ -66,7 +66,7 @@ function doSetup() {
// Override default configs with existing configs to allow migrating configs // Override default configs with existing configs to allow migrating configs
// Now that's a lot of configs! // Now that's a lot of configs!
try { try {
const existingConfig = require('./config.json'); const existingConfig = require('../config.json');
Object.entries(existingConfig).forEach(([key, value]) => { Object.entries(existingConfig).forEach(([key, value]) => {
Object.prototype.hasOwnProperty.call(config, key) && (config[key] = value); // skipcq: JS-0093 Object.prototype.hasOwnProperty.call(config, key) && (config[key] = value); // skipcq: JS-0093
Object.prototype.hasOwnProperty.call(s3config, key) && (s3config[key] = value); // skipcq: JS-0093 Object.prototype.hasOwnProperty.call(s3config, key) && (s3config[key] = value); // skipcq: JS-0093
@ -278,7 +278,7 @@ function doSetup() {
// Complete & exit // Complete & exit
.then(() => log.blank().success('Setup complete').callback(() => process.exit(0))) .then(() => log.blank().success('Setup complete').callback(() => process.exit(0)))
.catch((err) => log.blank().error(err)); .catch((err) => log.blank().error(err).callback(() => process.exit(1)));
} }
module.exports = { module.exports = {

@ -3,12 +3,12 @@
const fs = require('fs-extra'); const fs = require('fs-extra');
const aws = require('aws-sdk'); const aws = require('aws-sdk');
const Thumbnail = require('./thumbnails'); const Thumbnail = require('./thumbnails').default;
const Vibrant = require('./vibrant'); const Vibrant = require('./vibrant').default;
const Hash = require('./hash'); const Hash = require('./hash').default;
const { getDatedDirname, generateId, log } = require('./utils'); const { getDatedDirname, generateId, log } = require('./utils');
const { s3enabled, s3endpoint, s3bucket, s3usePathStyle, s3accessKey, s3secretKey, saveAsOriginal, mediaStrict, maxUploadSize } = require('./config.json'); const { s3enabled, s3endpoint, s3bucket, s3usePathStyle, s3accessKey, s3secretKey, saveAsOriginal, mediaStrict, maxUploadSize } = require('../config.json');
const { CODE_UNSUPPORTED_MEDIA_TYPE } = require('./MagicNumbers.json'); const { CODE_UNSUPPORTED_MEDIA_TYPE } = require('../MagicNumbers.json');
const ID_GEN_LENGTH = 32; const ID_GEN_LENGTH = 32;
const ALLOWED_MIMETYPES = /(image)|(video)|(audio)\//; const ALLOWED_MIMETYPES = /(image)|(video)|(audio)\//;

@ -1,9 +1,11 @@
const ffmpeg = require('ffmpeg-static'); import { FileData } from "./definitions";
const Jimp = require('jimp'); import ffmpeg from 'ffmpeg-static';
const shell = require('any-shell-escape'); import Jimp from 'jimp';
const { exec } = require('child_process'); // @ts-ignore
const { isProd, path } = require('./utils'); import shell from 'any-shell-escape';
const { diskFilePath } = require('./config.json'); import { exec } from 'child_process';
import { isProd, path } from './utils';
const { diskFilePath } = require('../config.json');
// Thumbnail parameters // Thumbnail parameters
const THUMBNAIL = { const THUMBNAIL = {
@ -18,7 +20,7 @@ const THUMBNAIL = {
* @param {String} dest Path of the output file * @param {String} dest Path of the output file
* @returns {String} The command to execute * @returns {String} The command to execute
*/ */
function getCommand(src, dest) { function getCommand(src: String, dest: String) {
return shell([ return shell([
ffmpeg, '-y', ffmpeg, '-y',
'-v', (isProd ? 'error' : 'debug'), // Log level '-v', (isProd ? 'error' : 'debug'), // Log level
@ -35,7 +37,7 @@ function getCommand(src, dest) {
* @param {String} oldName The original filename * @param {String} oldName The original filename
* @returns {String} The filename for the thumbnail * @returns {String} The filename for the thumbnail
*/ */
function getNewName(oldName) { function getNewName(oldName: String) {
return oldName.concat('.thumbnail.jpg'); return oldName.concat('.thumbnail.jpg');
} }
@ -44,7 +46,7 @@ function getNewName(oldName) {
* @param {String} oldName The original filename * @param {String} oldName The original filename
* @returns {String} The path to the thumbnail * @returns {String} The path to the thumbnail
*/ */
function getNewNamePath(oldName) { function getNewNamePath(oldName: String) {
return path(diskFilePath, 'thumbnails/', getNewName(oldName)); return path(diskFilePath, 'thumbnails/', getNewName(oldName));
} }
@ -52,10 +54,11 @@ function getNewNamePath(oldName) {
* Extracts an image from a video file to use as a thumbnail, using ffmpeg * Extracts an image from a video file to use as a thumbnail, using ffmpeg
* @param {*} file The video file to pull a frame from * @param {*} file The video file to pull a frame from
*/ */
function getVideoThumbnail(file) { function getVideoThumbnail(file: FileData) {
return new Promise((resolve, reject) => exec( return new Promise((resolve: Function, reject: Function) => exec(
getCommand(file.path, getNewNamePath(file.randomId)), getCommand(file.path, getNewNamePath(file.randomId)),
(err) => (err ? reject(err) : resolve()) // @ts-ignore
(err: Error) => (err ? reject(err) : resolve())
)); ));
} }
@ -63,7 +66,7 @@ function getVideoThumbnail(file) {
* Generates a thumbnail for the provided image * Generates a thumbnail for the provided image
* @param {*} file The file to generate a thumbnail for * @param {*} file The file to generate a thumbnail for
*/ */
function getImageThumbnail(file) { function getImageThumbnail(file: FileData) {
return new Promise((resolve, reject) => return new Promise((resolve, reject) =>
Jimp.read(file.path) Jimp.read(file.path)
.then((image) => image .then((image) => image
@ -79,7 +82,7 @@ function getImageThumbnail(file) {
* @param {*} file The file to generate a thumbnail for * @param {*} file The file to generate a thumbnail for
* @returns The thumbnail filename (NOT the path) * @returns The thumbnail filename (NOT the path)
*/ */
module.exports = (file) => export default (file: FileData) =>
new Promise((resolve, reject) => new Promise((resolve, reject) =>
(file.is.video ? getVideoThumbnail : file.is.image ? getImageThumbnail : () => Promise.resolve())(file) (file.is.video ? getVideoThumbnail : file.is.image ? getImageThumbnail : () => Promise.resolve())(file)
.then(() => resolve((file.is.video || file.is.image) ? getNewName(file.randomId) : file.is.audio ? 'views/ass-audio-icon.png' : 'views/ass-file-icon.png')) .then(() => resolve((file.is.video || file.is.image) ? getNewName(file.randomId) : file.is.audio ? 'views/ass-audio-icon.png' : 'views/ass-file-icon.png'))

@ -1,39 +1,41 @@
const fs = require('fs-extra'); import { AssRequest, FileData } from './definitions';
const Path = require('path'); import fs from 'fs-extra';
const TLog = require('@tycrek/log'); import Path from 'path';
const fetch = require('node-fetch'); import fetch from 'node-fetch';
const sanitize = require('sanitize-filename'); import sanitize from 'sanitize-filename';
const { DateTime } = require('luxon'); import { DateTime } from 'luxon';
const token = require('./generators/token'); import token from './generators/token';
const zwsGen = require('./generators/zws'); import zwsGen from './generators/zws';
const randomGen = require('./generators/random'); import randomGen from './generators/random';
const gfyGen = require('./generators/gfycat'); import gfyGen from './generators/gfycat';
const { HTTP, HTTPS, KILOBYTES } = require('./MagicNumbers.json'); import logger from './logger';
const { HTTP, HTTPS, KILOBYTES } = require('../MagicNumbers.json');
// Catch config.json not existing when running setup script // Catch config.json not existing when running setup script
try { try {
var { useSsl, port, domain, isProxied, diskFilePath, saveWithDate, s3bucket, s3endpoint, s3usePathStyle } = require('./config.json'); // skipcq: JS-0239, JS-0102 var { useSsl, port, domain, isProxied, diskFilePath, saveWithDate, s3bucket, s3endpoint, s3usePathStyle } = require('../config.json'); // skipcq: JS-0239, JS-0102
} catch (ex) { } catch (ex) {
// @ts-ignore
if (ex.code !== 'MODULE_NOT_FOUND') console.error(ex); if (ex.code !== 'MODULE_NOT_FOUND') console.error(ex);
} }
function getTrueHttp() { export function getTrueHttp() {
return ('http').concat(useSsl ? 's' : '').concat('://'); return ('http').concat(useSsl ? 's' : '').concat('://');
} }
function getTrueDomain(d = domain) { export function getTrueDomain(d = domain) {
return d.concat((port === HTTP || port === HTTPS || isProxied) ? '' : `:${port}`); return d.concat((port === HTTP || port === HTTPS || isProxied) ? '' : `:${port}`);
} }
function getS3url(s3key, ext) { export function getS3url(s3key: string, ext: string) {
return `https://${s3usePathStyle ? `${s3endpoint}/${s3bucket}` : `${s3bucket}.${s3endpoint}`}/${s3key}${ext}`; return `https://${s3usePathStyle ? `${s3endpoint}/${s3bucket}` : `${s3bucket}.${s3endpoint}`}/${s3key}${ext}`;
} }
function getDirectUrl(resourceId) { export function getDirectUrl(resourceId: string) {
return `${getTrueHttp()}${getTrueDomain()}/${resourceId}/direct`; return `${getTrueHttp()}${getTrueDomain()}/${resourceId}/direct`;
} }
function randomHexColour() { // From: https://www.geeksforgeeks.org/javascript-generate-random-hex-codes-color/ export function randomHexColour() { // From: https://www.geeksforgeeks.org/javascript-generate-random-hex-codes-color/
const letters = '0123456789ABCDEF'; const letters = '0123456789ABCDEF';
let colour = '#'; let colour = '#';
for (let i = 0; i < 6; i++) // skipcq: JS-0074 for (let i = 0; i < 6; i++) // skipcq: JS-0074
@ -41,29 +43,29 @@ function randomHexColour() { // From: https://www.geeksforgeeks.org/javascript-g
return colour; return colour;
} }
function getResourceColor(colorValue, vibrantValue) { export function getResourceColor(colorValue: string, vibrantValue: string) {
return colorValue === '&random' ? randomHexColour() : colorValue === '&vibrant' ? vibrantValue : colorValue; return colorValue === '&random' ? randomHexColour() : colorValue === '&vibrant' ? vibrantValue : colorValue;
} }
function formatTimestamp(timestamp, timeoffset) { export function formatTimestamp(timestamp: number, timeoffset: string) {
return DateTime.fromMillis(timestamp).setZone(timeoffset).toLocaleString(DateTime.DATETIME_MED); return DateTime.fromMillis(timestamp).setZone(timeoffset).toLocaleString(DateTime.DATETIME_MED);
} }
function formatBytes(bytes, decimals = 2) { // skipcq: JS-0074 export function formatBytes(bytes: number, decimals = 2) { // skipcq: JS-0074
if (bytes === 0) return '0 Bytes'; if (bytes === 0) return '0 Bytes';
const sizes = ['Bytes', 'KB', 'MB', 'GB', 'TB', 'PB', 'EB', 'ZB', 'YB']; const sizes = ['Bytes', 'KB', 'MB', 'GB', 'TB', 'PB', 'EB', 'ZB', 'YB'];
const i = Math.floor(Math.log(bytes) / Math.log(KILOBYTES)); const i = Math.floor(Math.log(bytes) / Math.log(KILOBYTES));
return parseFloat((bytes / Math.pow(KILOBYTES, i)).toFixed(decimals < 0 ? 0 : decimals)).toString().concat(` ${sizes[i]}`); return parseFloat((bytes / Math.pow(KILOBYTES, i)).toFixed(decimals < 0 ? 0 : decimals)).toString().concat(` ${sizes[i]}`);
} }
function replaceholder(data, size, timestamp, timeoffset, originalname) { export function replaceholder(data: string, size: number, timestamp: number, timeoffset: string, originalname: string) {
return data return data
.replace(/&size/g, formatBytes(size)) .replace(/&size/g, formatBytes(size))
.replace(/&filename/g, originalname) .replace(/&filename/g, originalname)
.replace(/&timestamp/g, formatTimestamp(timestamp, timeoffset)); .replace(/&timestamp/g, formatTimestamp(timestamp, timeoffset));
} }
function getDatedDirname() { export function getDatedDirname() {
if (!saveWithDate) return diskFilePath; if (!saveWithDate) return diskFilePath;
// Get current month and year // Get current month and year
@ -73,23 +75,20 @@ function getDatedDirname() {
return `${diskFilePath}${diskFilePath.endsWith('/') ? '' : '/'}${year}-${`0${month}`.slice(-2)}`; // skipcq: JS-0074 return `${diskFilePath}${diskFilePath.endsWith('/') ? '' : '/'}${year}-${`0${month}`.slice(-2)}`; // skipcq: JS-0074
} }
// Set up pathing & the logger export function arrayEquals(arr1: any[], arr2: any[]) {
const path = (...paths) => Path.join(__dirname, ...paths); return arr1.length === arr2.length && arr1.slice().sort().every((value: string, index: number) => value === arr2.slice().sort()[index])
const logger = new TLog({ };
level: process.env.LOG_LEVEL || (process.env.NODE_ENV === 'production' ? 'info' : 'debug'),
timestamp: { export function verify(req: AssRequest, users: JSON) {
enabled: true, return req.headers.authorization && Object.prototype.hasOwnProperty.call(users, req.headers.authorization);
colour: 'grey', }
preset: 'DATETIME_MED'
}, export function generateId(mode: string, length: number, gfyLength: number, originalName: string) {
}); return (GENERATORS.has(mode) ? GENERATORS.get(mode)({ length, gfyLength }) : originalName);
}
// Enable the Express logger
logger // Set up pathing
.env('ASS_ENV') export const path = (...paths: string[]) => Path.join(process.cwd(), ...paths); // '..' was added to make it easier to run files after moving the project to src/
//.enable.process({ uncaughtException: false }).debug('Plugin enabled', 'Process')
.enable.express({ handle500: false }).debug('Plugin enabled', 'Express')
.enable.socket().debug('Plugin enabled', 'Socket');
const idModes = { const idModes = {
zws: 'zws', // Zero-width spaces (see: https://zws.im/) zws: 'zws', // Zero-width spaces (see: https://zws.im/)
@ -103,8 +102,8 @@ GENERATORS.set(idModes.zws, zwsGen);
GENERATORS.set(idModes.r, randomGen); GENERATORS.set(idModes.r, randomGen);
GENERATORS.set(idModes.gfy, gfyGen); GENERATORS.set(idModes.gfy, gfyGen);
export const isProd = require('@tycrek/isprod')();
module.exports = { module.exports = {
isProd: require('@tycrek/isprod')(),
path, path,
getTrueHttp, getTrueHttp,
getTrueDomain, getTrueDomain,
@ -117,26 +116,27 @@ module.exports = {
getDatedDirname, getDatedDirname,
randomHexColour, randomHexColour,
sanitize, sanitize,
verify: (req, users) => req.headers.authorization && Object.prototype.hasOwnProperty.call(users, req.headers.authorization), verify,
renameFile: (req, newName) => new Promise((resolve, reject) => { renameFile: (req: AssRequest, newName: string) => new Promise((resolve: Function, reject) => {
try { try {
const paths = [req.file.destination, newName]; const paths = [req.file!.destination, newName];
fs.rename(path(req.file.path), path(...paths)); fs.rename(path(req.file!.path), path(...paths));
req.file.path = Path.join(...paths); req.file!.path = Path.join(...paths);
resolve(); resolve();
} catch (err) { } catch (err) {
reject(err); reject(err);
} }
}), }),
generateToken: () => token(), generateToken: () => token(),
generateId: (mode, length, gfyLength, originalName) => (GENERATORS.has(mode) ? GENERATORS.get(mode)({ length, gfyLength }) : originalName), generateId,
arrayEquals: (arr1, arr2) => arr1.length === arr2.length && arr1.slice().sort().every((value, index) => value === arr2.slice().sort()[index]), arrayEquals,
downloadTempS3: (file) => new Promise((resolve, reject) => downloadTempS3: (file: FileData) => new Promise((resolve: Function, reject) =>
fetch(getS3url(file.randomId, file.ext)) fetch(getS3url(file.randomId, file.ext))
.then((f2) => f2.body.pipe(fs.createWriteStream(Path.join(__dirname, diskFilePath, sanitize(file.originalname))).on('close', () => resolve()))) .then((f2) => f2.body.pipe(fs.createWriteStream(Path.join(__dirname, diskFilePath, sanitize(file.originalname))).on('close', () => resolve())))
.catch(reject)), .catch(reject)),
} }
export const log = logger;
/** /**
* @type {TLog} * @type {TLog}
*/ */

@ -1,5 +1,6 @@
const Vibrant = require('node-vibrant'); import { FileData } from './definitions';
const { randomHexColour } = require('./utils'); import Vibrant from 'node-vibrant';
import { randomHexColour } from './utils';
// Vibrant parameters // Vibrant parameters
const COLOR_COUNT = 256; const COLOR_COUNT = 256;
@ -11,13 +12,13 @@ const QUALITY = 3;
* @param {*} resolve Runs if Promise was successful * @param {*} resolve Runs if Promise was successful
* @param {*} reject Runs if Promise failed * @param {*} reject Runs if Promise failed
*/ */
function getVibrant(file, resolve, reject) { function getVibrant(file: FileData, resolve: Function, reject: Function) {
Vibrant.from(file.path) Vibrant.from(file.path)
.maxColorCount(COLOR_COUNT) .maxColorCount(COLOR_COUNT)
.quality(QUALITY) .quality(QUALITY)
.getPalette() .getPalette()
.then((palettes) => resolve(palettes[Object.keys(palettes).sort((a, b) => palettes[b].population - palettes[a].population)[0]].hex)) .then((palettes) => resolve(palettes[Object.keys(palettes).sort((a, b) => palettes[b]!.population - palettes[a]!.population)[0]]!.hex))
.catch(reject); .catch((err) => reject(err));
} }
/** /**
@ -25,4 +26,4 @@ function getVibrant(file, resolve, reject) {
* @param {*} file The file to get a colour from * @param {*} file The file to get a colour from
* @returns The Vibrant colour as a Hex value (or random Hex value for videos) * @returns The Vibrant colour as a Hex value (or random Hex value for videos)
*/ */
module.exports = (file) => new Promise((resolve, reject) => !file.is.image ? resolve(randomHexColour()) : getVibrant(file, resolve, reject)); // skipcq: JS-0229 export default (file: FileData) => new Promise((resolve, reject) => !file.is.image ? resolve(randomHexColour()) : getVibrant(file, resolve, reject)); // skipcq: JS-0229

@ -0,0 +1,19 @@
{
"extends": "@tsconfig/node14/tsconfig.json",
"compilerOptions": {
"outDir": "./dist",
"lib": [
"ES2020",
"DOM"
],
"allowJs": true,
"downlevelIteration": true
},
"include": [
"src/**/*.js",
"src/**/*.ts"
],
"exclude": [
"ass-x"
]
}
Loading…
Cancel
Save