0.4.0 - Merge pull request #18 from tycrek/s3

Josh Moore, 4 years ago (committed by GitHub)
commit 455cfbfbfe
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23

README.md

@ -21,6 +21,8 @@
- ✔️ File deletion
- ✔️ Usage metrics
- ✔️ Thumbnail support
- ✔️ Basic multi-user support
- ✔️ Local storage *or* block-storage support for [Amazon S3](https://aws.amazon.com/s3/) (including [DigitalOcean Spaces](https://www.digitalocean.com/products/spaces/))
- ✔️ Multiple access types
- **[ZWS](https://zws.im)**
- **Mixed-case alphanumeric**
@ -31,8 +33,6 @@
- **Mongo** (soon!)
- **MySQL** (soon!)
- **PostgreSQL** (soon!)
- ❌ Multi-user support (upload restrictions, web library, etc.)
- ❌ Block-storage support including Amazon S3
### Access types
@ -40,7 +40,7 @@
| ---- | ----------- |
| **[ZWS](https://zws.im)** (Zero-width spaces) | The "fancy" mode. When pasted elsewhere, the URL appears to be *just* your domain name. ![ZWS sample](https://user-images.githubusercontent.com/29926144/113785625-bf43a480-96f4-11eb-8dd7-7f164f33ada2.png "ZWS sample") |
| **Mixed-case alphanumeric** | The "safe" mode. URLs are browser-safe, as the character set is just letters & numbers. |
| **Gfycat** | Gfycat-style IDs (for example: `https://gfycat.com/unsungdiscretegrub` "unsung discrete grub") |
| **Gfycat** | Gfycat-style IDs (for example: `https://gfycat.com/unsungdiscretegrub` "unsung discrete grub"). Thanks to [Gfycat](https://gfycat.com) for the wordlists |
| **Original** | The "basic" mode. The URL uses the same filename the file was uploaded with. This may be prone to conflicts between files of the same name. |
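As a quick illustration of the access types above, here is a hypothetical client upload that picks a type per request via the `x-ass-access` header (the header is read in ass.js below; the `form-data` package, the token value, the file name, and the `upload.example.com` domain are placeholders, not part of this commit):

const fs = require('fs-extra');
const fetch = require('node-fetch');
const FormData = require('form-data'); // assumption: not a dependency of this project

const form = new FormData();
form.append('file', fs.createReadStream('screenshot.png')); // hypothetical file

fetch('https://upload.example.com/', {
	method: 'POST',
	body: form,
	headers: {
		authorization: 'your-upload-token', // a token from auth.json
		'x-ass-access': 'gfycat'            // one of: original, zws, random, gfycat
	}
})
	.then((res) => res.json())
	.then(console.log)  // { resource, thumbnail, delete }
	.catch(console.error);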
## Installation
@ -152,3 +152,5 @@ No strict contributing rules at this time. I appreciate any Issues or Pull Requests
## Credits
- Special thanks to [hlsl#1359](http://be.net/zevwolf) for the awesome logo!
- [@ToxicAven](https://github.com/ToxicAven) for the Flameshot script
- [Gfycat](https://gfycat.com) for their wordlists

_config.yml

@ -1 +0,0 @@
theme: jekyll-theme-minimal

ass.js

@ -7,21 +7,25 @@ try {
}
// Load the config
const { host, port, domain, useSsl, resourceIdSize, gfyIdSize, resourceIdType, isProxied, diskFilePath, saveWithDate, saveAsOriginal } = require('./config.json');
const { host, port, domain, useSsl, resourceIdSize, gfyIdSize, resourceIdType, isProxied, s3enabled, saveAsOriginal } = require('./config.json');
//#region Imports
const fs = require('fs-extra');
const express = require('express');
const escape = require('escape-html');
const useragent = require('express-useragent');
const rateLimit = require("express-rate-limit");
const fetch = require('node-fetch');
const marked = require('marked');
const multer = require('multer');
const DateTime = require('luxon').DateTime;
const { WebhookClient, MessageEmbed } = require('discord.js');
const OpenGraph = require('./ogp');
const Thumbnail = require('./thumbnails');
const Vibrant = require('./vibrant');
const { path, saveData, log, verify, generateToken, generateId, formatBytes, randomHexColour, arrayEquals } = require('./utils');
const Hash = require('./hash');
const Path = require('path');
const { uploadLocal, uploadS3, deleteS3 } = require('./storage');
const { path, saveData, log, verify, generateToken, generateId, formatBytes, arrayEquals, getS3url, downloadTempS3, sanitize } = require('./utils');
//#endregion
//#region Variables, module setup
@ -29,23 +33,6 @@ const ASS_LOGO = 'https://cdn.discordapp.com/icons/848274994375294986/8d339d4a2f
const app = express();
// Configure filename and location settings
const storage = multer.diskStorage({
filename: saveAsOriginal ? (_req, file, callback) => callback(null, file.originalname) : null,
destination: !saveWithDate ? diskFilePath : (_req, _file, callback) => {
// Get current month and year
let [month, _day, year] = new Date().toLocaleDateString("en-US").split("/");
// Add 0 before single-digit months (e.g. 6 becomes 06)
let folder = `${diskFilePath}/${year}-${("0" + month).slice(-2)}`;
// Create folder if it doesn't exist
fs.ensureDirSync(folder);
callback(null, folder);
}
});
var upload = multer({ storage });
var users = {};
var data = {};
//#endregion
@ -88,32 +75,73 @@ function startup() {
app.set('view engine', 'pug');
app.use(useragent.express());
// Rate limit
app.use(rateLimit({
windowMs: 1000 * 60, // 60 seconds
max: 90 // Limit each IP to 90 requests per windowMs
}));
// Don't process favicon requests
app.use((req, res, next) => req.url.includes('favicon.ico') ? res.sendStatus(204) : next());
// Middleware for parsing the resource ID and handling 404
app.use('/:resourceId', (req, res, next) => {
// Parse the resource ID
req.ass = { resourceId: req.params.resourceId.split('.')[0] };
// If the ID is invalid, return 404. Otherwise, continue normally
(!req.ass.resourceId || !data[req.ass.resourceId]) ? res.sendStatus(404) : next();
});
// Index
app.get('/', (_req, res) => fs.readFile(path('README.md')).then((bytes) => bytes.toString()).then(marked).then((data) => res.render('index', { data })));
// Rate limit
app.post('/', rateLimit({
windowMs: 1000 * 60, // 60 seconds
max: 30 // Limit each IP to 30 requests per windowMs
}));
// Block unauthorized requests and attempt token sanitization
app.post('/', (req, res, next) => {
req.token = req.headers.authorization.replace(/[^\da-z]/gi, '');
!verify(req, users) ? res.sendStatus(401) : next();
});
// Upload file
app.post('/', upload.single('file'), (req, res) => {
// Prevent uploads from unauthorized clients
if (!verify(req, users)) return res.sendStatus(401);
// Generate ID's to use for other functions
app.post('/', (req, _res, next) => (req.randomId = generateId('random', 32, null, null), next()));
app.post('/', (req, _res, next) => (req.deleteId = generateId('random', 32, null, null), next()));
// Upload file (local & S3)
s3enabled
? app.post('/', (req, res, next) => uploadS3(req, res, (error) => ((error) && console.error(error), next())))
: app.post('/', uploadLocal, ({ next }) => next());
// Pre-response operations
app.post('/', (req, _res, next) => {
req.file.randomId = req.randomId;
req.file.deleteId = req.deleteId;
// Sanitize filename just in case Multer didn't catch it
req.file.originalname = sanitize(req.file.originalname);
// Download a temp copy to work with if using S3 storage
(s3enabled ? downloadTempS3(req.file) : new Promise((resolve) => resolve()))
// Generate the Thumbnail, Vibrant, and SHA1 hash
.then(() => Promise.all([Thumbnail(req.file), Vibrant(req.file), Hash(req.file)]))
.then(([thumbnail, vibrant, sha1]) => (
req.file.thumbnail = thumbnail,
req.file.vibrant = vibrant,
req.file.sha1 = sha1
))
// Remove the temp file if using S3 storage, otherwise rename the local file
.then(() => s3enabled ? fs.remove(path('uploads/', req.file.originalname)) : renameFile(saveAsOriginal ? req.file.originalname : req.file.sha1))
.then(() => next())
.catch((err) => next(err));
function renameFile(newName) {
return new Promise((resolve, reject) => {
try {
let paths = [req.file.destination, newName];
fs.rename(path(req.file.path), path(...paths));
req.file.path = Path.join(...paths);
resolve();
} catch (err) {
reject(err);
}
});
}
});
// Process uploaded file
app.post('/', (req, res) => {
// Load overrides
let trueDomain = getTrueDomain(req.headers["x-ass-domain"]);
let generator = req.headers["x-ass-access"] || resourceIdType;
@ -122,8 +150,7 @@ function startup() {
req.file.timestamp = DateTime.now().toMillis();
// Keep track of the token that uploaded the resource
let uploadToken = req.headers.authorization;
req.file.token = uploadToken;
req.file.token = req.token;
// Attach any embed overrides, if necessary
req.file.opengraph = {
@ -136,80 +163,98 @@ function startup() {
color: req.headers['x-ass-og-color']
};
// Generate a thumbnail & get the Vibrant colour
Promise.all([Thumbnail(req.file), (req.file.mimetype.includes('video') ? randomHexColour() : Vibrant(req.file))])
.then(([thumbnail, vibrant]) => (req.file.thumbnail = thumbnail, req.file.vibrant = vibrant))
.catch(console.error)
// Finish processing the file
.then(() => {
// Save the file information
let resourceId = generateId(generator, resourceIdSize, req.headers['x-ass-gfycat'] || gfyIdSize, req.file.originalname);
data[resourceId.split('.')[0]] = req.file;
saveData(data);
// Save the file information
let resourceId = generateId(generator, resourceIdSize, req.headers['x-ass-gfycat'] || gfyIdSize, req.file.originalname);
data[resourceId.split('.')[0]] = req.file;
saveData(data);
// Log the upload
let logInfo = `${req.file.originalname} (${req.file.mimetype})`;
log(`Uploaded: ${logInfo} (user: ${users[uploadToken] ? users[uploadToken].username : '<token-only>'})`);
// Build the URLs
let resourceUrl = `${getTrueHttp()}${trueDomain}/${resourceId}`;
let thumbnailUrl = `${getTrueHttp()}${trueDomain}/${resourceId}/thumbnail`;
let deleteUrl = `${getTrueHttp()}${trueDomain}/delete/${req.file.filename}`;
// Send the response
res.type('json').send({ resource: resourceUrl, thumbnail: thumbnailUrl, delete: deleteUrl })
.on('finish', () => {
// After we have sent the user the response, also send a Webhook to Discord (if headers are present)
if (req.headers['x-ass-webhook-client'] && req.headers['x-ass-webhook-token']) {
// Build the webhook client & embed
let whc = new WebhookClient(req.headers['x-ass-webhook-client'], req.headers['x-ass-webhook-token']);
let embed = new MessageEmbed()
.setTitle(logInfo)
.setURL(resourceUrl)
.setDescription(`**Size:** \`${formatBytes(req.file.size)}\`\n**[Delete](${deleteUrl})**`)
.setThumbnail(thumbnailUrl)
.setColor(req.file.vibrant)
.setTimestamp(req.file.timestamp);
// Send the embed to the webhook, then delete the client after to free resources
whc.send(null, {
username: req.headers['x-ass-webhook-username'] || 'ass',
avatarURL: req.headers['x-ass-webhook-avatar'] || ASS_LOGO,
embeds: [embed]
}).then((_msg) => whc.destroy());
}
// Also update the user's upload count
if (!users[uploadToken]) {
let generator = () => generateId('random', 20, null);
let username = generator();
while (Object.values(users).findIndex((user) => user.username == username) != -1)
username = generator();
users[uploadToken] = { username, count: 0 };
}
users[uploadToken].count += 1;
fs.writeJsonSync(path('auth.json'), { users }, { spaces: 4 })
});
// Log the upload
let logInfo = `${req.file.originalname} (${req.file.mimetype})`;
log(`Uploaded: ${logInfo} (user: ${users[req.token] ? users[req.token].username : '<token-only>'})`);
// Build the URLs
let resourceUrl = `${getTrueHttp()}${trueDomain}/${resourceId}`;
let thumbnailUrl = `${getTrueHttp()}${trueDomain}/${resourceId}/thumbnail`;
let deleteUrl = `${getTrueHttp()}${trueDomain}/${resourceId}/delete/${req.file.deleteId}`;
// Send the response
res.type('json').send({ resource: resourceUrl, thumbnail: thumbnailUrl, delete: deleteUrl })
.on('finish', () => {
// After we have sent the user the response, also send a Webhook to Discord (if headers are present)
if (req.headers['x-ass-webhook-client'] && req.headers['x-ass-webhook-token']) {
// Build the webhook client & embed
let whc = new WebhookClient(req.headers['x-ass-webhook-client'], req.headers['x-ass-webhook-token']);
let embed = new MessageEmbed()
.setTitle(logInfo)
.setURL(resourceUrl)
.setDescription(`**Size:** \`${formatBytes(req.file.size)}\`\n**[Delete](${deleteUrl})**`)
.setThumbnail(thumbnailUrl)
.setColor(req.file.vibrant)
.setTimestamp(req.file.timestamp);
// Send the embed to the webhook, then delete the client after to free resources
whc.send(null, {
username: req.headers['x-ass-webhook-username'] || 'ass',
avatarURL: req.headers['x-ass-webhook-avatar'] || ASS_LOGO,
embeds: [embed]
}).then((_msg) => whc.destroy());
}
// Also update the user's upload count
if (!users[req.token]) {
let generator = () => generateId('random', 20, null);
let username = generator();
while (Object.values(users).findIndex((user) => user.username == username) != -1)
username = generator();
users[req.token] = { username, count: 0 };
}
users[req.token].count += 1;
fs.writeJsonSync(path('auth.json'), { users }, { spaces: 4 })
});
});
// Middleware for parsing the resource ID and handling 404
app.use('/:resourceId', (req, res, next) => {
// Parse the resource ID
req.ass = { resourceId: escape(req.params.resourceId).split('.')[0] };
// If the ID is invalid, return 404. Otherwise, continue normally
(!req.ass.resourceId || !data[req.ass.resourceId]) ? res.sendStatus(404) : next();
});
// View file
app.get('/:resourceId', (req, res) => {
let resourceId = req.ass.resourceId;
let fileData = data[resourceId];
let requiredItems = {
randomId: fileData.randomId,
originalname: escape(fileData.originalname),
mimetype: fileData.mimetype,
size: fileData.size,
timestamp: fileData.timestamp,
opengraph: fileData.opengraph,
vibrant: fileData.vibrant,
};
// If the client is Discord, send an Open Graph embed
if (req.useragent.isBot) return res.type('html').send(new OpenGraph(getTrueHttp(), getTrueDomain(), resourceId, data[resourceId]).build());
if (req.useragent.isBot) return res.type('html').send(new OpenGraph(getTrueHttp(), getTrueDomain(), resourceId, requiredItems).build());
// Return the file differently depending on what storage option was used
let uploaders = {
s3: () => fetch(getS3url(fileData.randomId, fileData.mimetype)).then((file) => {
file.headers.forEach((value, header) => res.setHeader(header, value));
file.body.pipe(res);
}),
local: () => {
res.header('Accept-Ranges', 'bytes').header('Content-Length', fileData.size).type(fileData.mimetype);
fs.createReadStream(path(fileData.path)).pipe(res);
}
};
// Read the file and send it to the client
fs.readFile(path(data[resourceId].path))
.then((fileData) => res
.header('Accept-Ranges', 'bytes')
.header('Content-Length', fileData.byteLength)
.type(data[resourceId].mimetype).send(fileData))
.catch(console.error);
uploaders[s3enabled ? 's3' : 'local']();
});
// Thumbnail response
@ -241,22 +286,28 @@ function startup() {
});
// Delete file
app.get('/delete/:filename', (req, res) => {
let filename = req.params.filename;
let resourceId = Object.keys(data)[Object.values(data).indexOf(Object.values(data).find((d) => d.filename == filename))];
app.get('/:resourceId/delete/:deleteId', (req, res) => {
let resourceId = req.ass.resourceId;
let deleteId = escape(req.params.deleteId);
let fileData = data[resourceId];
// If the delete ID doesn't match, don't delete the file
if (deleteId !== fileData.deleteId) return res.sendStatus(401);
// If the ID is invalid, return 400 because we are unable to process the resource
if (!resourceId || !data[resourceId]) return res.sendStatus(400);
if (!resourceId || !fileData) return res.sendStatus(400);
log(`Deleted: ${data[resourceId].originalname} (${data[resourceId].mimetype})`);
log(`Deleted: ${fileData.originalname} (${fileData.mimetype})`);
// Save the file information
fs.rmSync(path(data[resourceId].path));
delete data[resourceId];
saveData(data);
res.type('text').send('File has been deleted!');
})
Promise.all([s3enabled ? deleteS3(fileData) : fs.rmSync(path(fileData.path)), fs.rmSync(path('uploads/thumbnails/', fileData.thumbnail))])
.then(() => {
delete data[resourceId];
saveData(data);
res.type('text').send('File has been deleted!');
})
.catch(console.error);
});
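For illustration, a hypothetical client-side call to the new delete URL returned by the upload response (domain and IDs are placeholders; real delete IDs are 32-character random strings generated above):

const fetch = require('node-fetch');

// The `delete` URL from the upload response already embeds both the resource ID and the delete ID
fetch('https://upload.example.com/unsungdiscretegrub/delete/0123456789abcdef0123456789abcdef')
	.then((res) => res.text())
	.then(console.log)  // 'File has been deleted!' (or a 401 response if the delete ID does not match)
	.catch(console.error);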
app.listen(port, host, () => log(`Server started on [${host}:${port}]\nAuthorized users: ${Object.keys(users).length}\nAvailable files: ${Object.keys(data).length}`));
}

hash.js

@ -0,0 +1,13 @@
const fs = require('fs-extra');
const crypto = require('crypto');
const toArray = require('stream-to-array')
const { path } = require('./utils');
const { s3enabled } = require('./config.json');
module.exports = (file) =>
new Promise((resolve, reject) =>
toArray((fs.createReadStream(s3enabled ? path('uploads/', file.originalname) : path(file.path))))
.then((parts) => Buffer.concat(parts.map((part) => Buffer.isBuffer(part) ? part : Buffer.from(part))))
.then((buf) => crypto.createHash('sha1').update(buf).digest('hex'))
.then(resolve)
.catch(reject));

metrics.js

@ -1,6 +1,8 @@
const fs = require('fs-extra');
const path = require('path');
const { s3enabled } = require('./config.json');
const { formatBytes } = require('./utils');
const { bucketSize } = require('./storage');
module.exports = () => {
let data = fs.readJsonSync(path.join(__dirname, 'data.json'));
@ -21,17 +23,23 @@ module.exports = () => {
}
});
console.log('---- Usage metrics ----\n');
console.log(`Users: ${Object.keys(users).length}`);
console.log(`Files: ${Object.keys(data).length}`);
console.log('');
console.log(`Total size: ${formatBytes(totalSize)}`);
console.log(`Untracked size: ${formatBytes(oldSize)}`);
console.log('');
// Get S3 object count
bucketSize()
.then((s3size) => {
console.log('---- Usage metrics ----\n');
console.log(`Users: ${Object.keys(users).length}`);
console.log(`Files: ${Object.keys(data).length}`);
console.log(`S3 size: ${s3enabled ? s3size : '--'}`);
console.log('');
console.log(`Total size: ${formatBytes(totalSize)}`);
console.log(`Untracked size: ${formatBytes(oldSize)}`);
console.log('');
Object.values(users).forEach(({ username, count, size }) => {
console.log(`- ${username}: ${formatBytes(size)} (${count} files)`);
});
Object.values(users).forEach(({ username, count, size }) => {
console.log(`- ${username}: ${formatBytes(size)} (${count} files)`);
});
})
.catch(console.error);
}
if (require.main === module) module.exports();

ogp.js

@ -1,13 +1,15 @@
const Mustache = require('mustache');
const DateTime = require('luxon').DateTime;
const github = require('./package.json').homepage;
const { formatBytes, randomHexColour } = require('./utils');
const { homepage, version } = require('./package.json');
const { s3enabled } = require('./config.json');
const { formatBytes, randomHexColour, getS3url, getSafeExt } = require('./utils');
// https://ogp.me/
class OpenGraph {
http;
domain;
resourceId;
randomId;
filename;
type;
@ -20,10 +22,11 @@ class OpenGraph {
author;
color;
constructor(http, domain, resourceId, { originalname, mimetype, size, timestamp, opengraph, vibrant }) {
constructor(http, domain, resourceId, { randomId, originalname, mimetype, size, timestamp, opengraph, vibrant }) {
this.http = http;
this.domain = domain;
this.resourceId = resourceId;
this.randomId = randomId;
this.type = mimetype;
this.filename = originalname;
@ -38,17 +41,19 @@ class OpenGraph {
}
build() {
let resourceUrl = !s3enabled ? (this.http + this.domain + "/" + this.resourceId + getSafeExt(this.type)) : getS3url(this.randomId, this.type);
return Mustache.render(html, {
github,
homepage,
version,
http: this.http,
domain: this.domain,
resourceId: this.resourceId,
resourceUrl,
media: `<${this.type.includes('video') ? 'video' : 'img'} src="${resourceUrl}" style="height: 50vh;">`,
ogtype: this.type.includes('video') ? 'video.other' : 'image',
type: this.type.includes('video') ? 'video' : 'image',
ext: this.type.includes('video') ? '.mp4' : this.type.includes('gif') ? '.gif' : '',
title: (this.title.length != 0) ? `<meta property="og:title" content="${this.title}">` : '',
description: (this.description.length != 0) ? `<meta property="og:description" content="${this.description}">` : '',
site: (this.author.length != 0) ? `<meta property="og:site_name" content="${this.author}">` : '',
@ -71,7 +76,7 @@ const html = `
<title>ass</title>
<!-- Open Graph (https://ogp.me/) -->
<meta property="og:type" content="{{{ogtype}}}">
<meta property="og:{{{type}}}" content="{{{http}}}{{{domain}}}/{{{resourceId}}}{{{ext}}}">
<meta property="og:{{{type}}}" content="{{{resourceUrl}}}">
{{{title}}}
{{{description}}}
{{{site}}}
@ -81,7 +86,9 @@ const html = `
<link rel="alternate" type="application/json+oembed" href="{{{http}}}{{{domain}}}/{{{resourceId}}}/oembed.json" title="oEmbed">
</head>
<body>
Open Graph response for <a href="{{{github}}}" target="_blank">ass</a>.
Open Graph response for <a href="{{{homepage}}}" target="_blank">ass</a> {{{version}}}
<br>
{{{media}}}
</body>
</html>
`;

package-lock.json (generated)

File diff suppressed because it is too large

package.json

@ -1,6 +1,6 @@
{
"name": "ass",
"version": "0.3.0",
"version": "0.4.0",
"description": "The superior self-hosted ShareX server",
"main": "ass.js",
"scripts": {
@ -26,8 +26,10 @@
"homepage": "https://github.com/tycrek/ass#readme",
"dependencies": {
"any-shell-escape": "^0.1.1",
"aws-sdk": "^2.930.0",
"crypto-random-string": "3.3.1",
"discord.js": "^12.5.3",
"escape-html": "^1.0.3",
"express": "^4.17.1",
"express-rate-limit": "^5.2.6",
"express-useragent": "^1.0.15",
@ -37,10 +39,14 @@
"luxon": "^1.26.0",
"marked": "^2.0.7",
"multer": "^1.4.2",
"multer-s3": "^2.9.0",
"mustache": "^4.2.0",
"node-fetch": "^2.6.1",
"node-vibrant": "*",
"prompt": "^1.1.0",
"pug": "^3.0.2",
"sanitize-filename": "^1.6.3",
"stream-to-array": "^2.3.0",
"uuid": "^8.3.2"
}
}

setup.js

@ -1,5 +1,5 @@
// Default configuration
const config = {
var config = {
host: '0.0.0.0',
port: 40115,
domain: 'upload.example.com',
@ -11,6 +11,11 @@ const config = {
diskFilePath: "uploads/",
saveWithDate: false,
saveAsOriginal: true,
s3enabled: false,
s3endpoint: 'sfo3.digitaloceanspaces.com',
s3bucket: 'bucket-name',
s3accessKey: 'accessKey',
s3secretKey: 'secretKey',
};
// If directly called on the command line, run setup script
@ -19,6 +24,11 @@ if (require.main === module) {
const fs = require('fs-extra');
const prompt = require('prompt');
try {
let existingConfig = require('./config.json');
Object.keys(existingConfig).forEach((key) => config.hasOwnProperty(key) && (config[key] = existingConfig[key]))
} catch (ex) { console.log(ex) }
// Disables the annoying "prompt: " prefix and removes colours
prompt.message = '';
prompt.colors = false;
@ -63,21 +73,20 @@ if (require.main === module) {
default: config.resourceIdSize,
required: false
},
gfyIdSize: {
description: 'Adjective count for "gfycat" Resource ID type',
type: 'integer',
default: config.gfyIdSize,
required: false
},
resourceIdType: {
description: 'Resource ID type (determines what kind of URL your uploads are visible at. Can be one of: original, zws, random)',
description: 'Resource ID type (determines what kind of URL your uploads are visible at. Can be one of: original, zws, random, gfycat)',
type: 'string',
default: config.resourceIdType,
required: false,
pattern: /(original|zws|random|gfycat)/gi,
message: 'Must be one of: original, zws, random, gfycat'
},
gfyIdSize: {
description: 'Adjective count for "gfycat" Resource ID type',
type: 'integer',
default: config.gfyIdSize,
required: false
},
diskFilePath: {
description: 'Relative path to save uploads to',
type: 'string',
@ -96,6 +105,36 @@ if (require.main === module) {
default: config.saveAsOriginal,
required: false
},
s3enabled: {
description: 'Enable uploading to S3 storage endpoints',
type: 'boolean',
default: config.s3enabled,
required: false
},
s3endpoint: {
description: 'S3 Endpoint URL to upload objects to',
type: 'string',
default: config.s3endpoint,
required: false
},
s3bucket: {
description: 'S3 Bucket name to upload objects to',
type: 'string',
default: config.s3bucket,
required: false
},
s3accessKey: {
description: 'Access key for the specified S3 API',
type: 'string',
default: config.s3accessKey,
required: false
},
s3secretKey: {
description: 'Secret key for the specified S3 API',
type: 'string',
default: config.s3secretKey,
required: false
},
}
};
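For reference, a hypothetical excerpt of the config.json these prompts produce when S3 is enabled (values are placeholders; only the new S3 fields plus a few existing ones are shown):

{
	"host": "0.0.0.0",
	"port": 40115,
	"domain": "upload.example.com",
	"saveAsOriginal": true,
	"s3enabled": true,
	"s3endpoint": "sfo3.digitaloceanspaces.com",
	"s3bucket": "my-uploads-bucket",
	"s3accessKey": "<your access key>",
	"s3secretKey": "<your secret key>"
}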

storage.js

@ -0,0 +1,57 @@
// https://docs.digitalocean.com/products/spaces/resources/s3-sdk-examples/
// https://www.digitalocean.com/community/tutorials/how-to-upload-a-file-to-object-storage-with-node-js
const fs = require('fs-extra');
const aws = require('aws-sdk');
const multer = require('multer');
const multerS3 = require('multer-s3');
const { getSafeExt } = require('./utils');
const { diskFilePath, saveWithDate, s3enabled, s3endpoint, s3bucket, s3accessKey, s3secretKey } = require('./config.json');
const s3 = new aws.S3({
endpoint: new aws.Endpoint(s3endpoint),
credentials: new aws.Credentials({ accessKeyId: s3accessKey, secretAccessKey: s3secretKey })
});
const uploadS3 = multer({
storage: multerS3({
s3: s3,
bucket: s3bucket,
acl: 'public-read',
key: (req, file, cb) => cb(null, req.randomId.concat(getSafeExt(file.mimetype))),
contentType: (_req, file, cb) => cb(null, file.mimetype)
})
}).single('file');
const deleteS3 = (file) =>
new Promise((resolve, reject) =>
s3.deleteObject({ Bucket: s3bucket, Key: file.randomId.concat(getSafeExt(file.mimetype)) }).promise().then(resolve).catch(reject));
const uploadLocal = multer({
storage: multer.diskStorage({
destination: !saveWithDate ? diskFilePath : (_req, _file, cb) => {
// Get current month and year
let [month, _day, year] = new Date().toLocaleDateString("en-US").split("/");
// Add 0 before single-digit months (e.g. 6 becomes 06)
let folder = `${diskFilePath}/${year}-${("0" + month).slice(-2)}`;
// Create folder if it doesn't exist
fs.ensureDirSync(folder);
cb(null, folder);
}
})
}).single('file');
const bucketSize = () =>
new Promise((resolve, reject) => s3enabled ? listAllKeys(resolve, reject) : resolve(0));
function listAllKeys(resolve, reject, token, allKeys = []) {
// Accumulate keys across paginated listObjectsV2 calls, then resolve with the total object count
s3.listObjectsV2({ Bucket: s3bucket, ContinuationToken: token }).promise()
.then((data) => (allKeys = allKeys.concat(data.Contents), data.IsTruncated ? listAllKeys(resolve, reject, data.NextContinuationToken, allKeys) : resolve(allKeys.length)))
.catch(reject);
}
module.exports = { uploadLocal, uploadS3, deleteS3, bucketSize };
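A minimal sketch of using the exported bucketSize helper on its own. Note that it resolves an object count, not a byte size, which is the value the metrics script above prints as "S3 size"; this assumes config.json has s3enabled set and valid credentials:

const { bucketSize } = require('./storage');

bucketSize()
	.then((count) => console.log(`Objects in bucket: ${count}`))
	.catch(console.error);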

thumbnails.js

@ -2,7 +2,8 @@ const ffmpeg = require('ffmpeg-static');
const Jimp = require('jimp');
const shell = require('any-shell-escape');
const { exec } = require('child_process');
const { path } = require('./utils');
const { path, getS3url } = require('./utils');
const { s3enabled } = require('./config.json');
const THUMBNAIL_QUALITY = 50;
const THUMBNAIL_SIZE = 512;
@ -19,12 +20,12 @@ function getCommand(src, dest) {
}
function getVideoThumbnail(file) {
return new Promise((resolve, reject) => exec(getCommand(path(file.path), getNewNamePath(file.originalname)), (err) => err ? reject(err) : resolve()));
return new Promise((resolve, reject) => exec(getCommand(s3enabled ? path('uploads/', file.originalname) : path(file.path), getNewNamePath(file.originalname)), (err) => err ? reject(err) : resolve()));
}
function getResizedThumbnail(file) {
return new Promise((resolve, reject) =>
Jimp.read(path(file.path))
Jimp.read(s3enabled ? getS3url(file.randomId, file.mimetype) : path(file.path))
.then((image) => image
.quality(THUMBNAIL_QUALITY)
.resize(THUMBNAIL_SIZE, THUMBNAIL_SIZE, Jimp.RESIZE_BICUBIC)

utils.js

@ -1,9 +1,12 @@
const fs = require('fs-extra');
const Path = require('path');
const fetch = require('node-fetch');
const sanitize = require("sanitize-filename");
const token = require('./generators/token');
const zwsGen = require('./generators/zws');
const randomGen = require('./generators/random');
const gfyGen = require('./generators/gfycat');
const { s3bucket, s3endpoint } = require('./config.json');
const idModes = {
zws: 'zws', // Zero-width spaces (see: https://zws.im/)
@ -37,5 +40,20 @@ module.exports = {
colour += letters[(Math.floor(Math.random() * 16))];
return colour;
},
arrayEquals: (arr1, arr2) => arr1.length === arr2.length && arr1.slice().sort().every((value, index) => value === arr2.slice().sort()[index])
arrayEquals: (arr1, arr2) => arr1.length === arr2.length && arr1.slice().sort().every((value, index) => value === arr2.slice().sort()[index]),
downloadTempS3: (file) => new Promise((resolve, reject) =>
fetch(getS3url(file.randomId, file.mimetype))
.then((f2) => f2.body.pipe(fs.createWriteStream(Path.join(__dirname, 'uploads/', sanitize(file.originalname))).on('close', () => resolve())))
.catch(reject)),
getS3url,
getSafeExt,
sanitize
}
function getS3url(s3key, type) {
return `https://${s3bucket}.${s3endpoint}/${s3key}${getSafeExt(type)}`;
}
function getSafeExt(type) {
return type.includes('video') ? '.mp4' : type.includes('gif') ? '.gif' : '';
}
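A quick sketch of what these helpers return, assuming the sample values from the setup defaults (s3bucket 'bucket-name', s3endpoint 'sfo3.digitaloceanspaces.com') and a made-up random ID:

const { getS3url, getSafeExt } = require('./utils');

getSafeExt('video/mp4');   // '.mp4'
getSafeExt('image/gif');   // '.gif'
getSafeExt('image/png');   // ''  (non-GIF images get no forced extension)

getS3url('d6d98b88c84f', 'video/mp4');
// => 'https://bucket-name.sfo3.digitaloceanspaces.com/d6d98b88c84f.mp4'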

vibrant.js

@ -1,12 +1,15 @@
const Vibrant = require('node-vibrant');
const { path } = require('./utils');
const { path, randomHexColour } = require('./utils');
const { s3enabled } = require('./config.json');
const COLOR_COUNT = 256;
const QUALITY = 3;
module.exports = (file) =>
new Promise((resolve, reject) =>
Vibrant.from(path(file.path))
.maxColorCount(COLOR_COUNT).quality(QUALITY).getPalette()
.then((palettes) => resolve(palettes[Object.keys(palettes).sort((a, b) => palettes[b].population - palettes[a].population)[0]].hex))
.catch(reject));
file.mimetype.includes('video')
? resolve(randomHexColour())
: Vibrant.from(s3enabled ? path('uploads/', file.originalname) : path(file.path))
.maxColorCount(COLOR_COUNT).quality(QUALITY).getPalette()
.then((palettes) => resolve(palettes[Object.keys(palettes).sort((a, b) => palettes[b].population - palettes[a].population)[0]].hex))
.catch(reject));
