Merge branch 'master' into tailwind

pull/101/head
tycrek 3 years ago
commit 8ae078c5be

.github/README.md

@@ -72,6 +72,7 @@ ass was designed with developers in mind. If you are a developer & want somethin
 - **Multiple file storage methods**
   - Local file system
   - Amazon S3, including [DigitalOcean Spaces]
+  - [Skynet] (free decentralized storage on the [Sia] blockchain)
 - **Multiple data storage methods** using [data engines]
   - **File**
     - JSON (default, [papito])
@@ -84,6 +85,8 @@ ass was designed with developers in mind. If you are a developer & want somethin
 [Git Submodules]: https://git-scm.com/book/en/v2/Git-Tools-Submodules
 [ZWS]: https://zws.im
 [DigitalOcean Spaces]: https://www.digitalocean.com/products/spaces/
+[Skynet]: https://siasky.net/
+[Sia]: https://sia.tech/
 [data engines]: #data-engines
 [papito]: https://github.com/tycrek/papito
 [ass-psql]: https://github.com/tycrek/ass-psql
@@ -313,6 +316,29 @@ const path = require('path');
 module.exports = (req, res, next) => res.sendFile(path.join(__dirname, 'index.html'));
 ```
+
+## File storage
+
+ass supports three methods of file storage: local, S3, or [Skynet].
+
+### Local
+
+Local storage is the simplest option, but it relies on you having plenty of disk space to store files, which can be costly.
+
+### S3
+
+Any existing object storage server that's compatible with [Amazon S3] can be used with ass. I personally host my files using DigitalOcean Spaces, which implements S3.
+
+S3 servers are generally very fast and have very good uptime, though this will depend on the hosting provider and plan you choose.
+
+### Skynet
+
+[Skynet] is a decentralized CDN created by [Skynet Labs]. It utilizes the [Sia] blockchain, the leading decentralized cloud storage platform, which boasts "no signups, no servers, no trusted third parties".
+
+For hosts looking for a reliable, always-available storage solution with lots of capacity and no cost, Skynet may be your best option. However, uploads tend to be on the slower side (though speeds will improve as the Sia network grows).
+
+[Amazon S3]: https://en.wikipedia.org/wiki/Amazon_S3
+[Skynet Labs]: https://github.com/SkynetLabs
 ## Custom frontends
 ass is intended to provide a strong backend for developers to build their own frontends around. [Git Submodules] make it easy to create custom frontends. Submodules are their own projects, which means you are free to build the router however you wish, as long as it exports the required items. A custom frontend is really just an [Express.js router].
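The File storage section added above introduces the `useSia` switch alongside the existing `s3enabled` one. As a rough illustration of how the two interact after this commit (a sketch mirroring the startup log line further down in this diff, not an excerpt from the repo): `s3enabled` takes priority, then `useSia`, and local disk is the fallback.

```ts
// Illustrative sketch only: how the storage target is derived from config.json.
// s3enabled wins over useSia; with both false, files stay on the local disk.
const { s3enabled, useSia } = require('./config.json');

const target = s3enabled ? 'in S3' : useSia ? 'on Sia blockchain' : 'on disk';
console.log(`Storing resources ${target}`);
```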

package-lock.json (diff suppressed because it is too large)

package.json

@@ -37,6 +37,7 @@
     "url": "https://patreon.com/tycrek"
   },
   "dependencies": {
+    "@skynetlabs/skynet-nodejs": "^2.3.0",
     "@tsconfig/node14": "^1.0.1",
     "@tycrek/express-nofavicon": "^1.0.3",
     "@tycrek/express-postcss": "^0.1.0",

@@ -16,7 +16,7 @@ if (doSetup) {
 }

 // Load the config
-const { host, port, useSsl, isProxied, s3enabled, frontendName, indexFile } = require('../config.json');
+const { host, port, useSsl, isProxied, s3enabled, frontendName, indexFile, useSia } = require('../config.json');

 //#region Imports
 import fs from 'fs-extra';

@@ -110,4 +110,4 @@ log
     .info('Frontend', ASS_FRONTEND.enabled ? ASS_FRONTEND.brand : 'disabled', `${ASS_FRONTEND.enabled ? `${getTrueHttp()}${getTrueDomain()}${ASS_FRONTEND.endpoint}` : ''}`)
     .info('Custom index', ASS_INDEX_ENABLED ? `enabled` : 'disabled')
     .blank()
-    .express().Host(app, port, host, () => log.success('Ready for uploads', `Storing resources ${s3enabled ? 'in S3' : 'on disk'}`));
+    .express().Host(app, port, host, () => log.success('Ready for uploads', `Storing resources ${s3enabled ? 'in S3' : useSia ? 'on Sia blockchain' : 'on disk'}`));

@@ -4,7 +4,8 @@ import fs from 'fs-extra';
 import escape from 'escape-html';
 import fetch, { Response } from 'node-fetch';
 import { deleteS3 } from '../storage';
-const { diskFilePath, s3enabled, viewDirect } = require('../../config.json');
+import { SkynetDelete, SkynetDownload } from '../skynet';
+const { diskFilePath, s3enabled, viewDirect, useSia } = require('../../config.json');
 import { path, log, getTrueHttp, getTrueDomain, formatBytes, formatTimestamp, getS3url, getDirectUrl, getResourceColor, replaceholder } from '../utils';
 const { CODE_UNAUTHORIZED, CODE_NOT_FOUND, } = require('../../MagicNumbers.json');
 import { data } from '../data';

@@ -70,13 +71,16 @@ router.get('/direct*', (req: AssRequest, res: AssResponse, next) => data.get(req
             file.headers.forEach((value, header) => res.setHeader(header, value));
             file.body?.pipe(res);
         }),
+        sia: () => SkynetDownload(fileData)
+            .then((stream) => stream.pipe(res))
+            .then(() => SkynetDelete(fileData)),
         local: () => {
             res.header('Accept-Ranges', 'bytes').header('Content-Length', `${fileData.size}`).type(fileData.mimetype);
             fs.createReadStream(fileData.path).pipe(res);
         }
     };

-    uploaders[s3enabled ? 's3' : 'local']();
+    return uploaders[fileData.randomId.startsWith('sia://') ? 'sia' : s3enabled ? 's3' : 'local']();
 }).catch(next));

 // Thumbnail response

@@ -123,7 +127,7 @@ router.get('/delete/:deleteId', (req: AssRequest, res: AssResponse, next) => {
         // Save the file information
         return Promise.all([
-            s3enabled ? deleteS3(fileData) : fs.rmSync(path(fileData.path)),
+            s3enabled ? deleteS3(fileData) : !useSia ? fs.rmSync(path(fileData.path)) : () => Promise.resolve(),
             (!fileData.is || (fileData.is.image || fileData.is.video)) && fs.existsSync(path(diskFilePath, 'thumbnails/', fileData.thumbnail))
                 ? fs.rmSync(path(diskFilePath, 'thumbnails/', fileData.thumbnail)) : () => Promise.resolve()]);
     })

@@ -15,6 +15,7 @@ const config = {
     dataEngine: '@tycrek/papito',
     frontendName: 'ass-x',
     indexFile: '',
+    useSia: false,
     s3enabled: false,
 };

@@ -178,6 +179,12 @@ function doSetup() {
             default: config.indexFile,
             required: false
         },
+        useSia: {
+            description: 'Use Sia Skynet for decentralized file storage?',
+            type: 'boolean',
+            default: config.useSia,
+            required: false
+        },
         s3enabled: {
             description: 'Enable uploading to S3 storage endpoints',
             type: 'boolean',

@@ -0,0 +1,37 @@
+import { FileData } from "./definitions";
+import fs, { ReadStream } from 'fs-extra';
+import { path } from './utils';
+
+const { SkynetClient } = require('@skynetlabs/skynet-nodejs');
+
+function getFullPath(fileData: FileData) {
+    return path('share', '.skynet', `${fileData.randomId}${fileData.ext}`.replace(/sia\:\/\//gi, ''));
+}
+// Create the SkyNet client
+export const Skynet = new SkynetClient();
+
+export function SkynetUpload(path: string): Promise<string> {
+    return new Promise(async (resolve, reject) => {
+        try {
+            const skylink = await Skynet.uploadFile(path);
+            resolve(skylink);
+        } catch (error) {
+            reject(error);
+        }
+    });
+}
+
+export function SkynetDownload(fileData: FileData): Promise<ReadStream> {
+    return new Promise((resolve: Function, reject) =>
+        fs.ensureDir(path('share', '.skynet'))
+            .then(async () => {
+                await Skynet.downloadFile(getFullPath(fileData), fileData.randomId);
+                return fs.createReadStream(getFullPath(fileData));
+            })
+            .then((stream) => resolve(stream))
+            .catch(reject));
+}
+
+export function SkynetDelete(fileData: FileData) {
+    return fs.remove(getFullPath(fileData));
+}
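A brief usage sketch of the helpers above (hypothetical values: the `FileData` stub and file path are illustrative, while the `sia://` skylink returned by `SkynetUpload` is what the rest of the codebase stores in `randomId`):

```ts
import { SkynetUpload, SkynetDownload, SkynetDelete } from './skynet';

// Hypothetical FileData stub for illustration; real objects come from the upload pipeline.
const fileData: any = { randomId: '', ext: '.png' };

SkynetUpload('/tmp/example.png')                          // upload a local file, resolve with its sia:// skylink
    .then((skylink) => { fileData.randomId = skylink; })
    .then(() => SkynetDownload(fileData))                 // fetch it back into share/.skynet as a ReadStream
    .then((stream) => stream.pipe(process.stdout))        // stream the local copy wherever it's needed
    .then(() => SkynetDelete(fileData))                   // then remove the temporary local copy
    .catch(console.error);
```

Downloads are materialized under `share/.skynet` before streaming, which is why the resource route above deletes the local copy once it has been piped to the response.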

@@ -8,7 +8,8 @@ import Thumbnail from './thumbnails';
 import Vibrant from './vibrant';
 import Hash from './hash';
 import { generateId, log } from './utils';
-const { s3enabled, s3endpoint, s3bucket, s3usePathStyle, s3accessKey, s3secretKey, diskFilePath, saveAsOriginal, saveWithDate, mediaStrict, maxUploadSize } = require('../config.json');
+import { SkynetUpload } from './skynet';
+const { s3enabled, s3endpoint, s3bucket, s3usePathStyle, s3accessKey, s3secretKey, diskFilePath, saveAsOriginal, saveWithDate, mediaStrict, maxUploadSize, useSia } = require('../config.json');
 const { CODE_UNSUPPORTED_MEDIA_TYPE } = require('../MagicNumbers.json');

 const ID_GEN_LENGTH = 32;

@@ -94,27 +95,31 @@ export function processUploaded(req: AssRequest, res: AssResponse, next: Functio
         .then(() => { if (req.file!.size / Math.pow(1024, 2) > maxUploadSize) throw new Error('LIMIT_FILE_SIZE'); })

         // Save file
-        .then(() => log.debug('Saving file', req.file!.originalname, s3enabled ? 'in S3' : 'on disk'))
+        .then(() => log.debug('Saving file', req.file!.originalname, s3enabled ? 'in S3' : useSia ? 'on Sia blockchain' : 'on disk'))
         .then(() =>
             // skipcq: JS-0229
-            new Promise((resolve, reject) => s3enabled
+            new Promise((resolve, reject) => {

                 // Upload to Amazon S3
-                ? s3.putObject({
+                if (s3enabled) return s3.putObject({
                     Bucket: s3bucket,
                     Key: req.file!.randomId.concat(req.file!.ext),
                     ACL: 'public-read',
                     ContentType: req.file!.mimetype,
                     Body: fs.createReadStream(req.file!.path)
-                }).promise().then(resolve).catch(reject)
+                }).promise().then(resolve).catch(reject);
+
+                // Use Sia Skynet
+                else if (useSia) return SkynetUpload(req.file!.path)
+                    .then((skylink) => req.file!.randomId = skylink)
+                    .then(resolve).catch(reject);

                 // Save to local storage
-                : fs.ensureDir(getDatedDirname())
+                else return fs.ensureDir(getDatedDirname())
                     .then(() => fs.copy(req.file!.path, getLocalFilename(req), { preserveTimestamps: true }))
-                    .then(resolve)
-                    .catch(reject)
-            ))
-        .then(() => log.debug('File saved', req.file!.originalname, s3enabled ? 'in S3' : 'on disk'))
+                    .then(resolve).catch(reject);
+            }))
+        .then(() => log.debug('File saved', req.file!.originalname, s3enabled ? 'in S3' : useSia ? 'on Sia blockchain' : 'on disk'))
         .catch((err) => next(err))

         // Delete the file
