Merge remote-tracking branch 'upstream/dev/0.15.0' into dev/0.15.0

x-dev-15
Author: xwashere
Commit: 275cb73290

@@ -2,7 +2,7 @@ import { UserConfiguration, UserConfigTypeChecker, PostgresConfiguration, MongoD
 import fs from 'fs-extra';
 import { path } from '@tycrek/joint';
-import { log } from './log';
+import { log } from './log.js';
 const FILEPATH = path.join('.ass-data/userconfig.json');
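Most hunks in this commit make the same mechanical change: relative imports gain an explicit `.js` extension. That is what Node's ESM loader (and TypeScript's NodeNext resolution) expects once the package is flagged `"type": "module"`, as the package.json hunk further down does. A minimal sketch of the pattern, assuming `"module": "NodeNext"` in the tsconfig; the file and symbol names are illustrative:

```ts
// Hypothetical ESM TypeScript module (compiled output is plain .js).
// Relative specifiers must name the *emitted* file, so './log.js' even though
// the file on disk during development is 'log.ts' — the ESM resolver does not
// guess extensions.
import { log } from './log.js';

// Bare package specifiers are unchanged; they still resolve via node_modules.
import fs from 'fs-extra';

export const touch = async (filepath: string): Promise<void> => {
	await fs.ensureFile(filepath); // fs-extra: create the file if it doesn't exist
	log.debug(`touched ${filepath}`);
};
```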

@@ -8,15 +8,15 @@ import express, { Request, Response, NextFunction, RequestHandler, json as BodyP
 import { path, isProd } from '@tycrek/joint';
 import { epcss } from '@tycrek/express-postcss';
-import { log } from './log';
-import { get } from './data';
-import { UserConfig } from './UserConfig';
-import { DBManager } from './sql/database';
-import { JSONDatabase } from './sql/json';
-import { MySQLDatabase } from './sql/mysql';
-import { PostgreSQLDatabase } from './sql/postgres';
-import { MongoDBDatabase } from './sql/mongodb';
-import { buildFrontendRouter } from './routers/_frontend';
+import { log } from './log.js';
+import { get } from './data.js';
+import { UserConfig } from './UserConfig.js';
+import { DBManager } from './sql/database.js';
+import { JSONDatabase } from './sql/json.js';
+import { MySQLDatabase } from './sql/mysql.js';
+import { PostgreSQLDatabase } from './sql/postgres.js';
+import { MongoDBDatabase } from './sql/mongodb.js';
+import { buildFrontendRouter } from './routers/_frontend.js';
 /**
  * Top-level metadata exports

@@ -1,8 +1,8 @@
 import { AssFile, AssUser, NID } from 'ass';
-import { log } from './log';
-import { UserConfig } from './UserConfig';
-import { DBManager } from './sql/database';
+import { log } from './log.js';
+import { UserConfig } from './UserConfig.js';
+import { DBManager } from './sql/database.js';
 /**
  * Switcher type for exported functions
@@ -13,10 +13,10 @@ type DataSector = 'files' | 'users';
  * database kind -> name mapping
  */
 const DBNAMES = {
 	'mysql': 'MySQL',
 	'postgres': 'PostgreSQL',
 	'mongodb': 'MongoDB',
 	'json': 'JSON'
 };
 export const put = (sector: DataSector, key: NID, data: AssFile | AssUser): Promise<void> => new Promise(async (resolve, reject) => {

@@ -1,8 +1,8 @@
 import { Router } from 'express';
 import { path } from '@tycrek/joint';
-import { App } from '../app';
-import { UserConfig } from '../UserConfig';
+import { App } from '../app.js';
+import { UserConfig } from '../UserConfig.js';
 /**
  * Builds a basic router for loading a page with frontend JS
@@ -24,7 +24,7 @@ export const buildFrontendRouter = (page: string, onConfigReady = true) => {
 	// Load frontend JS
 	router.get('/ui.js', (_req, res) => ready()
-		? res.type('text/javascript').sendFile(path.join(`dist-frontend/${page}.mjs`))
+		? res.type('text/javascript').sendFile(path.join(`dist/frontend/${page}.mjs`))
 		: res.sendStatus(403));
 	return router;
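For context on the `dist-frontend/` → `dist/frontend/` rename above: the router serves each page's compiled `.mjs` bundle from the new output directory. A reduced, self-contained sketch of that shape using plain Express and Node's `path` (the real code uses `path.join` from `@tycrek/joint` and a config-ready check); the names here are illustrative:

```ts
import { Router } from 'express';
import { resolve } from 'path';

// Serve dist/frontend/<page>.mjs once the app reports ready, otherwise 403.
export const buildPageRouter = (page: string, ready: () => boolean) => {
	const router = Router({ caseSensitive: true });

	router.get('/ui.js', (_req, res) => ready()
		// res.sendFile wants an absolute path, hence resolve() from the working directory
		? res.type('text/javascript').sendFile(resolve(`dist/frontend/${page}.mjs`))
		: res.sendStatus(403));

	return router;
};
```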

@@ -3,16 +3,16 @@ import { AssUser, AssUserNewReq } from 'ass';
 import * as bcrypt from 'bcrypt'
 import { Router, json as BodyParserJson, RequestHandler } from 'express';
-import * as data from '../data';
-import { log } from '../log';
-import { nanoid } from '../generators';
-import { UserConfig } from '../UserConfig';
-import { rateLimiterMiddleware, setRateLimiter } from '../ratelimit';
-import { DBManager } from '../sql/database';
-import { JSONDatabase } from '../sql/json';
-import { MySQLDatabase } from '../sql/mysql';
-import { PostgreSQLDatabase } from '../sql/postgres';
-import { MongoDBDatabase } from '../sql/mongodb';
+import * as data from '../data.js';
+import { log } from '../log.js';
+import { nanoid } from '../generators.js';
+import { UserConfig } from '../UserConfig.js';
+import { rateLimiterMiddleware, setRateLimiter } from '../ratelimit.js';
+import { DBManager } from '../sql/database.js';
+import { JSONDatabase } from '../sql/json.js';
+import { MySQLDatabase } from '../sql/mysql.js';
+import { PostgreSQLDatabase } from '../sql/postgres.js';
+import { MongoDBDatabase } from '../sql/mongodb.js';
 const router = Router({ caseSensitive: true });

@@ -6,13 +6,13 @@ import crypto from 'crypto';
 import { Router } from 'express';
 import { Readable } from 'stream';
-import * as data from '../data';
-import { log } from '../log';
-import { App } from '../app';
-import { random } from '../generators';
-import { UserConfig } from '../UserConfig';
-import { getFileS3, uploadFileS3 } from '../s3';
-import { rateLimiterMiddleware } from '../ratelimit';
+import * as data from '../data.js';
+import { log } from '../log.js';
+import { App } from '../app.js';
+import { random } from '../generators.js';
+import { UserConfig } from '../UserConfig.js';
+import { getFileS3, uploadFileS3 } from '../s3.js';
+import { rateLimiterMiddleware } from '../ratelimit.js';
 const router = Router({ caseSensitive: true });

@@ -12,8 +12,8 @@ import {
 	AbortMultipartUploadCommand,
 } from "@aws-sdk/client-s3";
-import { log } from './log';
-import { UserConfig } from './UserConfig';
+import { log } from './log.js';
+import { UserConfig } from './UserConfig.js';
 const NYR = 'S3 not ready';

@@ -3,9 +3,9 @@ import { AssFile, AssUser, FilesSchema, UsersSchema } from 'ass';
 import path, { resolve } from 'path';
 import fs from 'fs-extra';
-import { Database, DatabaseTable, DatabaseValue } from './database';
-import { log } from '../log';
-import { nanoid } from '../generators';
+import { Database, DatabaseTable, DatabaseValue } from './database.js';
+import { log } from '../log.js';
+import { nanoid } from '../generators.js';
 /**
  * Absolute filepaths for JSON data files
@@ -32,62 +32,62 @@ const SECTORMAP = {
 } as { [index: string]: string };
 const bothWriter = async (files: FilesSchema, users: UsersSchema) => {
 	await fs.writeJson(PATHS.files, files, { spaces: '\t' });
 	await fs.writeJson(PATHS.users, users, { spaces: '\t' });
 };
 /**
  * Creates a JSON file with a given empty data template
  */
 const createEmptyJson = (filepath: string, emptyData: any): Promise<void> => new Promise(async (resolve, reject) => {
 	try {
 		if (!(await fs.pathExists(filepath))) {
 			await fs.ensureFile(filepath);
 			await fs.writeJson(filepath, emptyData, { spaces: '\t' });
 		}
 		resolve(void 0);
 	} catch (err) {
 		reject(err);
 	}
 });
 /**
  * Ensures the data files exist and creates them if required
  */
 export const ensureFiles = (): Promise<void> => new Promise(async (resolve, reject) => {
 	log.debug('Checking data files');
 	try {
 		// * Default files.json
 		await createEmptyJson(PATHS.files, {
 			files: {},
 			useSql: false,
 			meta: {}
 		} as FilesSchema);
 		// * Default users.json
 		await createEmptyJson(PATHS.users, {
 			tokens: [],
 			users: {},
 			cliKey: nanoid(32),
 			useSql: false,
 			meta: {}
 		} as UsersSchema);
 		log.debug('Data files exist');
 		resolve();
 	} catch (err) {
 		log.error('Failed to verify existence of data files');
 		reject(err);
 	}
 });
 /**
  * JSON database. i know json isnt sql, shut up.
  */
 export class JSONDatabase implements Database {
 	public open(): Promise<void> { return Promise.resolve() }
 	public close(): Promise<void> { return Promise.resolve() }
 	public configure(): Promise<void> {
@@ -97,7 +97,7 @@ export class JSONDatabase implements Database {
 			resolve();
 		});
 	}
 	public put(table: DatabaseTable, key: string, data: DatabaseValue): Promise<void> {
 		return new Promise(async (resolve, reject) => {
 			if (table == 'assfiles') {
@@ -117,19 +117,19 @@ export class JSONDatabase implements Database {
 				// Save the files
 				await bothWriter(filesJson, usersJson);
 				resolve()
 			} else if (table == 'assusers') {
 				// ? Local JSON
 				const usersJson = await fs.readJson(PATHS.users) as UsersSchema;
 				// Check if key already exists
 				if (usersJson.users[key] != null) return reject(new Error(`User key ${key} already exists`));
 				// Otherwise add the data
 				usersJson.users[key] = data as AssUser;
 				await fs.writeJson(PATHS.users, usersJson, { spaces: '\t' });
 				resolve();
 			}
@@ -139,7 +139,7 @@ export class JSONDatabase implements Database {
 	public get(table: DatabaseTable, key: string): Promise<DatabaseValue | undefined> {
 		return new Promise(async (resolve, reject) => {
 			const data = (await fs.readJson(PATHMAP[table]))[SECTORMAP[table]][key];
 			(!data) ? resolve(undefined) : resolve(data);
 		});
 	}
@@ -148,5 +148,5 @@ export class JSONDatabase implements Database {
 			const data = (await fs.readJson(PATHMAP[table]))[SECTORMAP[table]];
 			(!data) ? resolve({}) : resolve(data);
 		});
 	}
 }
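The JSON adapter above stores each table as a flat file and rewrites the whole file on every put, rejecting duplicate keys. A stripped-down sketch of that read-modify-write cycle with fs-extra, assuming a files.json shaped like the empty template written by `ensureFiles()`; the path and names are illustrative:

```ts
import fs from 'fs-extra';

// Shape matching the empty template written by ensureFiles() above.
interface FilesJson {
	files: { [id: string]: unknown };
	useSql: boolean;
	meta: object;
}

const FILES_PATH = '.ass-data/files.json'; // illustrative path

// Read the whole file, reject duplicate keys, then write the whole file back.
export const putFile = async (key: string, data: unknown): Promise<void> => {
	const json = await fs.readJson(FILES_PATH) as FilesJson;
	if (json.files[key] != null) throw new Error(`File key ${key} already exists`);
	json.files[key] = data;
	await fs.writeJson(FILES_PATH, json, { spaces: '\t' });
};
```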

@@ -2,9 +2,9 @@ import { AssFile, AssUser, NID, UploadToken } from 'ass';
 import mysql, { Pool } from 'mysql2/promise';
-import { log } from '../log';
-import { UserConfig } from '../UserConfig';
-import { Database, DatabaseTable, DatabaseValue } from './database';
+import { log } from '../log.js';
+import { UserConfig } from '../UserConfig.js';
+import { Database, DatabaseTable, DatabaseValue } from './database.js';
 export class MySQLDatabase implements Database {
 	private _pool: Pool;
@@ -50,7 +50,7 @@ export class MySQLDatabase implements Database {
 		return issue;
 	}
 	public open() { return Promise.resolve(); }
 	public close() { return Promise.resolve(); }
 	/**
@@ -144,7 +144,7 @@ VALUES ('${key}', '${JSON.stringify(data)}');
 		});
 	}
 	public get(table: DatabaseTable, key: NID): Promise<DatabaseValue | undefined> {
 		return new Promise(async (resolve, reject) => {
 			try {
 				// Run query
@@ -161,7 +161,7 @@ VALUES ('${key}', '${JSON.stringify(data)}');
 	}
 	// todo: unknown if this works
 	public getAll(table: DatabaseTable): Promise<{ [index: string]: DatabaseValue }> {
 		return new Promise(async (resolve, reject) => {
 			try {
 				// Run query // ! this may not work as expected

@@ -1,43 +1,42 @@
 import { PostgresConfiguration } from 'ass';
-import { Client } from 'pg';
-
-import { log } from '../log';
-import { Database, DatabaseTable, DatabaseValue } from './database';
-import { UserConfig } from '../UserConfig';
+import pg from 'pg';
+import { log } from '../log.js';
+import { Database, DatabaseTable, DatabaseValue } from './database.js';
+import { UserConfig } from '../UserConfig.js';
 /**
  * database adapter for postgresql
  */
 export class PostgreSQLDatabase implements Database {
-	private _client: Client;
+	private _client: pg.Client;
 	/**
 	 * validate config
 	 */
 	private _validateConfig(): string | undefined {
 		// make sure the configuration exists
 		if (!UserConfig.ready) return 'User configuration not ready';
 		if (typeof UserConfig.config.database != 'object') return 'PostgreSQL configuration missing';
 		if (UserConfig.config.database.kind != 'postgres') return 'Database not set to PostgreSQL, but PostgreSQL is in use, something has gone terribly wrong';
 		if (typeof UserConfig.config.database.options != 'object') return 'PostgreSQL configuration missing';
 		let config = UserConfig.config.database.options;
 		// check the postgres config
 		const checker = (val: string) => val != null && val !== '';
 		const issue =
 			!checker(config.host) ? 'Missing PostgreSQL Host'
 				: !checker(config.user) ? 'Missing PostgreSQL User'
 					: !checker(config.password) ? 'Missing PostgreSQL Password'
						: !checker(config.database) ? 'Missing PostgreSQL Database'
							// ! Blame VS Code for this weird indentation
							: undefined;
 		return issue;
 	}
 	public open(): Promise<void> {
 		return new Promise(async (resolve, reject) => {
 			try {
 				// config check
@@ -48,7 +47,7 @@ export class PostgreSQLDatabase implements Database {
 				let config = UserConfig.config.database!.options! as PostgresConfiguration;
 				// set up the client
-				this._client = new Client({
+				this._client = new pg.Client({
 					host: config.host,
 					port: config.port,
 					user: config.user,
@@ -89,7 +88,7 @@ export class PostgreSQLDatabase implements Database {
 		return new Promise(async (resolve, reject) => {
 			try {
 				await this._client.query(
 					`CREATE TABLE IF NOT EXISTS asstables (
 						name TEXT PRIMARY KEY,
 						version INT NOT NULL
 					);`);
@@ -104,7 +103,7 @@
 				}
 				const assTableSchema = '(id TEXT PRIMARY KEY, data JSON NOT NULL)'
 				// add missing tables
 				if (!seenRows.has('assfiles')) {
 					log.warn('PostgreSQL', 'assfiles missing, repairing...')
@@ -148,8 +147,8 @@
 		return new Promise(async (resolve, reject) => {
 			try {
 				const queries = {
 					assfiles: 'INSERT INTO assfiles (id, data) VALUES ($1, $2);',
 					assusers: 'INSERT INTO assusers (id, data) VALUES ($1, $2);',
 					asstokens: 'INSERT INTO asstokens (id, data) VALUES ($1, $2);'
 				};
@@ -166,8 +165,8 @@
 		return new Promise(async (resolve, reject) => {
 			try {
 				const queries = {
 					assfiles: 'SELECT data FROM assfiles WHERE id = $1::text;',
 					assusers: 'SELECT data FROM assusers WHERE id = $1::text;',
 					asstokens: 'SELECT data FROM asstokens WHERE id = $1::text;'
 				};
@@ -184,8 +183,8 @@
 		return new Promise(async (resolve, reject) => {
 			try {
 				const queries = {
 					assfiles: 'SELECT json_object_agg(id, data) AS stuff FROM assfiles;',
 					assusers: 'SELECT json_object_agg(id, data) AS stuff FROM assusers;',
 					asstokens: 'SELECT json_object_agg(id, data) AS stuff FROM asstokens;'
 				};
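The pg changes above swap the named `Client` import for the package's default export (`import pg from 'pg'`, `new pg.Client(...)`), presumably because pg is a CommonJS package and the default export is the safest way to reach `Client` from an ESM module. A self-contained sketch of that pattern together with the parameterized queries the adapter uses; connection details are placeholders:

```ts
import pg from 'pg'; // CommonJS package: use the default export under ESM

const client = new pg.Client({
	host: 'localhost',   // placeholder connection details
	port: 5432,
	user: 'ass',
	password: 'secret',
	database: 'ass',
});

export const getFile = async (id: string) => {
	await client.connect();
	// Parameterized query, same shape as the adapter's SELECT statements above
	const result = await client.query('SELECT data FROM assfiles WHERE id = $1::text;', [id]);
	await client.end();
	return result.rows[0]?.data;
};
```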

@@ -1,7 +1,7 @@
 {
 	"extends": "@tsconfig/node20/tsconfig.json",
 	"compilerOptions": {
-		"outDir": "../dist-backend",
+		"outDir": "../dist/backend",
 		"strictPropertyInitialization": false
 	},
 	"include": [

@@ -1,9 +1,10 @@
-const fs = require('fs-extra');
-const { path } = require('@tycrek/joint');
-const log = new (require('@tycrek/log').TLog)();
+import fs from 'fs-extra';
+import { path } from '@tycrek/joint';
+import { TLog } from '@tycrek/log';
+const log = new TLog();
 const FILES = {
-	prefix: 'dist-frontend',
+	prefix: 'dist/frontend',
 	suffix: '.mjs',
 	pages: [
 		'setup',

@@ -1,7 +1,7 @@
 {
 	"extends": "@tsconfig/node20/tsconfig.json",
 	"compilerOptions": {
-		"outDir": "../dist-frontend",
+		"outDir": "../dist/frontend",
 		"lib": [
 			"ES2022",
 			"DOM"

@@ -2,22 +2,19 @@
 	"name": "ass",
 	"version": "0.15.0-indev",
 	"description": "The simple self-hosted ShareX server",
-	"main": "dist-backend/app.js",
+	"main": "dist/backend/app.js",
+	"type": "module",
 	"engines": {
-		"node": "^20",
-		"npm": "^10"
+		"node": "^20"
 	},
 	"scripts": {
 		"dev": "npm run build && npm start",
 		"fresh": "rm -dr .ass-data/ & npm run dev",
-		"build": "rm -dr dist-*/ & npm run build:backend && npm run build:frontend",
+		"build": "rm -dr dist/ & npm run build:backend && npm run build:frontend && npm run build:fix-frontend",
 		"build:backend": "tsc -p backend/",
 		"build:frontend": "tsc -p frontend/",
-		"postbuild:frontend": "node common/fix-frontend-js.js",
-		"start": "node dist-backend/app.js",
-		"cli-setpassword": "node dist/tools/script.setpassword.js",
-		"cli-testpassword": "node dist/tools/script.testpassword.js",
-		"cli-adduser": "node dist/tools/script.adduser.js"
+		"build:fix-frontend": "node common/fix-frontend-js.js",
+		"start": "node dist/backend/app.js"
 	},
 	"repository": "github:tycrek/ass",
 	"keywords": [
@@ -29,13 +26,13 @@
 	"bugs": "https://github.com/tycrek/ass/issues",
 	"homepage": "https://github.com/tycrek/ass#readme",
 	"dependencies": {
-		"@aws-sdk/client-s3": "^3.437.0",
-		"@shoelace-style/shoelace": "^2.11.2",
+		"@aws-sdk/client-s3": "^3.456.0",
+		"@shoelace-style/shoelace": "^2.12.0",
 		"@tinycreek/postcss-font-magician": "^4.2.0",
 		"@tsconfig/node20": "^20.1.2",
 		"@tycrek/discord-hookr": "^0.1.0",
 		"@tycrek/express-postcss": "^0.4.1",
-		"@tycrek/joint": "^1.0.0-1",
+		"@tycrek/joint": "1.0.0-1",
 		"@tycrek/log": "^0.7.5",
 		"@xoi/gps-metadata-remover": "^1.1.2",
 		"any-shell-escape": "^0.1.1",
@@ -45,14 +42,14 @@
 		"cssnano": "^6.0.1",
 		"express": "^4.18.2",
 		"express-busboy": "^10.1.0",
-		"express-rate-limit": "^7.1.3",
+		"express-rate-limit": "^7.1.4",
 		"express-session": "^1.17.3",
 		"ffmpeg-static": "^5.2.0",
 		"fs-extra": "^11.1.1",
-		"luxon": "^3.4.3",
+		"luxon": "^3.4.4",
 		"memorystore": "^1.6.7",
 		"mongoose": "^8.0.0",
-		"mysql2": "^3.6.2",
+		"mysql2": "^3.6.5",
 		"node-vibrant": "^3.1.6",
 		"pg": "^8.11.3",
 		"pug": "^3.0.2",
@@ -60,17 +57,17 @@
 		"shoelace-fontawesome-pug": "^6.4.3",
 		"shoelace-pug-loader": "^2.11.0",
 		"tailwindcss": "^3.3.5",
-		"typescript": "^5.2.2"
+		"typescript": "^5.3.2"
 	},
 	"devDependencies": {
-		"@types/bcrypt": "^5.0.1",
-		"@types/express": "^4.17.20",
-		"@types/express-busboy": "^8.0.2",
-		"@types/express-session": "^1.17.9",
-		"@types/ffmpeg-static": "^3.0.2",
-		"@types/fs-extra": "^11.0.3",
-		"@types/luxon": "^3.3.3",
-		"@types/node": "^20.8.9",
-		"@types/pg": "^8.10.7"
+		"@types/bcrypt": "^5.0.2",
+		"@types/express": "^4.17.21",
+		"@types/express-busboy": "^8.0.3",
+		"@types/express-session": "^1.17.10",
+		"@types/ffmpeg-static": "^3.0.3",
+		"@types/fs-extra": "^11.0.4",
+		"@types/luxon": "^3.3.5",
+		"@types/node": "^20.10.0",
+		"@types/pg": "^8.10.9"
 	}
 }

(File diff suppressed because it is too large)

@@ -41,7 +41,7 @@ const migrate = (authFileName = 'auth.json'): Promise<Users> => new Promise(asyn
 	const oldUsers = fs.readJsonSync(authPath).users as OldUsers;
 	// Create a new users object
-	const newUsers: Users = { users: [], meta: {} };
+	const newUsers: Users = { users: [], meta: {}, cliKey: nanoid(32) };
 	newUsers.migrated = true;
 	// Loop through each user
@@ -69,38 +69,40 @@ const migrate = (authFileName = 'auth.json'): Promise<Users> => new Promise(asyn
 		.catch(reject)
 		// Migrate the datafile (token => uploader)
-		.then(() => data().get())
-		.then((fileData: [string, FileData][]) =>
+		.then(() => (!data())
+			? (log.warn('data.json not found. This may be a new install?'), Promise.resolve())
+			: data().get().then((fileData: [string, FileData][]) =>
 			// ! A note about this block.
 			// I know it's gross. But using Promise.all crashes low-spec servers, so I had to do it this way. Sorry.
 			// Thanks to CoPilot for writing `runQueue` :D
 			// Wait for all the deletions and puts to finish
 			new Promise((resolve, reject) => {
 				// Create a queue of functions to run
 				const queue = fileData.map(([key, file]) => async () => {
 					// We need to use `newUsers` because `users` hasn't been re-assigned yet
 					const user = newUsers.users.find((user) => user.token === file.token!)?.unid ?? ''; // ? This is probably fine
 					// Because of the stupid way I wrote papito, we need to DEL before we can PUT
 					await data().del(key);
 					// PUT the new data
 					return data().put(key, { ...file, uploader: user });
 				});
 				// Recursively run the queue, hopefully sequentially without running out of memory
 				const runQueue = (index: number) => {
 					if (index >= queue.length) return resolve(void 0);
 					queue[index]().then(() => runQueue(index + 1)).catch(reject);
 				};
-				runQueue(0);
-			}))
+				runQueue(0);
+			}))
+			.catch((err: any) => log.warn(err.message))
+		)
 		// We did it hoofuckingray
 		.then(() => log.success('Migrated all auth & file data to new auth system'))
 		.then(() => resolve(newUsers))
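The migration block above deliberately avoids `Promise.all` and instead drains a queue of async thunks one at a time so low-spec servers aren't overwhelmed. A generic sketch of that sequential-queue idea, not tied to the papito data layer:

```ts
// Run async jobs strictly one after another, resolving once the queue is empty.
// Same shape as runQueue in the migration above, just generic.
const runSequentially = (jobs: (() => Promise<unknown>)[]): Promise<void> =>
	new Promise((resolve, reject) => {
		const step = (index: number): void => {
			if (index >= jobs.length) return resolve();
			jobs[index]().then(() => step(index + 1)).catch(reject);
		};
		step(0);
	});

// Usage: the second job only starts after the first has settled.
runSequentially([
	async () => console.log('first'),
	async () => console.log('second'),
]).then(() => console.log('done'));
```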
