fix: run this sequentially to avoid crashing on low-spec servers, maybe

Branch: pull/182/head
Author: tycrek · 1 year ago
Parent: 93dc91a761 · Commit: 084b5fba3a
GPG signature: No known key found for this signature in database (Key ID: FF8A54DCE404885A)

@@ -70,20 +70,37 @@ const migrate = (authFileName = 'auth.json'): Promise<Users> => new Promise(asyn
 	// Migrate the datafile (token => uploader)
 	.then(() => data().get())
-	.then((fileData: [string, FileData][]) =>
-		// Wait for all the deletions and puts to finish
-		Promise.all(fileData.map(async ([key, file]) => {
-			// We need to use `newUsers` because `users` hasn't been re-assigned yet
-			const user = newUsers.users.find((user) => user.token === file.token!)?.unid ?? ''; // ? This is probably fine
-			// Because of the stupid way I wrote papito, we need to DEL before we can PUT
-			await data().del(key);
-			// PUT the new data
-			return data().put(key, { ...file, uploader: user });
-		})))
+	.then((fileData: [string, FileData][]) => {
+		// ! A note about this block.
+		// I know it's gross. But using Promise.all crashes low-spec servers, so I had to do it this way. Sorry.
+		// Thanks to CoPilot for writing `runQueue` :D
+		return new Promise((resolve, reject) => {
+			// Create a queue of functions to run
+			const queue = fileData.map(([key, file]) => async () => {
+				// We need to use `newUsers` because `users` hasn't been re-assigned yet
+				const user = newUsers.users.find((user) => user.token === file.token!)?.unid ?? ''; // ? This is probably fine
+				// Because of the stupid way I wrote papito, we need to DEL before we can PUT
+				await data().del(key);
+				// PUT the new data
+				return data().put(key, { ...file, uploader: user });
+			});
+			// Recursively run the queue, hopefully sequentially without running out of memory
+			const runQueue = (index: number) => {
+				if (index >= queue.length) return resolve(void 0);
+				queue[index]().then(() => runQueue(index + 1)).catch(reject);
+			};
+			runQueue(0);
+		});
+	})
 	// We did it hoofuckingray
 	.then(() => log.success('Migrated all auth & file data to new auth system'))
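For reference, the change above boils down to a plain sequential promise queue: each migration step becomes an async thunk, and each thunk only starts after the previous one settles, so only one DEL/PUT pair is in flight at a time instead of fileData.length concurrent ones. A minimal sketch of the same pattern in TypeScript, assuming a hypothetical migrateItem() in place of the real data().del()/data().put() calls:

// A sketch of the sequential-queue pattern, assuming a hypothetical
// migrateItem() standing in for the real data().del()/data().put() calls.
type Thunk = () => Promise<void>;

const migrateItem = async (key: string): Promise<void> => {
	// Stand-in for: await data().del(key); then data().put(key, ...)
	console.log(`migrated ${key}`);
};

// Build the queue of thunks up front; nothing runs until runQueue() invokes one.
const keys = ['file-1', 'file-2', 'file-3'];
const queue: Thunk[] = keys.map((key) => () => migrateItem(key));

// Run one thunk at a time. The recursive call happens inside a .then()
// callback (a fresh microtask), so the call stack does not grow with
// queue length.
const runQueue = (index: number): Promise<void> =>
	index >= queue.length
		? Promise.resolve()
		: queue[index]().then(() => runQueue(index + 1));

runQueue(0).then(() => console.log('queue drained'));

The same sequencing can also be written without recursion as `for (const thunk of queue) await thunk();` inside an async function; either way, only one operation is pending at a time, which is what keeps memory bounded on low-spec servers.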
