Skip to content

Commit

Permalink
feat: delete stale backups older than 4h, added sqlite cleanup script
Browse files Browse the repository at this point in the history
  • Loading branch information
titanism committed Nov 14, 2023
1 parent fa68ea7 commit c1c4231
Show file tree
Hide file tree
Showing 5 changed files with 182 additions and 4 deletions.
11 changes: 11 additions & 0 deletions ecosystem-sqlite.json
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,17 @@
"env_production": {
"NODE_ENV": "production"
}
},
{
"name": "sqlite-bree",
"script": "sqlite-bree.js",
"exec_mode": "fork",
"wait_ready": true,
"instances": "1",
"pmx": false,
"env_production": {
"NODE_ENV": "production"
}
}
],
"deploy": {
Expand Down
122 changes: 122 additions & 0 deletions jobs/sqlite-cleanup.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,122 @@
/**
 * Copyright (c) Forward Email LLC
 * SPDX-License-Identifier: BUSL-1.1
 */

// eslint-disable-next-line import/no-unassigned-import
require('#config/env');

const fs = require('node:fs');
const path = require('node:path');
const process = require('node:process');
const { parentPort } = require('node:worker_threads');

// eslint-disable-next-line import/no-unassigned-import
require('#config/mongoose');

const Graceful = require('@ladjs/graceful');
const Redis = require('@ladjs/redis');
const mongoose = require('mongoose');
const ms = require('ms');
const parseErr = require('parse-err');
const sharedConfig = require('@ladjs/shared-config');

const config = require('#config');
const emailHelper = require('#helpers/email');
const logger = require('#helpers/logger');
const setupMongoose = require('#helpers/setup-mongoose');

const breeSharedConfig = sharedConfig('BREE');
const client = new Redis(breeSharedConfig.redis, logger);

const graceful = new Graceful({
  mongooses: [mongoose],
  redisClients: [client],
  logger
});

// store boolean if the job is cancelled
let isCancelled = false;

// handle cancellation (this is a very simple example)
if (parentPort)
  parentPort.once('message', (message) => {
    //
    // TODO: once we can manipulate concurrency option to p-map
    // we could make it `Number.MAX_VALUE` here to speed cancellation up
    // <https://github.com/sindresorhus/p-map/issues/28>
    //
    if (message === 'cancel') {
      isCancelled = true;
    }
  });

graceful.listen();

//
// find all files that end with:
// - `-backup.sqlite`
// - `-backup-wal.sqlite`
// - `-backup-shm.sqlite`
//
// (at most one affix can match a given basename, since none is a
// suffix of another)
//
const AFFIXES = ['-backup', '-backup-wal', '-backup-shm'];

// delete any backups with a modification time at least this old
const STALE_AFTER = ms('4h');

(async () => {
  await setupMongoose(logger);

  try {
    if (isCancelled) return;

    const dirents = await fs.promises.readdir('/mnt', {
      withFileTypes: true
    });

    for (const dirent of dirents) {
      // honor cancellation mid-scan; previously the flag was only read
      // once up-front, so a long directory walk could never be interrupted
      if (isCancelled) break;
      if (!dirent.isDirectory()) continue;
      // eslint-disable-next-line no-await-in-loop
      const files = await fs.promises.readdir(path.join('/mnt', dirent.name), {
        withFileTypes: true
      });
      for (const file of files) {
        if (isCancelled) break;
        if (!file.isFile()) continue;
        if (path.extname(file.name) !== '.sqlite') continue;
        const basename = path.basename(file.name, path.extname(file.name));
        for (const affix of AFFIXES) {
          if (!basename.endsWith(affix)) continue;
          const filePath = path.join('/mnt', dirent.name, file.name);
          try {
            // eslint-disable-next-line no-await-in-loop
            const stat = await fs.promises.stat(filePath);
            if (stat.isFile()) {
              // delete any backups that are 4h+ old
              if (stat.mtimeMs && stat.mtimeMs <= Date.now() - STALE_AFTER) {
                // eslint-disable-next-line no-await-in-loop
                await fs.promises.unlink(filePath);
              }
            }
          } catch (err) {
            // the file may have been removed by another process between
            // `readdir` and `stat`/`unlink` (e.g. sqlite-server finishing
            // a backup); that race is benign, so don't abort the whole
            // run and trigger the alert email for it
            if (err.code !== 'ENOENT') throw err;
            logger.debug(err);
          }

          break;
        }
      }
    }
  } catch (err) {
    await logger.error(err);

    await emailHelper({
      template: 'alert',
      message: {
        to: config.email.message.from,
        subject: 'SQLite cleanup had an error'
      },
      locals: {
        message: `<pre><code>${JSON.stringify(
          parseErr(err),
          null,
          2
        )}</code></pre>`
      }
    });
  }

  if (parentPort) parentPort.postMessage('done');
  else process.exit(0);
})();
4 changes: 2 additions & 2 deletions smtp-bree.js
Original file line number Diff line number Diff line change
Expand Up @@ -18,8 +18,8 @@ const bree = new Bree({
//
// this is a long running job, but we attempt to restart it
// every 30s in case errors (e.g. uncaught exception edge case causes `process.exit()`)
// this job has built-in setInterval for every 10m to unlock emails in queue
// and we will also retry deferred emails and put them back into the queue
// this job is recursive and calls its main function to keep itself running (and sending emails)
// (for putting emails back into queue that are frozen or need to be retried, see jobs/unlock-emails)
//
name: 'send-emails',
interval: '30s',
Expand Down
42 changes: 42 additions & 0 deletions sqlite-bree.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,42 @@
/**
 * Copyright (c) Forward Email LLC
 * SPDX-License-Identifier: BUSL-1.1
 */

// eslint-disable-next-line import/no-unassigned-import
require('#config/env');

const Bree = require('bree');
const Graceful = require('@ladjs/graceful');

const logger = require('#helpers/logger');

// dedicated Bree scheduler for SQLite maintenance jobs
// (run as its own PM2 process, see ecosystem-sqlite.json)
const bree = new Bree({
  logger,
  jobs: [
    {
      //
      // this is a long running job, but we attempt to restart it
      // every 30s in case errors (e.g. uncaught exception edge case causes `process.exit()`)
      // this job cleans up any backup artifacts from 4h+ ago for 'rekey' and 'backup' sqlite-server cases
      // (e.g. the server stopped mid-backup or an error occurred, e.g. ran out of memory)
      // and in an attempt to save on disk storage, we will run `fs.unlink` on each of these files
      //
      name: 'sqlite-cleanup',
      interval: '30s',
      timeout: 0
    }
  ]
});

// stop the bree instance cleanly on shutdown signals
const graceful = new Graceful({
  brees: [bree],
  logger
});
graceful.listen();

(async () => {
  await bree.start();
})();

logger.info('SQLite bree started', { hide_meta: true });
7 changes: 5 additions & 2 deletions sqlite-server.js
Original file line number Diff line number Diff line change
Expand Up @@ -977,7 +977,7 @@ async function parsePayload(data, ws) {
// create backup
const tmp = path.join(
path.dirname(storagePath),
`${payload.id}.sqlite`
`${payload.id}-backup.sqlite`
);
const results = await db.backup(tmp);
let backup = true;
Expand Down Expand Up @@ -1106,7 +1106,10 @@ async function parsePayload(data, ws) {
storage_location: payload.session.user.storage_location
});
const diskSpace = await checkDiskSpace(storagePath);
tmp = path.join(path.dirname(storagePath), `${payload.id}.sqlite`);
tmp = path.join(
path.dirname(storagePath),
`${payload.id}-backup.sqlite`
);

// <https://github.com/nodejs/node/issues/38006>
const stats = await fs.promises.stat(storagePath);
Expand Down

0 comments on commit c1c4231

Please sign in to comment.