From 3021f0b6f392864ac2239dbcef615f4921c9c963 Mon Sep 17 00:00:00 2001
From: m5r
Date: Sun, 3 Jul 2022 15:42:31 +0200
Subject: [PATCH] db backups

---
 app/config/config.server.ts     | 26 ++++++---
 app/cron-jobs/daily-backup.ts   |  4 ++
 app/cron-jobs/monthly-backup.ts |  4 ++
 app/cron-jobs/weekly-backup.ts  |  4 ++
 app/utils/backup-db.server.ts   | 95 +++++++++++++++++++++++++++++++++
 app/utils/mailer.server.ts      |  8 +--
 fly.toml                        |  2 +-
 7 files changed, 132 insertions(+), 11 deletions(-)
 create mode 100644 app/cron-jobs/daily-backup.ts
 create mode 100644 app/cron-jobs/monthly-backup.ts
 create mode 100644 app/cron-jobs/weekly-backup.ts
 create mode 100644 app/utils/backup-db.server.ts

diff --git a/app/config/config.server.ts b/app/config/config.server.ts
index 7dab7f4..ad3712d 100644
--- a/app/config/config.server.ts
+++ b/app/config/config.server.ts
@@ -6,7 +6,15 @@ invariant(
 	`Please define the "INVITATION_TOKEN_SECRET" environment variable`,
 );
 invariant(typeof process.env.SESSION_SECRET === "string", `Please define the "SESSION_SECRET" environment variable`);
-invariant(typeof process.env.AWS_SES_REGION === "string", `Please define the "AWS_SES_REGION" environment variable`);
+invariant(typeof process.env.AWS_REGION === "string", `Please define the "AWS_REGION" environment variable`);
+invariant(
+	typeof process.env.AWS_S3_ACCESS_KEY_ID === "string",
+	`Please define the "AWS_S3_ACCESS_KEY_ID" environment variable`,
+);
+invariant(
+	typeof process.env.AWS_S3_ACCESS_KEY_SECRET === "string",
+	`Please define the "AWS_S3_ACCESS_KEY_SECRET" environment variable`,
+);
 invariant(
 	typeof process.env.AWS_SES_ACCESS_KEY_ID === "string",
 	`Please define the "AWS_SES_ACCESS_KEY_ID" environment variable`,
@@ -41,11 +49,17 @@ export default {
 		sessionSecret: process.env.SESSION_SECRET,
 		encryptionKey: process.env.MASTER_ENCRYPTION_KEY,
 	},
-	awsSes: {
-		awsRegion: process.env.AWS_SES_REGION,
-		accessKeyId: process.env.AWS_SES_ACCESS_KEY_ID,
-		secretAccessKey: process.env.AWS_SES_ACCESS_KEY_SECRET,
-		fromEmail: process.env.AWS_SES_FROM_EMAIL,
+	aws: {
+		region: process.env.AWS_REGION,
+		ses: {
+			accessKeyId: process.env.AWS_SES_ACCESS_KEY_ID,
+			secretAccessKey: process.env.AWS_SES_ACCESS_KEY_SECRET,
+			fromEmail: process.env.AWS_SES_FROM_EMAIL,
+		},
+		s3: {
+			accessKeyId: process.env.AWS_S3_ACCESS_KEY_ID,
+			secretAccessKey: process.env.AWS_S3_ACCESS_KEY_SECRET,
+		},
 	},
 	fathom: {
 		siteId: process.env.FATHOM_SITE_ID,
diff --git a/app/cron-jobs/daily-backup.ts b/app/cron-jobs/daily-backup.ts
new file mode 100644
index 0000000..0a65e15
--- /dev/null
+++ b/app/cron-jobs/daily-backup.ts
@@ -0,0 +1,4 @@
+import { CronJob } from "~/utils/queue.server";
+import backup from "~/utils/backup-db.server";
+
+export default CronJob("daily db backup", () => backup("daily"), "0 0 * * *");
diff --git a/app/cron-jobs/monthly-backup.ts b/app/cron-jobs/monthly-backup.ts
new file mode 100644
index 0000000..a401dae
--- /dev/null
+++ b/app/cron-jobs/monthly-backup.ts
@@ -0,0 +1,4 @@
+import { CronJob } from "~/utils/queue.server";
+import backup from "~/utils/backup-db.server";
+
+export default CronJob("monthly db backup", () => backup("monthly"), "0 0 1 * *");
diff --git a/app/cron-jobs/weekly-backup.ts b/app/cron-jobs/weekly-backup.ts
new file mode 100644
index 0000000..e4b9800
--- /dev/null
+++ b/app/cron-jobs/weekly-backup.ts
@@ -0,0 +1,4 @@
+import { CronJob } from "~/utils/queue.server";
+import backup from "~/utils/backup-db.server";
+
+export default CronJob("weekly db backup", () => backup("weekly"), "0 0 * * 0");
diff --git a/app/utils/backup-db.server.ts b/app/utils/backup-db.server.ts
new file mode 100644
index 0000000..a0019cf
--- /dev/null
+++ b/app/utils/backup-db.server.ts
@@ -0,0 +1,95 @@
+import { spawn } from "child_process";
+import { PassThrough } from "stream";
+
+import logger from "~/utils/logger.server";
+import config from "~/config/config.server";
+import { Credentials, S3 } from "aws-sdk";
+import sendEmail from "~/utils/mailer.server";
+
+const credentials = new Credentials({
+	accessKeyId: config.aws.s3.accessKeyId,
+	secretAccessKey: config.aws.s3.secretAccessKey,
+});
+
+export const s3 = new S3({ region: config.aws.region, credentials });
+
+export default async function backup(schedule: "daily" | "weekly" | "monthly") {
+	const s3Bucket = "shellphone-backups";
+	const { database, host, port, user, password } = parseDatabaseUrl(process.env.DATABASE_URL!);
+	const fileName = `${schedule}-${database}.sql.gz`;
+
+	console.log(`Dumping database ${database}`);
+	const pgDumpChild = spawn("pg_dump", [`-U${user}`, `-d${database}`], {
+		env: {
+			...process.env,
+			PGPASSWORD: password,
+			PGHOST: host,
+			PGPORT: port.toString(),
+		},
+		stdio: ["ignore", "pipe", "inherit"],
+	});
+
+	console.log(`Compressing dump "${fileName}"`);
+	const gzippedDumpStream = new PassThrough();
+	const gzipChild = spawn("gzip", { stdio: ["pipe", "pipe", "inherit"] });
+	gzipChild.on("exit", (code) => {
+		if (code !== 0) {
+			return sendEmail({
+				text: `${schedule} backup failed: gzip: Bad exit code (${code})`,
+				html: `${schedule} backup failed: gzip: Bad exit code (${code})`,
+				subject: `${schedule} backup failed: gzip: Bad exit code (${code})`,
+				recipients: ["error@shellphone.app"],
+			});
+		}
+	});
+	pgDumpChild.stdout.pipe(gzipChild.stdin);
+	gzipChild.stdout.pipe(gzippedDumpStream);
+
+	pgDumpChild.on("exit", (code) => {
+		if (code !== 0) {
+			console.log("pg_dump failed, upload aborted");
+			return sendEmail({
+				text: `${schedule} backup failed: pg_dump: Bad exit code (${code})`,
+				html: `${schedule} backup failed: pg_dump: Bad exit code (${code})`,
+				subject: `${schedule} backup failed: pg_dump: Bad exit code (${code})`,
+				recipients: ["error@shellphone.app"],
+			});
+		}
+
+		console.log(`Uploading "${fileName}" to S3 bucket "${s3Bucket}"`);
+		const uploadPromise = s3
+			.upload({
+				Bucket: s3Bucket,
+				Key: fileName,
+				ACL: "private",
+				ContentType: "text/plain",
+				ContentEncoding: "gzip",
+				Body: gzippedDumpStream,
+			})
+			.promise();
+
+		uploadPromise
+			.then(() => console.log(`Successfully uploaded "${fileName}"`))
+			.catch((error) => {
+				logger.error(error);
+				return sendEmail({
+					text: `${schedule} backup failed: ${error}`,
+					html: `${schedule} backup failed: ${error}`,
+					subject: `${schedule} backup failed: ${error}`,
+					recipients: ["error@shellphone.app"],
+				});
+			});
+	});
+}
+
+function parseDatabaseUrl(databaseUrl: string) {
+	const url = new URL(databaseUrl);
+
+	return {
+		user: url.username,
+		password: url.password,
+		host: url.host,
+		port: Number.parseInt(url.port),
+		database: url.pathname.replace(/^\//, "").replace(/\/$/, ""),
+	} as const;
+}
diff --git a/app/utils/mailer.server.ts b/app/utils/mailer.server.ts
index ee298a8..b142168 100644
--- a/app/utils/mailer.server.ts
+++ b/app/utils/mailer.server.ts
@@ -18,7 +18,7 @@ export default async function sendEmail({ text, html, subject, recipients }: Sen
 		subject,
 		encoding: "UTF-8",
 		to: recipients,
-		from: serverConfig.awsSes.fromEmail,
+		from: serverConfig.aws.ses.fromEmail,
 	};
 
 	if (process.env.NODE_ENV !== "production" || process.env.CI) {
@@ -27,10 +27,10 @@ export default async function sendEmail({ text, html, subject, recipients }: Sen
 
 	const transporter = createTransport({
 		SES: new SES({
-			region: serverConfig.awsSes.awsRegion,
+			region: serverConfig.aws.region,
 			credentials: new Credentials({
-				accessKeyId: serverConfig.awsSes.accessKeyId,
-				secretAccessKey: serverConfig.awsSes.secretAccessKey,
+				accessKeyId: serverConfig.aws.ses.accessKeyId,
+				secretAccessKey: serverConfig.aws.ses.secretAccessKey,
 			}),
 		}),
 	});
diff --git a/fly.toml b/fly.toml
index 37da5f7..787529a 100644
--- a/fly.toml
+++ b/fly.toml
@@ -9,7 +9,7 @@ processes = []
 [env]
   APP_BASE_URL = "https://www.shellphone.app"
   AWS_SES_FROM_EMAIL = "\"Mokhtar from Shellphone\" "
-  AWS_SES_REGION = "eu-central-1"
+  AWS_REGION = "eu-central-1"
   NODE_ENV = "production"
   PORT = "8080"
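Note: the new files under app/cron-jobs/ import a CronJob helper from "~/utils/queue.server", which is not part of this patch. The sketch below is a hypothetical illustration of the shape that helper needs for these jobs to run (a name, a handler, and a cron expression); it assumes a node-cron style scheduler, and the actual queue.server implementation may be backed by a different queue or scheduler.

// Hypothetical sketch only -- not taken from this patch; the real
// app/utils/queue.server.ts may look different.
import { schedule, type ScheduledTask } from "node-cron";

// Matches the call shape used by app/cron-jobs/*.ts:
// CronJob("daily db backup", () => backup("daily"), "0 0 * * *")
export function CronJob(name: string, handler: () => unknown, cronExpression: string): ScheduledTask {
	// Run the handler on the given cron expression; log failures instead of crashing the process.
	return schedule(cronExpression, async () => {
		try {
			await handler();
		} catch (error) {
			console.error(`cron job "${name}" failed`, error);
		}
	});
}

With a helper of that shape, the daily job in this patch ("0 0 * * *") runs backup("daily") at midnight every day, the weekly job ("0 0 * * 0") runs every Sunday at midnight, and the monthly job ("0 0 1 * *") runs at midnight on the first day of each month.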