Add scheduled database backups (daily/weekly/monthly cron jobs, pg_dump → gzip → S3 upload)
This commit is contained in:
parent
e22841062a
commit
3021f0b6f3
@ -6,7 +6,15 @@ invariant(
|
||||
`Please define the "INVITATION_TOKEN_SECRET" environment variable`,
|
||||
);
|
||||
invariant(typeof process.env.SESSION_SECRET === "string", `Please define the "SESSION_SECRET" environment variable`);
|
||||
invariant(typeof process.env.AWS_SES_REGION === "string", `Please define the "AWS_SES_REGION" environment variable`);
|
||||
invariant(typeof process.env.AWS_REGION === "string", `Please define the "AWS_REGION" environment variable`);
|
||||
invariant(
|
||||
typeof process.env.AWS_S3_ACCESS_KEY_ID === "string",
|
||||
`Please define the "AWS_S3_ACCESS_KEY_ID" environment variable`,
|
||||
);
|
||||
invariant(
|
||||
typeof process.env.AWS_S3_ACCESS_KEY_SECRET === "string",
|
||||
`Please define the "AWS_S3_ACCESS_KEY_SECRET" environment variable`,
|
||||
);
|
||||
invariant(
|
||||
typeof process.env.AWS_SES_ACCESS_KEY_ID === "string",
|
||||
`Please define the "AWS_SES_ACCESS_KEY_ID" environment variable`,
|
||||
@ -41,12 +49,18 @@ export default {
|
||||
sessionSecret: process.env.SESSION_SECRET,
|
||||
encryptionKey: process.env.MASTER_ENCRYPTION_KEY,
|
||||
},
|
||||
awsSes: {
|
||||
awsRegion: process.env.AWS_SES_REGION,
|
||||
aws: {
|
||||
region: process.env.AWS_REGION,
|
||||
ses: {
|
||||
accessKeyId: process.env.AWS_SES_ACCESS_KEY_ID,
|
||||
secretAccessKey: process.env.AWS_SES_ACCESS_KEY_SECRET,
|
||||
fromEmail: process.env.AWS_SES_FROM_EMAIL,
|
||||
},
|
||||
s3: {
|
||||
accessKeyId: process.env.AWS_S3_ACCESS_KEY_ID,
|
||||
secretAccessKey: process.env.AWS_S3_ACCESS_KEY_SECRET,
|
||||
},
|
||||
},
|
||||
fathom: {
|
||||
siteId: process.env.FATHOM_SITE_ID,
|
||||
domain: process.env.FATHOM_CUSTOM_DOMAIN,
|
||||
|
4
app/cron-jobs/daily-backup.ts
Normal file
4
app/cron-jobs/daily-backup.ts
Normal file
@ -0,0 +1,4 @@
|
||||
import { CronJob } from "~/utils/queue.server";
|
||||
import backup from "~/utils/backup-db.server";
|
||||
|
||||
export default CronJob("daily db backup", () => backup("daily"), "0 0 * * *");
|
4
app/cron-jobs/monthly-backup.ts
Normal file
4
app/cron-jobs/monthly-backup.ts
Normal file
@ -0,0 +1,4 @@
|
||||
import { CronJob } from "~/utils/queue.server";
|
||||
import backup from "~/utils/backup-db.server";
|
||||
|
||||
export default CronJob("monthly db backup", () => backup("monthly"), "0 0 1 * *");
|
4
app/cron-jobs/weekly-backup.ts
Normal file
4
app/cron-jobs/weekly-backup.ts
Normal file
@ -0,0 +1,4 @@
|
||||
import { CronJob } from "~/utils/queue.server";
|
||||
import backup from "~/utils/backup-db.server";
|
||||
|
||||
export default CronJob("weekly db backup", () => backup("weekly"), "0 0 * * 0");
|
95
app/utils/backup-db.server.ts
Normal file
95
app/utils/backup-db.server.ts
Normal file
@ -0,0 +1,95 @@
|
||||
import { spawn } from "child_process";
|
||||
import { PassThrough } from "stream";
|
||||
|
||||
import logger from "~/utils/logger.server";
|
||||
import config from "~/config/config.server";
|
||||
import { Credentials, S3 } from "aws-sdk";
|
||||
import sendEmail from "~/utils/mailer.server";
|
||||
|
||||
const credentials = new Credentials({
|
||||
accessKeyId: config.aws.s3.accessKeyId,
|
||||
secretAccessKey: config.aws.s3.secretAccessKey,
|
||||
});
|
||||
|
||||
export const s3 = new S3({ region: config.aws.region, credentials });
|
||||
|
||||
export default async function backup(schedule: "daily" | "weekly" | "monthly") {
|
||||
const s3Bucket = "shellphone-backups";
|
||||
const { database, host, port, user, password } = parseDatabaseUrl(process.env.DATABASE_URL!);
|
||||
const fileName = `${schedule}-${database}.sql.gz`;
|
||||
|
||||
console.log(`Dumping database ${database}`);
|
||||
const pgDumpChild = spawn("pg_dump", [`-U${user}`, `-d${database}`], {
|
||||
env: {
|
||||
...process.env,
|
||||
PGPASSWORD: password,
|
||||
PGHOST: host,
|
||||
PGPORT: port.toString(),
|
||||
},
|
||||
stdio: ["ignore", "pipe", "inherit"],
|
||||
});
|
||||
|
||||
console.log(`Compressing dump "${fileName}"`);
|
||||
const gzippedDumpStream = new PassThrough();
|
||||
const gzipChild = spawn("gzip", { stdio: ["pipe", "pipe", "inherit"] });
|
||||
gzipChild.on("exit", (code) => {
|
||||
if (code !== 0) {
|
||||
return sendEmail({
|
||||
text: `${schedule} backup failed: gzip: Bad exit code (${code})`,
|
||||
html: `${schedule} backup failed: gzip: Bad exit code (${code})`,
|
||||
subject: `${schedule} backup failed: gzip: Bad exit code (${code})`,
|
||||
recipients: ["error@shellphone.app"],
|
||||
});
|
||||
}
|
||||
});
|
||||
pgDumpChild.stdout.pipe(gzipChild.stdin);
|
||||
gzipChild.stdout.pipe(gzippedDumpStream);
|
||||
|
||||
pgDumpChild.on("exit", (code) => {
|
||||
if (code !== 0) {
|
||||
console.log("pg_dump failed, upload aborted");
|
||||
return sendEmail({
|
||||
text: `${schedule} backup failed: pg_dump: Bad exit code (${code})`,
|
||||
html: `${schedule} backup failed: pg_dump: Bad exit code (${code})`,
|
||||
subject: `${schedule} backup failed: pg_dump: Bad exit code (${code})`,
|
||||
recipients: ["error@shellphone.app"],
|
||||
});
|
||||
}
|
||||
|
||||
console.log(`Uploading "${fileName}" to S3 bucket "${s3Bucket}"`);
|
||||
const uploadPromise = s3
|
||||
.upload({
|
||||
Bucket: s3Bucket,
|
||||
Key: fileName,
|
||||
ACL: "private",
|
||||
ContentType: "text/plain",
|
||||
ContentEncoding: "gzip",
|
||||
Body: gzippedDumpStream,
|
||||
})
|
||||
.promise();
|
||||
|
||||
uploadPromise
|
||||
.then(() => console.log(`Successfully uploaded "${fileName}"`))
|
||||
.catch((error) => {
|
||||
logger.error(error);
|
||||
return sendEmail({
|
||||
text: `${schedule} backup failed: ${error}`,
|
||||
html: `${schedule} backup failed: ${error}`,
|
||||
subject: `${schedule} backup failed: ${error}`,
|
||||
recipients: ["error@shellphone.app"],
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
function parseDatabaseUrl(databaseUrl: string) {
|
||||
const url = new URL(databaseUrl);
|
||||
|
||||
return {
|
||||
user: url.username,
|
||||
password: url.password,
|
||||
host: url.host,
|
||||
port: Number.parseInt(url.port),
|
||||
database: url.pathname.replace(/^\//, "").replace(/\/$/, ""),
|
||||
} as const;
|
||||
}
|
@ -18,7 +18,7 @@ export default async function sendEmail({ text, html, subject, recipients }: Sen
|
||||
subject,
|
||||
encoding: "UTF-8",
|
||||
to: recipients,
|
||||
from: serverConfig.awsSes.fromEmail,
|
||||
from: serverConfig.aws.ses.fromEmail,
|
||||
};
|
||||
|
||||
if (process.env.NODE_ENV !== "production" || process.env.CI) {
|
||||
@ -27,10 +27,10 @@ export default async function sendEmail({ text, html, subject, recipients }: Sen
|
||||
|
||||
const transporter = createTransport({
|
||||
SES: new SES({
|
||||
region: serverConfig.awsSes.awsRegion,
|
||||
region: serverConfig.aws.region,
|
||||
credentials: new Credentials({
|
||||
accessKeyId: serverConfig.awsSes.accessKeyId,
|
||||
secretAccessKey: serverConfig.awsSes.secretAccessKey,
|
||||
accessKeyId: serverConfig.aws.ses.accessKeyId,
|
||||
secretAccessKey: serverConfig.aws.ses.secretAccessKey,
|
||||
}),
|
||||
}),
|
||||
});
|
||||
|
Loading…
Reference in New Issue
Block a user