feat: Add script to restore db from a backup #1

Open · wants to merge 1 commit into main

1 change: 1 addition & 0 deletions .env.example
@@ -0,0 +1 @@
RUN_MODE=backup/restore
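
Note on the example value above: the RUN_MODE definition added to src/env.ts later in this diff restricts the variable to the choices "backup" and "restore", so envsafe would reject the literal value backup/restore at startup; the line is presumably meant to document the two allowed modes rather than to be copied verbatim. A restore-mode environment would look something like the following (the connection string and file name are illustrative placeholders; the variable names are the ones added in this PR):

# Restore mode -- values below are placeholders
RUN_MODE=restore
RESTORE_DATABASE_URL=postgresql://user:password@db-host:5432/target_db
RESTORE_FILE_NAME=backup-2024-01-01T00-00-00-000Z.tar.gz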
97 changes: 57 additions & 40 deletions src/backup.ts
@@ -1,5 +1,9 @@
import { exec, execSync } from "child_process";
import { S3Client, S3ClientConfig, PutObjectCommandInput } from "@aws-sdk/client-s3";
import {
S3Client,
S3ClientConfig,
PutObjectCommandInput,
} from "@aws-sdk/client-s3";
import { Upload } from "@aws-sdk/lib-storage";
import { createReadStream, unlink, statSync } from "fs";
import { filesize } from "filesize";
@@ -9,15 +13,15 @@ import os from "os";
import { env } from "./env.js";
import { createMD5 } from "./util.js";

const uploadToS3 = async ({ name, path }: { name: string, path: string }) => {
const uploadToS3 = async ({ name, path }: { name: string; path: string }) => {
console.log("Uploading backup to S3...");

const bucket = env.AWS_S3_BUCKET;

const clientOptions: S3ClientConfig = {
region: env.AWS_S3_REGION,
forcePathStyle: env.AWS_S3_FORCE_PATH_STYLE
}
forcePathStyle: env.AWS_S3_FORCE_PATH_STYLE,
};

if (env.AWS_S3_ENDPOINT) {
console.log(`Using custom endpoint: ${env.AWS_S3_ENDPOINT}`);
@@ -33,7 +37,7 @@ const uploadToS3 = async ({ name, path }: { name: string, path: string }) => {
Bucket: bucket,
Key: name,
Body: createReadStream(path),
}
};

if (env.SUPPORT_OBJECT_LOCK) {
console.log("MD5 hashing file...");
@@ -42,57 +46,70 @@ const uploadToS3 = async ({ name, path }: { name: string, path: string }) => {

console.log("Done hashing file");

params.ContentMD5 = Buffer.from(md5Hash, 'hex').toString('base64');
params.ContentMD5 = Buffer.from(md5Hash, "hex").toString("base64");
}

const client = new S3Client(clientOptions);

await new Upload({
client,
params: params
params: params,
}).done();

console.log("Backup uploaded to S3...");
}
};

const dumpToFile = async (filePath: string) => {
console.log("Dumping DB to file...");

await new Promise((resolve, reject) => {
exec(`pg_dump --dbname=${env.BACKUP_DATABASE_URL} --format=tar ${env.BACKUP_OPTIONS} | gzip > ${filePath}`, (error, stdout, stderr) => {
if (error) {
reject({ error: error, stderr: stderr.trimEnd() });
return;
}

// check if archive is valid and contains data
const isValidArchive = (execSync(`gzip -cd ${filePath} | head -c1`).length == 1) ? true : false;
if (isValidArchive == false) {
reject({ error: "Backup archive file is invalid or empty; check for errors above" });
return;
}

// not all text in stderr will be a critical error, print the error / warning
if (stderr != "") {
console.log({ stderr: stderr.trimEnd() });
exec(
`pg_dump --dbname=${env.BACKUP_DATABASE_URL} --format=tar ${env.BACKUP_OPTIONS} | gzip > ${filePath}`,
(error, stdout, stderr) => {
if (error) {
reject({ error: error, stderr: stderr.trimEnd() });
return;
}

// check if archive is valid and contains data
const isValidArchive =
execSync(`gzip -cd ${filePath} | head -c1`).length == 1
? true
: false;
if (isValidArchive == false) {
reject({
error:
"Backup archive file is invalid or empty; check for errors above",
});
return;
}

// not all text in stderr will be a critical error, print the error / warning
if (stderr != "") {
console.log({ stderr: stderr.trimEnd() });
}

console.log("Backup archive file is valid");
console.log("Backup filesize:", filesize(statSync(filePath).size));

// if stderr contains text, let the user know that it was potentially just a warning message
if (stderr != "") {
console.log(
`Potential warnings detected; Please ensure the backup file "${path.basename(
filePath
)}" contains all needed data`
);
}

resolve(undefined);
}

console.log("Backup archive file is valid");
console.log("Backup filesize:", filesize(statSync(filePath).size));

// if stderr contains text, let the user know that it was potentially just a warning message
if (stderr != "") {
console.log(`Potential warnings detected; Please ensure the backup file "${path.basename(filePath)}" contains all needed data`);
}

resolve(undefined);
});
);
});

console.log("DB dumped to file...");
}
};

const deleteFile = async (path: string) => {
export const deleteFile = async (path: string) => {
console.log("Deleting file...");
await new Promise((resolve, reject) => {
unlink(path, (err) => {
@@ -101,13 +118,13 @@ const deleteFile = async (path: string) => {
});
resolve(undefined);
});
}
};

export const backup = async () => {
console.log("Initiating DB backup...");

const date = new Date().toISOString();
const timestamp = date.replace(/[:.]+/g, '-');
const timestamp = date.replace(/[:.]+/g, "-");
const filename = `${env.BACKUP_FILE_PREFIX}-${timestamp}.tar.gz`;
const filepath = path.join(os.tmpdir(), filename);

@@ -116,4 +133,4 @@ export const backup = async () => {
await deleteFile(filepath);

console.log("DB backup complete...");
}
};
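
Aside from the mechanical formatting changes in this file (double quotes, trailing commas, semicolons after arrow-function assignments), the one functional change visible in the diff is that deleteFile is now exported. A reasonable reading is that the new restore module reuses it to clean up the downloaded archive; that reuse would look roughly like this (downloadedArchivePath is a hypothetical variable, since src/restore.ts is not visible in this view):

import { deleteFile } from "./backup.js";

// Remove the downloaded archive from the temp directory once the restore finishes.
await deleteFile(downloadedArchivePath);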
55 changes: 35 additions & 20 deletions src/env.ts
@@ -6,50 +6,65 @@ export const env = envsafe({
AWS_S3_BUCKET: str(),
AWS_S3_REGION: str(),
BACKUP_DATABASE_URL: str({
desc: 'The connection string of the database to backup.'
desc: "The connection string of the database to backup.",
}),
BACKUP_CRON_SCHEDULE: str({
desc: 'The cron schedule to run the backup on.',
default: '0 5 * * *',
allowEmpty: true
desc: "The cron schedule to run the backup on.",
default: "0 5 * * *",
allowEmpty: true,
}),
AWS_S3_ENDPOINT: str({
desc: 'The S3 custom endpoint you want to use.',
default: '',
desc: "The S3 custom endpoint you want to use.",
default: "",
allowEmpty: true,
}),
AWS_S3_FORCE_PATH_STYLE: bool({
desc: 'Use path style for the endpoint instead of the default subdomain style, useful for MinIO',
desc: "Use path style for the endpoint instead of the default subdomain style, useful for MinIO",
default: false,
allowEmpty: true
allowEmpty: true,
}),
RUN_ON_STARTUP: bool({
desc: 'Run a backup on startup of this application',
desc: "Run a backup on startup of this application",
default: false,
allowEmpty: true,
}),
BACKUP_FILE_PREFIX: str({
desc: 'Prefix to the file name',
default: 'backup',
desc: "Prefix to the file name",
default: "backup",
}),
BUCKET_SUBFOLDER: str({
desc: 'A subfolder to place the backup files in',
default: '',
allowEmpty: true
desc: "A subfolder to place the backup files in",
default: "",
allowEmpty: true,
}),
SINGLE_SHOT_MODE: bool({
desc: 'Run a single backup on start and exit when completed',
desc: "Run a single backup on start and exit when completed",
default: false,
allowEmpty: true,
}),
// This is both time consuming and resource intensive so we leave it disabled by default
SUPPORT_OBJECT_LOCK: bool({
desc: 'Enables support for buckets with object lock by providing an MD5 hash with the backup file',
default: false
desc: "Enables support for buckets with object lock by providing an MD5 hash with the backup file",
default: false,
}),
BACKUP_OPTIONS: str({
desc: 'Any valid pg_dump option.',
default: '',
desc: "Any valid pg_dump option.",
default: "",
allowEmpty: true,
}),
RUN_MODE: str({
choices: ["backup", "restore"],
default: "backup",
desc: "Operation mode: backup or restore",
}),
RESTORE_DATABASE_URL: str({
desc: "The connection string of the database to restore to",
default: "",
allowEmpty: true,
}),
RESTORE_FILE_NAME: str({
desc: "Name of the backup file to restore from S3",
default: "",
allowEmpty: true,
}),
})
});
31 changes: 23 additions & 8 deletions src/index.ts
@@ -1,5 +1,6 @@
import { CronJob } from "cron";
import { backup } from "./backup.js";
import { restore } from "./restore.js";
import { env } from "./env.js";

console.log("NodeJS Version: " + process.version);
@@ -11,9 +12,19 @@ const tryBackup = async () => {
console.error("Error while running backup: ", error);
process.exit(1);
}
}
};

if (env.RUN_ON_STARTUP || env.SINGLE_SHOT_MODE) {
if (env.RUN_MODE === "restore") {
console.log("Starting database restore...");
try {
await restore();
console.log("Restore completed successfully");
process.exit(0);
} catch (error) {
console.error("Restore failed:", error);
process.exit(1);
}
} else if (env.RUN_ON_STARTUP || env.SINGLE_SHOT_MODE) {
console.log("Running on start backup...");

await tryBackup();
@@ -24,10 +35,14 @@ if (env.RUN_ON_STARTUP || env.SINGLE_SHOT_MODE) {
}
}

const job = new CronJob(env.BACKUP_CRON_SCHEDULE, async () => {
await tryBackup();
});

job.start();
const job =
env.RUN_MODE === "backup"
? new CronJob(env.BACKUP_CRON_SCHEDULE, async () => {
await tryBackup();
})
: null;

console.log("Backup cron scheduled...");
if (job) {
job.start();
console.log("Backup cron scheduled...");
}
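
src/index.ts now imports restore from "./restore.js", but that file has not loaded in this view, so its contents are not shown here. Purely as orientation, the sketch below shows what such a module might look like if it mirrors the structure of backup.ts: download RESTORE_FILE_NAME from the configured bucket, decompress it, hand the tar archive to pg_restore against RESTORE_DATABASE_URL, and clean up with the newly exported deleteFile. The helper names (downloadFromS3, restoreFromFile) and the pg_restore flags are assumptions, not the PR's actual implementation; the environment variables and deleteFile are the ones defined in this diff.

import { exec } from "child_process";
import {
  S3Client,
  S3ClientConfig,
  GetObjectCommand,
} from "@aws-sdk/client-s3";
import { createWriteStream } from "fs";
import { Readable } from "stream";
import { pipeline } from "stream/promises";
import path from "path";
import os from "os";

import { env } from "./env.js";
import { deleteFile } from "./backup.js";

// Download the named backup archive from S3 into a local file.
const downloadFromS3 = async (key: string, filePath: string) => {
  const clientOptions: S3ClientConfig = {
    region: env.AWS_S3_REGION,
    forcePathStyle: env.AWS_S3_FORCE_PATH_STYLE,
  };

  if (env.AWS_S3_ENDPOINT) {
    clientOptions.endpoint = env.AWS_S3_ENDPOINT;
  }

  const client = new S3Client(clientOptions);
  const response = await client.send(
    new GetObjectCommand({ Bucket: env.AWS_S3_BUCKET, Key: key })
  );

  // In the Node.js runtime of SDK v3, Body is a readable stream.
  await pipeline(response.Body as Readable, createWriteStream(filePath));
};

// Decompress the archive and feed the resulting tar file to pg_restore.
// Assumes the file name ends in .gz, matching the backup naming scheme.
const restoreFromFile = async (filePath: string) => {
  const tarPath = filePath.replace(/\.gz$/, "");

  await new Promise((resolve, reject) => {
    exec(
      `gzip -cd ${filePath} > ${tarPath} && pg_restore --clean --if-exists --dbname=${env.RESTORE_DATABASE_URL} ${tarPath}`,
      (error, stdout, stderr) => {
        if (error) {
          reject({ error: error, stderr: stderr.trimEnd() });
          return;
        }
        resolve(undefined);
      }
    );
  });

  await deleteFile(tarPath);
};

export const restore = async () => {
  console.log("Initiating DB restore...");

  const filePath = path.join(os.tmpdir(), env.RESTORE_FILE_NAME);

  await downloadFromS3(env.RESTORE_FILE_NAME, filePath);
  await restoreFromFile(filePath);
  await deleteFile(filePath);

  console.log("DB restore complete...");
};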