feat: backup mysql and s3storage with script

This commit is contained in:
Methapon2001 2024-07-15 10:50:45 +07:00
parent 3066ec3996
commit 63846f57bd
2 changed files with 983 additions and 54 deletions

View file

@ -1,36 +1,43 @@
import { Body, Controller, Delete, Get, Post, Route, Security } from "tsoa";
import { Client as MinioClient, BucketItem } from "minio";
import HttpError from "../interfaces/http-error";
import HttpStatus from "../interfaces/http-status";
function getEnvVar(name: string) {
const value = process.env[name];
if (!value) throw new Error(`${name} is required.`);
return value;
function getEnvVar(environmentName: string) {
const environmentValue = process.env[environmentName];
if (!environmentValue) throw new Error(`${environmentName} is required.`);
return environmentValue;
}
const WINDMILL_URL = getEnvVar("WINDMILL_URL");
const WINDMILL_WORKSPACE = getEnvVar("WINDMILL_WORKSPACE");
const WINDMILL_BACKUP_SCRIPT_PATH = getEnvVar("WINDMILL_BACKUP_SCRIPT_PATH");
const WINDMILL_RESTORE_SCRIPT_PATH = getEnvVar("WINDMILL_RESTORE_SCRIPT_PATH");
const WINDMILL_BACKUP_FLOW_PATH = getEnvVar("WINDMILL_BACKUP_FLOW_PATH");
const WINDMILL_RESTORE_FLOW_PATH = getEnvVar("WINDMILL_RESTORE_FLOW_PATH");
const WINDMILL_BACKUP_DELTE_SCRIPT_PATH = getEnvVar("WINDMILL_BACKUP_DELTE_SCRIPT_PATH");
const WINDMILL_API_KEY = getEnvVar("WINDMILL_API_KEY");
const DB_HOST = getEnvVar("DB_HOST");
const DB_PORT = process.env.DB_PORT;
const DB_USERNAME = getEnvVar("DB_USERNAME");
const DB_PASSWORD = getEnvVar("DB_PASSWORD");
const MINIO_USE_SSL = getEnvVar("MINIO_USE_SSL");
const MINIO_HOST = getEnvVar("MINIO_HOST");
const MINIO_PORT = process.env.MINIO_PORT;
const MINIO_ACCESS_KEY = getEnvVar("MINIO_ACCESS_KEY");
const MINIO_SECRET_KEY = getEnvVar("MINIO_SECRET_KEY");
const MINIO_BUCKET = getEnvVar("MINIO_BUCKET");
const MINIO_BACKUP_FILE_PREFIX =
process.env.MINIO_BACKUP_FILE_PREFIX?.split("/").filter(Boolean).join("/").concat("/") || "";
const MAIN_MINIO_USE_SSL = getEnvVar("MAIN_MINIO_USE_SSL");
const MAIN_MINIO_HOST = getEnvVar("MAIN_MINIO_HOST");
const MAIN_MINIO_PORT = process.env.MAIN_MINIO_PORT;
const MAIN_MINIO_ACCESS_KEY = getEnvVar("MAIN_MINIO_ACCESS_KEY");
const MAIN_MINIO_SECRET_KEY = getEnvVar("MAIN_MINIO_SECRET_KEY");
const MAIN_MINIO_BUCKET = getEnvVar("MAIN_MINIO_BUCKET");
const BACKUP_MINIO_USE_SSL = getEnvVar("BACKUP_MINIO_USE_SSL");
const BACKUP_MINIO_HOST = getEnvVar("BACKUP_MINIO_HOST");
const BACKUP_MINIO_PORT = process.env.BACKUP_MINIO_PORT;
const BACKUP_MINIO_ACCESS_KEY = getEnvVar("BACKUP_MINIO_ACCESS_KEY");
const BACKUP_MINIO_SECRET_KEY = getEnvVar("BACKUP_MINIO_SECRET_KEY");
const BACKUP_MINIO_BUCKET = getEnvVar("BACKUP_MINIO_BUCKET");
const minio = new MinioClient({
useSSL: MINIO_USE_SSL === "true",
endPoint: MINIO_HOST,
port: +(MINIO_PORT || "9000"),
accessKey: MINIO_ACCESS_KEY,
secretKey: MINIO_SECRET_KEY,
useSSL: BACKUP_MINIO_USE_SSL === "true",
endPoint: BACKUP_MINIO_HOST,
port: +(BACKUP_MINIO_PORT || "9000"),
accessKey: BACKUP_MINIO_ACCESS_KEY,
secretKey: BACKUP_MINIO_SECRET_KEY,
});
@Route("/api/v1/backup")
@ -40,7 +47,7 @@ export class BackupController extends Controller {
async listBackup() {
return await new Promise((resolve, reject) => {
const data: BucketItem[] = [];
const stream = minio.listObjectsV2(MINIO_BUCKET, MINIO_BACKUP_FILE_PREFIX);
const stream = minio.listObjectsV2(BACKUP_MINIO_BUCKET);
stream.on("data", (obj) => data.unshift(obj));
stream.on("end", () =>
resolve(
@ -48,7 +55,7 @@ export class BackupController extends Controller {
"prefix" in v
? []
: {
name: v.name.replace(MINIO_BACKUP_FILE_PREFIX || "", ""),
name: v.name.replace(".sql.gz", ""),
timestamp: v.lastModified,
},
),
@ -61,7 +68,7 @@ export class BackupController extends Controller {
@Get("backup-running-list")
async runningBackupStatus() {
return await fetch(
`${WINDMILL_URL}/api/w/${WINDMILL_WORKSPACE}/jobs/list?running=true&script_path_exact=${WINDMILL_BACKUP_SCRIPT_PATH}`,
`${WINDMILL_URL}/api/w/${WINDMILL_WORKSPACE}/jobs/list?running=true&script_path_exact=${WINDMILL_BACKUP_FLOW_PATH}`,
{
headers: { Authorization: `Bearer ${WINDMILL_API_KEY}` },
},
@ -69,16 +76,16 @@ export class BackupController extends Controller {
const data = await r.json();
if (typeof data === "object" && "error" in data) {
console.error(data);
throw new Error("Backup Error");
throw new Error("Cannot get status.");
}
return data;
return data as Record<string, any>[];
});
}
@Get("restore-running-list")
async runningRestoreStatus() {
return await fetch(
`${WINDMILL_URL}/api/w/${WINDMILL_WORKSPACE}/jobs/list?running=true&script_path_exact=${WINDMILL_RESTORE_SCRIPT_PATH}`,
`${WINDMILL_URL}/api/w/${WINDMILL_WORKSPACE}/jobs/list?running=true&script_path_exact=${WINDMILL_RESTORE_FLOW_PATH}`,
{
headers: { Authorization: `Bearer ${WINDMILL_API_KEY}` },
},
@ -86,16 +93,28 @@ export class BackupController extends Controller {
const data = await r.json();
if (typeof data === "object" && "error" in data) {
console.error(data);
throw new Error("Backup Error");
throw new Error("Cannot get status.");
}
return data;
});
}
@Post("create")
async runBackup() {
async runBackup(@Body() body?: { name?: string }) {
const timestamp = Math.round(Date.now() / 1000);
const name =
body?.name && body.name !== "auto-backup"
? `${timestamp}-${body.name}`
: `${timestamp}-manual`;
const listRunning = await this.runningBackupStatus();
if (!listRunning || listRunning.length > 0) {
throw new HttpError(HttpStatus.NOT_ACCEPTABLE, "Cannot create two backup at the same time.");
}
return await fetch(
`${WINDMILL_URL}/api/w/${WINDMILL_WORKSPACE}/jobs/run/p/${WINDMILL_BACKUP_SCRIPT_PATH}`,
`${WINDMILL_URL}/api/w/${WINDMILL_WORKSPACE}/jobs/run/f/${WINDMILL_BACKUP_FLOW_PATH}`,
{
method: "POST",
headers: {
@ -103,17 +122,26 @@ export class BackupController extends Controller {
"Content-Type": "application/json",
},
body: JSON.stringify({
s3_endpoint: `${MINIO_USE_SSL === "true" ? "https" : "http"}://${MINIO_HOST}${(MINIO_PORT && ":" + MINIO_PORT) || ""}`,
s3_access: MINIO_ACCESS_KEY,
s3_secret: MINIO_SECRET_KEY,
s3_bucket: MINIO_BUCKET,
s3_prefix: MINIO_BACKUP_FILE_PREFIX || "/",
db_host: DB_HOST,
db_port: DB_PORT,
db_user: DB_USERNAME,
db_password: DB_PASSWORD,
backup_filename: "manual",
num_versions_to_keep: 0,
backup_name: name,
storage: {
s3_source_endpoint: `${MAIN_MINIO_USE_SSL === "true" ? "https://" : "http://"}${MAIN_MINIO_HOST}${(MAIN_MINIO_PORT && ":" + MAIN_MINIO_PORT) || ""}`,
s3_source_access: MAIN_MINIO_ACCESS_KEY,
s3_source_secret: MAIN_MINIO_SECRET_KEY,
s3_source_bucket: MAIN_MINIO_BUCKET,
s3_dest_endpoint: `${BACKUP_MINIO_USE_SSL === "true" ? "https" : "http://"}${BACKUP_MINIO_HOST}${(BACKUP_MINIO_PORT && ":" + BACKUP_MINIO_PORT) || ""}`,
s3_dest_access: BACKUP_MINIO_ACCESS_KEY,
s3_dest_secret: BACKUP_MINIO_SECRET_KEY,
},
database: {
s3_endpoint: `${BACKUP_MINIO_USE_SSL === "true" ? "https" : "http://"}${BACKUP_MINIO_HOST}${(BACKUP_MINIO_PORT && ":" + BACKUP_MINIO_PORT) || ""}`,
s3_access: BACKUP_MINIO_ACCESS_KEY,
s3_secret: BACKUP_MINIO_SECRET_KEY,
s3_bucket: BACKUP_MINIO_BUCKET,
db_host: DB_HOST,
db_port: DB_PORT,
db_user: DB_USERNAME,
db_password: DB_PASSWORD,
},
}),
},
).then(async (r) => {
@ -127,9 +155,15 @@ export class BackupController extends Controller {
}
@Post("restore")
async restoreBackup(@Body() body: { filename: string }) {
async restoreBackup(@Body() body: { name: string }) {
const listRunning = await this.runningRestoreStatus();
if (!listRunning || listRunning.length > 0) {
throw new HttpError(HttpStatus.NOT_ACCEPTABLE, "Cannot restore two backup at the same time.");
}
return await fetch(
`${WINDMILL_URL}/api/w/${WINDMILL_WORKSPACE}/jobs/run/p/${WINDMILL_RESTORE_SCRIPT_PATH}`,
`${WINDMILL_URL}/api/w/${WINDMILL_WORKSPACE}/jobs/run/f/${WINDMILL_RESTORE_FLOW_PATH}`,
{
method: "POST",
headers: {
@ -137,16 +171,26 @@ export class BackupController extends Controller {
"Content-Type": "application/json",
},
body: JSON.stringify({
s3_endpoint: `${MINIO_USE_SSL === "true" ? "https" : "http"}://${MINIO_HOST}${(MINIO_PORT && ":" + MINIO_PORT) || ""}`,
s3_access: MINIO_ACCESS_KEY,
s3_secret: MINIO_SECRET_KEY,
s3_bucket: MINIO_BUCKET,
s3_prefix: MINIO_BACKUP_FILE_PREFIX || "/",
db_host: DB_HOST,
db_port: DB_PORT,
db_user: DB_USERNAME,
db_password: DB_PASSWORD,
restore_filename: body.filename,
backup_name: body.name,
storage: {
s3_restore_endpoint: `${MAIN_MINIO_USE_SSL === "true" ? "https://" : "http://"}${MAIN_MINIO_HOST}${(MAIN_MINIO_PORT && ":" + MAIN_MINIO_PORT) || ""}`,
s3_restore_access: MAIN_MINIO_ACCESS_KEY,
s3_restore_secret: MAIN_MINIO_SECRET_KEY,
s3_restore_bucket: MAIN_MINIO_BUCKET,
s3_backup_endpoint: `${BACKUP_MINIO_USE_SSL === "true" ? "https" : "http://"}${BACKUP_MINIO_HOST}${(BACKUP_MINIO_PORT && ":" + BACKUP_MINIO_PORT) || ""}`,
s3_backup_access: BACKUP_MINIO_ACCESS_KEY,
s3_backup_secret: BACKUP_MINIO_SECRET_KEY,
},
database: {
s3_endpoint: `${BACKUP_MINIO_USE_SSL === "true" ? "https" : "http://"}${BACKUP_MINIO_HOST}${(BACKUP_MINIO_PORT && ":" + BACKUP_MINIO_PORT) || ""}`,
s3_access: BACKUP_MINIO_ACCESS_KEY,
s3_secret: BACKUP_MINIO_SECRET_KEY,
s3_bucket: BACKUP_MINIO_BUCKET,
db_host: DB_HOST,
db_port: DB_PORT,
db_user: DB_USERNAME,
db_password: DB_PASSWORD,
},
}),
},
).then(async (r) => {
@ -160,9 +204,30 @@ export class BackupController extends Controller {
}
@Delete("delete")
async deleteBackup(@Body() body: { filename: string }) {
await minio.removeObject(MINIO_BUCKET, MINIO_BACKUP_FILE_PREFIX + body.filename, {
forceDelete: true,
async deleteBackup(@Body() body: { name: string }) {
await fetch(
`${WINDMILL_URL}/api/w/${WINDMILL_WORKSPACE}/jobs/run_wait_result/p/${WINDMILL_BACKUP_DELTE_SCRIPT_PATH}`,
{
method: "POST",
headers: {
Authorization: `Bearer ${WINDMILL_API_KEY}`,
"Content-Type": "application/json",
},
body: JSON.stringify({
backup_name: body.name,
s3_backup_endpoint: `${BACKUP_MINIO_USE_SSL === "true" ? "https://" : "http://"}${BACKUP_MINIO_HOST}${(BACKUP_MINIO_PORT && ":" + BACKUP_MINIO_PORT) || ""}`,
s3_backup_access: BACKUP_MINIO_ACCESS_KEY,
s3_backup_secret: BACKUP_MINIO_SECRET_KEY,
s3_backup_bucket: BACKUP_MINIO_BUCKET,
}),
},
).then(async (r) => {
const data = await r.text();
if (data.includes("error")) {
console.error(data);
throw new Error("Error delete backup.");
}
return data;
});
}
}

864
windmill/scripts.md Normal file
View file

@ -0,0 +1,864 @@
# Database
```bash
# shellcheck shell=bash
# Back up all MySQL databases as a gzipped SQL dump streamed to an S3 bucket.
#
# Positional arguments:
#   1 s3_endpoint   2 s3_access   3 s3_secret   4 s3_bucket   5 s3_prefix
#   6 db_host       7 db_port     8 db_user     9 db_password
#   10 backup_filename (default: auto-backup)
#
# Fail fast: without pipefail a failed mysqldump would still stream an
# empty/truncated dump to S3 and the job would report success.
set -euo pipefail
s3_endpoint="$1"
s3_access="$2"
s3_secret="$3"
s3_bucket="$4"
s3_prefix="$5"
s3_prefix=$(echo "$s3_prefix" | sed "s/^\///" ) # trim leading slash
db_host="$6"
db_port="$7"
db_user="$8"
db_password="$9"
backup_filename="${10:-auto-backup}"
# Install tooling only when missing (worker images may be reused).
if [[ -z $(which mysqldump) ]]; then
  apt-get install -y default-mysql-client
fi
if [[ -z $(which aws) ]]; then
  python -m pip install awscli
fi
if [[ -z $(which mc) ]]; then
  # -f: fail on HTTP errors so we never chmod an error page into a "binary".
  curl -fsSL https://dl.min.io/client/mc/release/linux-amd64/mc -o /usr/local/bin/mc
  chmod +x /usr/local/bin/mc
fi
mc alias set s3backup "$s3_endpoint" "$s3_access" "$s3_secret"
mc mb --ignore-existing "s3backup/$s3_bucket"
export AWS_ACCESS_KEY_ID="$s3_access"
export AWS_SECRET_ACCESS_KEY="$s3_secret"
# Auto backups get a unix-timestamp prefix so repeated runs never collide;
# named (manual) backups are expected to already be unique.
if [ "$backup_filename" == "auto-backup" ]; then
  now=$(date "+%s-")
else
  now=""
fi
file_ext="sql.gz"
echo "Backing up database and uploading to ${s3_bucket}/${s3_prefix}${now}${backup_filename}.${file_ext}"
# Stream dump -> gzip -> S3 without touching local disk.
mysqldump -h "$db_host" -P "$db_port" -u "$db_user" -p"$db_password" --all-databases --add-drop-database --single-transaction --skip-lock-tables \
  | gzip \
  | aws --endpoint-url "$s3_endpoint" s3 cp - "s3://${s3_bucket}/${s3_prefix}${now}${backup_filename}.${file_ext}"
```
```bash
# shellcheck shell=bash
# Restore a MySQL instance from a gzipped SQL dump stored in S3.
#
# Positional arguments:
#   1 s3_endpoint   2 s3_access   3 s3_secret   4 s3_bucket   5 s3_prefix
#   6 db_host       7 db_port     8 db_user     9 db_password
#   10 restore_filename (default: 20000101000000-auto-backup.sql.gz)
#
# Fail fast: without pipefail the final "success" line would be printed even
# when the S3 download or decompression failed mid-pipeline.
set -euo pipefail
s3_endpoint="$1"
s3_access="$2"
s3_secret="$3"
s3_bucket="$4"
s3_prefix="$5"
s3_prefix=$(echo "$s3_prefix" | sed "s/^\///" ) # trim leading slash
db_host="$6"
db_port="$7"
db_user="$8"
db_password="$9"
restore_filename="${10:-20000101000000-auto-backup.sql.gz}"
# Install tooling only when missing (worker images may be reused).
if [[ -z $(which mysqldump) ]]; then
  apt-get install -y default-mysql-client
fi
if [[ -z $(which aws) ]]; then
  python -m pip install awscli
fi
export AWS_ACCESS_KEY_ID="$s3_access"
export AWS_SECRET_ACCESS_KEY="$s3_secret"
# Stream S3 object -> gunzip -> mysql without touching local disk.
aws --endpoint-url "$s3_endpoint" s3 cp "s3://${s3_bucket}/${s3_prefix}${restore_filename}" - | zcat | mysql -h "$db_host" -P "$db_port" -u "$db_user" -p"$db_password"
# Only reached when the whole pipeline succeeded (set -e + pipefail above).
echo "success"
```
# S3 Storage (MINIO)
```bash
# shellcheck shell=bash
# Copy an entire source MinIO bucket into a (new) destination bucket.
#
# Positional arguments:
#   1 s3_source_endpoint  2 s3_source_access  3 s3_source_secret  4 s3_source_bucket
#   5 s3_dest_endpoint    6 s3_dest_access    7 s3_dest_secret
#   8 s3_dest_bucket (default: auto-backup)
#
# Abort on the first failure so a half-copied bucket is never reported as a
# successful backup.
set -euo pipefail
s3_source_endpoint="$1"
s3_source_access="$2"
s3_source_secret="$3"
s3_source_bucket="$4"
s3_dest_endpoint="$5"
s3_dest_access="$6"
s3_dest_secret="$7"
s3_dest_bucket="${8:-auto-backup}"
if [[ -z $(which mc) ]]; then
  # -f: fail on HTTP errors so we never chmod an error page into a "binary".
  curl -fsSL https://dl.min.io/client/mc/release/linux-amd64/mc -o /usr/local/bin/mc
  chmod +x /usr/local/bin/mc
fi
# Auto backups get a unix-timestamp prefix so repeated runs never collide;
# named backups use the given (unique) name as the destination bucket.
if [ "$s3_dest_bucket" == "auto-backup" ]; then
  now=$(date "+%s-")
else
  now=""
fi
mc alias set s3source "$s3_source_endpoint" "$s3_source_access" "$s3_source_secret"
mc alias set s3dest "$s3_dest_endpoint" "$s3_dest_access" "$s3_dest_secret"
# No --ignore-existing: a pre-existing destination bucket means a duplicate
# backup name, which should fail loudly rather than be overwritten.
mc mb "s3dest/${now}${s3_dest_bucket}"
mc cp -r "s3source/${s3_source_bucket}" "s3dest/${now}${s3_dest_bucket}"
```
```bash
# shellcheck shell=bash
# Restore a live MinIO bucket from a backup bucket (destructive: the restore
# target bucket is dropped and recreated before mirroring).
#
# Positional arguments:
#   1 s3_restore_endpoint  2 s3_restore_access  3 s3_restore_secret  4 s3_restore_bucket
#   5 s3_backup_endpoint   6 s3_backup_access   7 s3_backup_secret   8 s3_backup_bucket
#
# Abort on the first failure so a bad alias/credential never leads to the live
# bucket being deleted without a restore following it.
set -euo pipefail
s3_restore_endpoint="$1"
s3_restore_access="$2"
s3_restore_secret="$3"
s3_restore_bucket="$4"
s3_backup_endpoint="$5"
s3_backup_access="$6"
s3_backup_secret="$7"
s3_backup_bucket="$8"
if [[ -z $(which mc) ]]; then
  # -f: fail on HTTP errors so we never chmod an error page into a "binary".
  curl -fsSL https://dl.min.io/client/mc/release/linux-amd64/mc -o /usr/local/bin/mc
  chmod +x /usr/local/bin/mc
fi
mc alias set s3restore "$s3_restore_endpoint" "$s3_restore_access" "$s3_restore_secret"
mc alias set s3backup "$s3_backup_endpoint" "$s3_backup_access" "$s3_backup_secret"
# Refuse to wipe the live bucket unless the backup bucket is actually reachable.
mc ls "s3backup/${s3_backup_bucket}" > /dev/null
mc rb --force "s3restore/${s3_restore_bucket}"
mc mb "s3restore/${s3_restore_bucket}"
mc mirror "s3backup/${s3_backup_bucket}" "s3restore/${s3_restore_bucket}"
```
# Flow
```yaml
summary: Full Backup S3 & MySQL
description: ""
value:
modules:
- id: c
value:
lock: |-
{
"dependencies": {}
}
//bun.lockb
<empty>
type: rawscript
content: >-
export async function main(databaseBackupBucket: string,
s3BackupBucket: string) {
if (databaseBackupBucket === s3BackupBucket) throw new Error("Database backup bucket cannot be the same as backup name as backup name will be used as bucket name.");
return;
}
language: bun
input_transforms:
s3BackupBucket:
expr: "`${flow_input.backup_name}`"
type: javascript
databaseBackupBucket:
expr: "`${flow_input.database.s3_bucket}`"
type: javascript
- id: a
value:
path: f/storage/backup_s3
type: script
input_transforms:
s3_dest_access:
expr: "`${flow_input.storage.s3_dest_access}`"
type: javascript
s3_dest_bucket:
expr: "`${flow_input.backup_name}`"
type: javascript
s3_dest_secret:
expr: "`${flow_input.storage.s3_dest_secret}`"
type: javascript
s3_dest_endpoint:
expr: "`${flow_input.storage.s3_dest_endpoint}`"
type: javascript
s3_source_access:
expr: "`${flow_input.storage.s3_source_access}`"
type: javascript
s3_source_bucket:
expr: "`${flow_input.storage.s3_source_bucket}`"
type: javascript
s3_source_secret:
expr: "`${flow_input.storage.s3_source_secret}`"
type: javascript
s3_source_endpoint:
expr: "`${flow_input.storage.s3_source_endpoint}`"
type: javascript
- id: b
value:
path: f/database/mysql_backup
type: script
input_transforms:
db_host:
expr: "`${flow_input.database.db_host}`"
type: javascript
db_port:
expr: "`${flow_input.database.db_port}`"
type: javascript
db_user:
expr: "`${flow_input.database.db_user}`"
type: javascript
s3_access:
expr: "`${flow_input.database.s3_access}`"
type: javascript
s3_bucket:
expr: "`${flow_input.database.s3_bucket}`"
type: javascript
s3_prefix:
expr: '`${""}`'
type: javascript
s3_secret:
expr: "`${flow_input.database.s3_secret}`"
type: javascript
db_password:
expr: "`${flow_input.database.db_password}`"
type: javascript
s3_endpoint:
expr: "`${flow_input.database.s3_endpoint}`"
type: javascript
backup_filename:
expr: "`${flow_input.backup_name}`"
type: javascript
schema:
$schema: https://json-schema.org/draft/2020-12/schema
properties:
backup_name:
type: string
description: This must be unique or else this will fail.
default: ""
format: ""
properties: {}
required: []
password: false
nullable: false
order: []
enumLabels: {}
disableVariablePicker: false
disableCreate: false
pattern: ^([a-zA-Z0-9\-])+$
database:
nullable: false
required:
- db_host
- db_port
- db_user
- db_password
- s3_endpoint
- s3_access
- s3_secret
- s3_bucket
format: ""
type: object
properties:
db_host:
type: string
description: ""
default: ""
db_port:
type: string
description: ""
db_user:
type: string
description: ""
db_password:
type: string
description: ""
order: []
properties: {}
format: ""
required: []
nullable: false
default: ""
enumLabels: {}
disableVariablePicker: false
disableCreate: false
password: true
s3_endpoint:
type: string
description: ""
order: []
properties: {}
format: ""
required: []
nullable: false
default: ""
enumLabels: {}
disableVariablePicker: false
disableCreate: false
password: false
s3_access:
type: string
description: ""
order: []
properties: {}
format: ""
required: []
nullable: false
default: ""
enumLabels: {}
disableVariablePicker: false
disableCreate: false
password: true
s3_secret:
type: string
description: ""
order: []
properties: {}
format: ""
required: []
nullable: false
default: ""
enumLabels: {}
disableVariablePicker: false
disableCreate: false
password: true
s3_bucket:
type: string
description: Target bucket that will be used to store compressed sql file.
Bucket must exists or else this will fail.
order: []
properties: {}
format: ""
required: []
nullable: false
default: ""
enumLabels: {}
disableVariablePicker: false
disableCreate: false
password: false
order:
- db_host
- db_port
- db_user
- db_password
- s3_endpoint
- s3_access
- s3_secret
- s3_bucket
description: Database target to backup.
storage:
nullable: false
required:
- s3_source_endpoint
- s3_source_access
- s3_source_secret
- s3_dest_endpoint
- s3_dest_access
- s3_dest_secret
- s3_source_bucket
format: ""
type: object
properties:
s3_source_endpoint:
type: string
description: ""
default: ""
nullable: false
format: ""
required: []
enumLabels: {}
disableVariablePicker: false
disableCreate: false
password: false
s3_source_access:
type: string
description: ""
format: ""
required: []
nullable: false
default: ""
enumLabels: {}
disableVariablePicker: false
disableCreate: false
password: true
order: []
properties: {}
s3_source_secret:
type: string
description: ""
format: ""
required: []
nullable: false
default: ""
enumLabels: {}
disableVariablePicker: false
disableCreate: false
password: true
order: []
properties: {}
s3_source_bucket:
type: string
description: ""
default: ""
format: ""
properties: {}
required: []
password: false
nullable: false
order: []
enumLabels: {}
disableVariablePicker: false
disableCreate: false
s3_dest_endpoint:
type: string
description: ""
format: ""
required: []
nullable: false
default: ""
enumLabels: {}
disableVariablePicker: false
disableCreate: false
password: false
order: []
properties: {}
s3_dest_access:
type: string
description: ""
format: ""
required: []
nullable: false
default: ""
enumLabels: {}
disableVariablePicker: false
disableCreate: false
password: true
order: []
properties: {}
s3_dest_secret:
type: string
description: ""
format: ""
required: []
nullable: false
default: ""
enumLabels: {}
disableVariablePicker: false
disableCreate: false
password: true
order: []
properties: {}
order:
- s3_source_endpoint
- s3_source_access
- s3_source_secret
- s3_source_bucket
- s3_dest_endpoint
- s3_dest_access
- s3_dest_secret
description: ""
required:
- database
- storage
- backup_name
type: object
order:
- backup_name
- database
- storage
```
```yaml
summary: Full Restore S3 & MySQL
description: ""
value:
modules:
- id: c
value:
tag: ""
lock: |-
{
"dependencies": {}
}
//bun.lockb
<empty>
type: rawscript
content: >-
export async function main(databaseBackupBucket: string,
s3BackupBucket: string) {
if (databaseBackupBucket === s3BackupBucket) throw new Error("Database backup bucket cannot be the same as backup name as backup name will be used as bucket name.");
return;
}
language: bun
input_transforms:
s3BackupBucket:
expr: "`${flow_input.backup_name}`"
type: javascript
databaseBackupBucket:
expr: "`${flow_input.database.s3_bucket}`"
type: javascript
- id: a
value:
path: f/storage/restore_s3
type: script
input_transforms:
s3_backup_access:
expr: "`${flow_input.storage.s3_backup_access}`"
type: javascript
s3_backup_bucket:
expr: "`${flow_input.backup_name}`"
type: javascript
s3_backup_secret:
expr: "`${flow_input.storage.s3_backup_secret}`"
type: javascript
s3_restore_access:
expr: "`${flow_input.storage.s3_restore_access}`"
type: javascript
s3_restore_bucket:
expr: "`${flow_input.storage.s3_restore_bucket}`"
type: javascript
s3_restore_secret:
expr: "`${flow_input.storage.s3_restore_secret}`"
type: javascript
s3_backup_endpoint:
expr: "`${flow_input.storage.s3_backup_endpoint}`"
type: javascript
s3_restore_endpoint:
expr: "`${flow_input.storage.s3_restore_endpoint}`"
type: javascript
- id: b
value:
path: f/database/mysql_restore
type: script
input_transforms:
db_host:
expr: "`${flow_input.database.db_host}`"
type: javascript
db_port:
expr: "`${flow_input.database.db_port}`"
type: javascript
db_user:
expr: "`${flow_input.database.db_user}`"
type: javascript
s3_access:
expr: "`${flow_input.database.s3_access}`"
type: javascript
s3_bucket:
expr: "`${flow_input.database.s3_bucket}`"
type: javascript
s3_prefix:
expr: '`${""}`'
type: javascript
s3_secret:
expr: "`${flow_input.database.s3_secret}`"
type: javascript
db_password:
expr: "`${flow_input.database.db_password}`"
type: javascript
s3_endpoint:
expr: "`${flow_input.database.s3_endpoint}`"
type: javascript
restore_filename:
expr: "`${flow_input.backup_name}.sql.gz`"
type: javascript
schema:
$schema: https://json-schema.org/draft/2020-12/schema
properties:
backup_name:
type: string
description: This must be unique or else this will fail.
default: ""
format: ""
required: []
password: false
nullable: false
enumLabels: {}
disableVariablePicker: false
disableCreate: false
pattern: ^([a-zA-Z0-9\-])+$
database:
nullable: false
required:
- s3_endpoint
- s3_access
- s3_secret
- s3_bucket
- db_host
- db_port
- db_user
- db_password
format: ""
type: object
description: Database target to backup.
order:
- s3_endpoint
- s3_access
- s3_secret
- s3_bucket
- db_host
- db_port
- db_user
- db_password
properties:
s3_endpoint:
type: string
description: ""
default: ""
format: ""
properties: {}
required: []
password: false
nullable: false
order: []
enumLabels: {}
disableVariablePicker: false
disableCreate: false
s3_access:
type: string
description: ""
default: ""
format: ""
properties: {}
required: []
password: true
nullable: false
order: []
enumLabels: {}
disableVariablePicker: false
disableCreate: false
s3_secret:
type: string
description: ""
default: ""
format: ""
properties: {}
required: []
password: true
nullable: false
order: []
enumLabels: {}
disableVariablePicker: false
disableCreate: false
s3_bucket:
type: string
description: ""
default: ""
format: ""
properties: {}
required: []
password: false
nullable: false
order: []
enumLabels: {}
disableVariablePicker: false
disableCreate: false
db_host:
type: string
description: ""
default: ""
format: ""
properties: {}
required: []
password: false
nullable: false
order: []
enumLabels: {}
disableVariablePicker: false
disableCreate: false
db_port:
type: string
description: ""
default: ""
format: ""
properties: {}
required: []
password: false
nullable: false
order: []
enumLabels: {}
disableVariablePicker: false
disableCreate: false
db_user:
type: string
description: ""
default: ""
format: ""
properties: {}
required: []
password: false
nullable: false
order: []
enumLabels: {}
disableVariablePicker: false
disableCreate: false
db_password:
type: string
description: ""
default: ""
format: ""
properties: {}
required: []
password: true
nullable: false
order: []
enumLabels: {}
disableVariablePicker: false
disableCreate: false
storage:
nullable: false
required:
- s3_restore_endpoint
- s3_restore_access
- s3_restore_secret
- s3_backup_endpoint
- s3_backup_access
- s3_restore_bucket
- s3_backup_secret
format: ""
type: object
properties:
s3_restore_access:
type: string
description: ""
format: ""
required: []
nullable: false
default: ""
enumLabels: {}
disableVariablePicker: false
disableCreate: false
password: true
s3_restore_secret:
type: string
description: ""
format: ""
required: []
nullable: false
default: ""
enumLabels: {}
disableVariablePicker: false
disableCreate: false
password: true
s3_restore_bucket:
type: string
description: ""
default: ""
format: ""
required: []
password: false
nullable: false
enumLabels: {}
disableVariablePicker: false
disableCreate: false
s3_backup_endpoint:
type: string
description: ""
format: ""
required: []
nullable: false
default: ""
enumLabels: {}
disableVariablePicker: false
disableCreate: false
password: false
order: []
properties: {}
s3_backup_access:
type: string
description: ""
format: ""
required: []
nullable: false
default: ""
enumLabels: {}
disableVariablePicker: false
disableCreate: false
password: true
s3_backup_secret:
default: ""
password: true
nullable: false
required: []
format: ""
type: string
enumLabels: {}
disableVariablePicker: false
disableCreate: false
description: ""
s3_restore_endpoint:
type: string
description: ""
default: ""
nullable: false
format: ""
required: []
enumLabels: {}
disableVariablePicker: false
disableCreate: false
password: false
order:
- s3_restore_endpoint
- s3_restore_access
- s3_restore_secret
- s3_restore_bucket
- s3_backup_endpoint
- s3_backup_access
- s3_backup_secret
description: ""
required:
- database
- storage
- backup_name
type: object
order:
- backup_name
- database
- storage
```
```bash
# shellcheck shell=bash
# Delete Backup
# Removes one backup: the database dump object ("<name>.sql.gz" inside the
# backup bucket) and the per-backup S3 snapshot bucket named after the backup.
# Positional arguments:
#   1 backup_name        2 s3_backup_endpoint  3 s3_backup_access
#   4 s3_backup_secret   5 s3_backup_bucket
backup_name="$1"
s3_backup_endpoint="$2"
s3_backup_access="$3"
s3_backup_secret="$4"
s3_backup_bucket="$5"
# Install mc only when missing (worker images may be reused).
if [[ -z $(which mc) ]]; then
curl https://dl.min.io/client/mc/release/linux-amd64/mc -o /usr/local/bin/mc
chmod +x /usr/local/bin/mc
fi
# Safety: the backup name doubles as a bucket name; if it equals the backup
# bucket itself, deleting it would destroy every stored database dump.
if [[ "$backup_name" == "$s3_backup_bucket" ]]; then
echo "Backup name and backup bucket cannot be the same."
echo "Database backup is stored in backup bucket."
echo "This will result in database backup lost."
exit 1;
fi
mc alias set s3backup "$s3_backup_endpoint" "$s3_backup_access" "$s3_backup_secret"
# ignore all error such as not found
# Both removals run in the background and their exit codes are deliberately
# discarded: deleting an already-missing object/bucket is treated as success.
mc rm "s3backup/${s3_backup_bucket}/${backup_name}.sql.gz" &
mc rb "s3backup/${backup_name}" --force &
wait
```