feat: selectable database to backup

This can be used to select which databases to back up, excluding system
databases that would cause errors on import.
This commit is contained in:
Methapon2001 2024-07-16 13:31:32 +07:00
parent f0aba4dd9a
commit c963c040ef
2 changed files with 28 additions and 4 deletions

View file

@ -19,6 +19,7 @@ const DB_HOST = getEnvVar("DB_HOST");
const DB_PORT = process.env.DB_PORT;
const DB_USERNAME = getEnvVar("DB_USERNAME");
const DB_PASSWORD = getEnvVar("DB_PASSWORD");
const DB_LIST = process.env.DB_LIST;
const MAIN_MINIO_USE_SSL = getEnvVar("MAIN_MINIO_USE_SSL");
const MAIN_MINIO_HOST = getEnvVar("MAIN_MINIO_HOST");
const MAIN_MINIO_PORT = process.env.MAIN_MINIO_PORT;
@ -141,6 +142,7 @@ export class BackupController extends Controller {
db_port: DB_PORT,
db_user: DB_USERNAME,
db_password: DB_PASSWORD,
db_list: DB_LIST?.replace(",", " "),
},
}),
},

View file

@ -16,8 +16,9 @@ db_host="$6"
db_port="$7"
db_user="$8"
db_password="$9"
db_list="${10:-DUMP-ALL}"
backup_filename="${10:-auto-backup}"
backup_filename="${11:-auto-backup}"
if [[ -z $(which mysqldump) ]]; then
apt-get install -y default-mysql-client
@ -45,13 +46,26 @@ else
now=""
fi
file_ext="sql.gz"
echo "Backing up database and uploading to ${s3_bucket}/${s3_prefix}${now}${backup_filename}.${file_ext}"
mysqldump -h "$db_host" -P "$db_port" -u "$db_user" -p"$db_password" --all-databases --add-drop-database --single-transaction --skip-lock-tables \
| gzip \
| aws --endpoint-url "$s3_endpoint" s3 cp - "s3://${s3_bucket}/${s3_prefix}${now}${backup_filename}.${file_ext}"
# if database list is all
if [ "$db_list" == "DUMP-ALL" ]; then
echo "Dumping all databases..."
mysqldump -h "$db_host" -P "$db_port" -u "$db_user" -p"$db_password" --all-databases --add-drop-database --single-transaction --skip-lock-tables \
| gzip \
| aws --endpoint-url "$s3_endpoint" s3 cp - "s3://${s3_bucket}/${s3_prefix}${now}${backup_filename}.${file_ext}"
else
echo "Dumping ${db_list} database..."
db_list=($db_list)
mysqldump -h "$db_host" -P "$db_port" -u "$db_user" -p"$db_password" --add-drop-database --single-transaction --skip-lock-tables --databases "${db_list[@]}" \
| gzip \
| aws --endpoint-url "$s3_endpoint" s3 cp - "s3://${s3_bucket}/${s3_prefix}${now}${backup_filename}.${file_ext}"
fi
echo "success"
```
```bash
@ -218,6 +232,10 @@ value:
db_host:
expr: "`${flow_input.database.db_host}`"
type: javascript
db_list:
expr: "`${!!flow_input.database.db_list ? flow_input.database.db_list :
'DUMP-ALL'}`"
type: javascript
db_port:
expr: "`${flow_input.database.db_port}`"
type: javascript
@ -299,6 +317,9 @@ schema:
disableVariablePicker: false
disableCreate: false
password: true
db_list:
type: string
description: ""
s3_endpoint:
type: string
description: ""
@ -357,6 +378,7 @@ schema:
- db_port
- db_user
- db_password
- db_list
- s3_endpoint
- s3_access
- s3_secret