hrms-api-log/windmill/scripts.md
Methapon2001 4fde3a7988 feat: add backup size metadata to response
This commit adds the backup size to the response object using a bash script.

This will also remove minio package as it changed to bash script
instead.
2024-07-16 13:51:18 +07:00

908 lines
23 KiB
Markdown

# Database
```bash
# shellcheck shell=bash
# Dump one or more MySQL databases, gzip the dump, and stream it to an S3 bucket.
#
# Args:
#   $1  S3 endpoint URL      $2 S3 access key   $3 S3 secret key
#   $4  S3 bucket            $5 S3 key prefix (one leading slash is trimmed)
#   $6  DB host   $7 DB port   $8 DB user   $9 DB password
#   $10 space-separated database list, or "DUMP-ALL" (default) for all databases
#   $11 backup file basename (default "auto-backup"; auto backups get a unix
#       timestamp prefix so repeated runs do not overwrite each other)
set -eo pipefail # a failed mysqldump/gzip stage must not be masked by the upload stage
s3_endpoint="$1"
s3_access="$2"
s3_secret="$3"
s3_bucket="$4"
s3_prefix="${5#/}" # trim leading slash
db_host="$6"
db_port="$7"
db_user="$8"
db_password="$9"
db_list="${10:-DUMP-ALL}"
backup_filename="${11:-auto-backup}"
# Install missing tooling on first run (worker containers are ephemeral).
if ! command -v mysqldump >/dev/null; then
apt-get install -y default-mysql-client
fi
if ! command -v aws >/dev/null; then
python -m pip install awscli
fi
if ! command -v mc >/dev/null; then
# -f: fail on HTTP errors instead of saving an error page as the binary
curl -fsSL https://dl.min.io/client/mc/release/linux-amd64/mc -o /usr/local/bin/mc
chmod +x /usr/local/bin/mc
fi
mc alias set s3backup "$s3_endpoint" "$s3_access" "$s3_secret"
mc mb --ignore-existing "s3backup/$s3_bucket"
export AWS_ACCESS_KEY_ID="$s3_access"
export AWS_SECRET_ACCESS_KEY="$s3_secret"
# Automatic backups get a timestamp prefix; explicitly named backups keep the exact name.
if [ "$backup_filename" == "auto-backup" ]; then
now=$(date "+%s-")
else
now=""
fi
file_ext="sql.gz"
echo "Backing up database and uploading to ${s3_bucket}/${s3_prefix}${now}${backup_filename}.${file_ext}"
if [ "$db_list" == "DUMP-ALL" ]; then
echo "Dumping all databases..."
mysqldump -h "$db_host" -P "$db_port" -u "$db_user" -p"$db_password" --all-databases --add-drop-database --single-transaction --skip-lock-tables \
| gzip \
| aws --endpoint-url "$s3_endpoint" s3 cp - "s3://${s3_bucket}/${s3_prefix}${now}${backup_filename}.${file_ext}"
else
echo "Dumping ${db_list} database..."
# Split the space-separated list into an array without globbing side effects.
read -r -a db_list <<<"$db_list"
mysqldump -h "$db_host" -P "$db_port" -u "$db_user" -p"$db_password" --add-drop-database --single-transaction --skip-lock-tables --databases "${db_list[@]}" \
| gzip \
| aws --endpoint-url "$s3_endpoint" s3 cp - "s3://${s3_bucket}/${s3_prefix}${now}${backup_filename}.${file_ext}"
fi
echo "success"
```
```bash
# shellcheck shell=bash
# Restore a MySQL server from a gzipped SQL dump stored in an S3 bucket.
#
# Args:
#   $1-$3 S3 endpoint/access/secret   $4 bucket
#   $5    S3 key prefix (one leading slash is trimmed)
#   $6-$9 DB host/port/user/password
#   $10   dump object name (default 20000101000000-auto-backup.sql.gz)
set -eo pipefail # a failed download must not silently feed an empty dump to mysql
s3_endpoint="$1"
s3_access="$2"
s3_secret="$3"
s3_bucket="$4"
s3_prefix="${5#/}" # trim leading slash
db_host="$6"
db_port="$7"
db_user="$8"
db_password="$9"
restore_filename="${10:-20000101000000-auto-backup.sql.gz}"
# This script runs the mysql client, so check for that binary (the original
# checked mysqldump); the Debian package provides both.
if ! command -v mysql >/dev/null; then
apt-get install -y default-mysql-client
fi
if ! command -v aws >/dev/null; then
python -m pip install awscli
fi
export AWS_ACCESS_KEY_ID="$s3_access"
export AWS_SECRET_ACCESS_KEY="$s3_secret"
aws --endpoint-url "$s3_endpoint" s3 cp "s3://${s3_bucket}/${s3_prefix}${restore_filename}" - | zcat | mysql -h "$db_host" -P "$db_port" -u "$db_user" -p"$db_password"
echo "success"
```
# S3 Storage (MINIO)
```bash
# shellcheck shell=bash
# Copy every object from a source S3 bucket into a newly created destination bucket.
#
# Args:
#   $1-$3 source endpoint/access/secret   $4 source bucket
#   $5-$7 dest endpoint/access/secret     $8 dest bucket name (default "auto-backup")
set -e # abort on failure; in particular, do not copy into a bucket we failed to create
s3_source_endpoint="$1"
s3_source_access="$2"
s3_source_secret="$3"
s3_source_bucket="$4"
s3_dest_endpoint="$5"
s3_dest_access="$6"
s3_dest_secret="$7"
s3_dest_bucket="${8:-auto-backup}"
if ! command -v mc >/dev/null; then
# -f: fail on HTTP errors instead of saving an error page as the binary
curl -fsSL https://dl.min.io/client/mc/release/linux-amd64/mc -o /usr/local/bin/mc
chmod +x /usr/local/bin/mc
fi
# Automatic backups get a unix-timestamp prefix so repeated runs do not collide.
if [ "$s3_dest_bucket" == "auto-backup" ]; then
now=$(date "+%s-")
else
now=""
fi
mc alias set s3source "$s3_source_endpoint" "$s3_source_access" "$s3_source_secret"
mc alias set s3dest "$s3_dest_endpoint" "$s3_dest_access" "$s3_dest_secret"
mc mb "s3dest/${now}${s3_dest_bucket}"
mc cp -r "s3source/${s3_source_bucket}" "s3dest/${now}${s3_dest_bucket}"
```
```bash
# shellcheck shell=bash
# Restore an S3 bucket by mirroring a backup bucket over a freshly recreated one.
# WARNING: the restore bucket is deleted and recreated — all current content is lost.
#
# Args:
#   $1-$3 restore-target endpoint/access/secret   $4 restore bucket (recreated!)
#   $5-$7 backup-source endpoint/access/secret    $8 backup bucket to mirror from
set -e # destructive script: never continue past a failed alias/mb step
s3_restore_endpoint="$1"
s3_restore_access="$2"
s3_restore_secret="$3"
s3_restore_bucket="$4"
s3_backup_endpoint="$5"
s3_backup_access="$6"
s3_backup_secret="$7"
s3_backup_bucket="$8"
if ! command -v mc >/dev/null; then
# -f: fail on HTTP errors instead of saving an error page as the binary
curl -fsSL https://dl.min.io/client/mc/release/linux-amd64/mc -o /usr/local/bin/mc
chmod +x /usr/local/bin/mc
fi
mc alias set s3restore "$s3_restore_endpoint" "$s3_restore_access" "$s3_restore_secret"
mc alias set s3backup "$s3_backup_endpoint" "$s3_backup_access" "$s3_backup_secret"
# Recreate the restore bucket from scratch; rb may legitimately fail when the
# bucket does not exist yet, so that one step is allowed to fail.
mc rb --force "s3restore/${s3_restore_bucket}" || true
mc mb "s3restore/${s3_restore_bucket}"
mc mirror "s3backup/${s3_backup_bucket}" "s3restore/${s3_restore_bucket}"
```
# Flow
```yaml
summary: Full Backup S3 & MySQL
description: ""
value:
modules:
- id: c
value:
lock: |-
{
"dependencies": {}
}
//bun.lockb
<empty>
type: rawscript
content: >-
export async function main(databaseBackupBucket: string,
s3BackupBucket: string) {
if (databaseBackupBucket === s3BackupBucket) throw new Error("Database backup bucket cannot be the same as backup name as backup name will be used as bucket name.");
return;
}
language: bun
input_transforms:
s3BackupBucket:
expr: "`${flow_input.backup_name}`"
type: javascript
databaseBackupBucket:
expr: "`${flow_input.database.s3_bucket}`"
type: javascript
- id: a
value:
path: f/storage/backup_s3
type: script
input_transforms:
s3_dest_access:
expr: "`${flow_input.storage.s3_dest_access}`"
type: javascript
s3_dest_bucket:
expr: "`${flow_input.backup_name}`"
type: javascript
s3_dest_secret:
expr: "`${flow_input.storage.s3_dest_secret}`"
type: javascript
s3_dest_endpoint:
expr: "`${flow_input.storage.s3_dest_endpoint}`"
type: javascript
s3_source_access:
expr: "`${flow_input.storage.s3_source_access}`"
type: javascript
s3_source_bucket:
expr: "`${flow_input.storage.s3_source_bucket}`"
type: javascript
s3_source_secret:
expr: "`${flow_input.storage.s3_source_secret}`"
type: javascript
s3_source_endpoint:
expr: "`${flow_input.storage.s3_source_endpoint}`"
type: javascript
- id: b
value:
path: f/database/mysql_backup
type: script
input_transforms:
db_host:
expr: "`${flow_input.database.db_host}`"
type: javascript
db_list:
expr: "`${!!flow_input.database.db_list ? flow_input.database.db_list :
'DUMP-ALL'}`"
type: javascript
db_port:
expr: "`${flow_input.database.db_port}`"
type: javascript
db_user:
expr: "`${flow_input.database.db_user}`"
type: javascript
s3_access:
expr: "`${flow_input.database.s3_access}`"
type: javascript
s3_bucket:
expr: "`${flow_input.database.s3_bucket}`"
type: javascript
s3_prefix:
expr: '`${""}`'
type: javascript
s3_secret:
expr: "`${flow_input.database.s3_secret}`"
type: javascript
db_password:
expr: "`${flow_input.database.db_password}`"
type: javascript
s3_endpoint:
expr: "`${flow_input.database.s3_endpoint}`"
type: javascript
backup_filename:
expr: "`${flow_input.backup_name}`"
type: javascript
schema:
$schema: https://json-schema.org/draft/2020-12/schema
properties:
backup_name:
type: string
description: This must be unique or else this will fail.
default: ""
format: ""
properties: {}
required: []
password: false
nullable: false
order: []
enumLabels: {}
disableVariablePicker: false
disableCreate: false
pattern: ^([a-zA-Z0-9\-])+$
database:
nullable: false
required:
- db_host
- db_port
- db_user
- db_password
- s3_endpoint
- s3_access
- s3_secret
- s3_bucket
format: ""
type: object
properties:
db_host:
type: string
description: ""
default: ""
db_port:
type: string
description: ""
db_user:
type: string
description: ""
db_password:
type: string
description: ""
order: []
properties: {}
format: ""
required: []
nullable: false
default: ""
enumLabels: {}
disableVariablePicker: false
disableCreate: false
password: true
db_list:
type: string
description: ""
s3_endpoint:
type: string
description: ""
order: []
properties: {}
format: ""
required: []
nullable: false
default: ""
enumLabels: {}
disableVariablePicker: false
disableCreate: false
password: false
s3_access:
type: string
description: ""
order: []
properties: {}
format: ""
required: []
nullable: false
default: ""
enumLabels: {}
disableVariablePicker: false
disableCreate: false
password: true
s3_secret:
type: string
description: ""
order: []
properties: {}
format: ""
required: []
nullable: false
default: ""
enumLabels: {}
disableVariablePicker: false
disableCreate: false
password: true
s3_bucket:
type: string
description: Target bucket that will be used to store compressed sql file.
Bucket must exist or else this will fail.
order: []
properties: {}
format: ""
required: []
nullable: false
default: ""
enumLabels: {}
disableVariablePicker: false
disableCreate: false
password: false
order:
- db_host
- db_port
- db_user
- db_password
- db_list
- s3_endpoint
- s3_access
- s3_secret
- s3_bucket
description: Database target to backup.
storage:
nullable: false
required:
- s3_source_endpoint
- s3_source_access
- s3_source_secret
- s3_dest_endpoint
- s3_dest_access
- s3_dest_secret
- s3_source_bucket
format: ""
type: object
properties:
s3_source_endpoint:
type: string
description: ""
default: ""
nullable: false
format: ""
required: []
enumLabels: {}
disableVariablePicker: false
disableCreate: false
password: false
s3_source_access:
type: string
description: ""
format: ""
required: []
nullable: false
default: ""
enumLabels: {}
disableVariablePicker: false
disableCreate: false
password: true
order: []
properties: {}
s3_source_secret:
type: string
description: ""
format: ""
required: []
nullable: false
default: ""
enumLabels: {}
disableVariablePicker: false
disableCreate: false
password: true
order: []
properties: {}
s3_source_bucket:
type: string
description: ""
default: ""
format: ""
properties: {}
required: []
password: false
nullable: false
order: []
enumLabels: {}
disableVariablePicker: false
disableCreate: false
s3_dest_endpoint:
type: string
description: ""
format: ""
required: []
nullable: false
default: ""
enumLabels: {}
disableVariablePicker: false
disableCreate: false
password: false
order: []
properties: {}
s3_dest_access:
type: string
description: ""
format: ""
required: []
nullable: false
default: ""
enumLabels: {}
disableVariablePicker: false
disableCreate: false
password: true
order: []
properties: {}
s3_dest_secret:
type: string
description: ""
format: ""
required: []
nullable: false
default: ""
enumLabels: {}
disableVariablePicker: false
disableCreate: false
password: true
order: []
properties: {}
order:
- s3_source_endpoint
- s3_source_access
- s3_source_secret
- s3_source_bucket
- s3_dest_endpoint
- s3_dest_access
- s3_dest_secret
description: ""
required:
- database
- storage
- backup_name
type: object
order:
- backup_name
- database
- storage
```
```yaml
summary: Full Restore S3 & MySQL
description: ""
value:
modules:
- id: c
value:
tag: ""
lock: |-
{
"dependencies": {}
}
//bun.lockb
<empty>
type: rawscript
content: >-
export async function main(databaseBackupBucket: string,
s3BackupBucket: string) {
if (databaseBackupBucket === s3BackupBucket) throw new Error("Database backup bucket cannot be the same as backup name as backup name will be used as bucket name.");
return;
}
language: bun
input_transforms:
s3BackupBucket:
expr: "`${flow_input.backup_name}`"
type: javascript
databaseBackupBucket:
expr: "`${flow_input.database.s3_bucket}`"
type: javascript
- id: a
value:
path: f/storage/restore_s3
type: script
input_transforms:
s3_backup_access:
expr: "`${flow_input.storage.s3_backup_access}`"
type: javascript
s3_backup_bucket:
expr: "`${flow_input.backup_name}`"
type: javascript
s3_backup_secret:
expr: "`${flow_input.storage.s3_backup_secret}`"
type: javascript
s3_restore_access:
expr: "`${flow_input.storage.s3_restore_access}`"
type: javascript
s3_restore_bucket:
expr: "`${flow_input.storage.s3_restore_bucket}`"
type: javascript
s3_restore_secret:
expr: "`${flow_input.storage.s3_restore_secret}`"
type: javascript
s3_backup_endpoint:
expr: "`${flow_input.storage.s3_backup_endpoint}`"
type: javascript
s3_restore_endpoint:
expr: "`${flow_input.storage.s3_restore_endpoint}`"
type: javascript
- id: b
value:
path: f/database/mysql_restore
type: script
input_transforms:
db_host:
expr: "`${flow_input.database.db_host}`"
type: javascript
db_port:
expr: "`${flow_input.database.db_port}`"
type: javascript
db_user:
expr: "`${flow_input.database.db_user}`"
type: javascript
s3_access:
expr: "`${flow_input.database.s3_access}`"
type: javascript
s3_bucket:
expr: "`${flow_input.database.s3_bucket}`"
type: javascript
s3_prefix:
expr: '`${""}`'
type: javascript
s3_secret:
expr: "`${flow_input.database.s3_secret}`"
type: javascript
db_password:
expr: "`${flow_input.database.db_password}`"
type: javascript
s3_endpoint:
expr: "`${flow_input.database.s3_endpoint}`"
type: javascript
restore_filename:
expr: "`${flow_input.backup_name}.sql.gz`"
type: javascript
schema:
$schema: https://json-schema.org/draft/2020-12/schema
properties:
backup_name:
type: string
description: This must be unique or else this will fail.
default: ""
format: ""
required: []
password: false
nullable: false
enumLabels: {}
disableVariablePicker: false
disableCreate: false
pattern: ^([a-zA-Z0-9\-])+$
database:
nullable: false
required:
- s3_endpoint
- s3_access
- s3_secret
- s3_bucket
- db_host
- db_port
- db_user
- db_password
format: ""
type: object
description: Database target to restore.
order:
- s3_endpoint
- s3_access
- s3_secret
- s3_bucket
- db_host
- db_port
- db_user
- db_password
properties:
s3_endpoint:
type: string
description: ""
default: ""
format: ""
properties: {}
required: []
password: false
nullable: false
order: []
enumLabels: {}
disableVariablePicker: false
disableCreate: false
s3_access:
type: string
description: ""
default: ""
format: ""
properties: {}
required: []
password: true
nullable: false
order: []
enumLabels: {}
disableVariablePicker: false
disableCreate: false
s3_secret:
type: string
description: ""
default: ""
format: ""
properties: {}
required: []
password: true
nullable: false
order: []
enumLabels: {}
disableVariablePicker: false
disableCreate: false
s3_bucket:
type: string
description: ""
default: ""
format: ""
properties: {}
required: []
password: false
nullable: false
order: []
enumLabels: {}
disableVariablePicker: false
disableCreate: false
db_host:
type: string
description: ""
default: ""
format: ""
properties: {}
required: []
password: false
nullable: false
order: []
enumLabels: {}
disableVariablePicker: false
disableCreate: false
db_port:
type: string
description: ""
default: ""
format: ""
properties: {}
required: []
password: false
nullable: false
order: []
enumLabels: {}
disableVariablePicker: false
disableCreate: false
db_user:
type: string
description: ""
default: ""
format: ""
properties: {}
required: []
password: false
nullable: false
order: []
enumLabels: {}
disableVariablePicker: false
disableCreate: false
db_password:
type: string
description: ""
default: ""
format: ""
properties: {}
required: []
password: true
nullable: false
order: []
enumLabels: {}
disableVariablePicker: false
disableCreate: false
storage:
nullable: false
required:
- s3_restore_endpoint
- s3_restore_access
- s3_restore_secret
- s3_backup_endpoint
- s3_backup_access
- s3_restore_bucket
- s3_backup_secret
format: ""
type: object
properties:
s3_restore_access:
type: string
description: ""
format: ""
required: []
nullable: false
default: ""
enumLabels: {}
disableVariablePicker: false
disableCreate: false
password: true
s3_restore_secret:
type: string
description: ""
format: ""
required: []
nullable: false
default: ""
enumLabels: {}
disableVariablePicker: false
disableCreate: false
password: true
s3_restore_bucket:
type: string
description: ""
default: ""
format: ""
required: []
password: false
nullable: false
enumLabels: {}
disableVariablePicker: false
disableCreate: false
s3_backup_endpoint:
type: string
description: ""
format: ""
required: []
nullable: false
default: ""
enumLabels: {}
disableVariablePicker: false
disableCreate: false
password: false
order: []
properties: {}
s3_backup_access:
type: string
description: ""
format: ""
required: []
nullable: false
default: ""
enumLabels: {}
disableVariablePicker: false
disableCreate: false
password: true
s3_backup_secret:
default: ""
password: true
nullable: false
required: []
format: ""
type: string
enumLabels: {}
disableVariablePicker: false
disableCreate: false
description: ""
s3_restore_endpoint:
type: string
description: ""
default: ""
nullable: false
format: ""
required: []
enumLabels: {}
disableVariablePicker: false
disableCreate: false
password: false
order:
- s3_restore_endpoint
- s3_restore_access
- s3_restore_secret
- s3_restore_bucket
- s3_backup_endpoint
- s3_backup_access
- s3_backup_secret
description: ""
required:
- database
- storage
- backup_name
type: object
order:
- backup_name
- database
- storage
```
```bash
# shellcheck shell=bash
# Delete a backup: remove the database dump object and the mirrored backup bucket.
#
# Args:
#   $1 backup name   $2-$4 S3 endpoint/access/secret   $5 backup bucket
backup_name="$1"
s3_backup_endpoint="$2"
s3_backup_access="$3"
s3_backup_secret="$4"
s3_backup_bucket="$5"
if ! command -v mc >/dev/null; then
# -f: fail on HTTP errors instead of saving an error page as the binary
curl -fsSL https://dl.min.io/client/mc/release/linux-amd64/mc -o /usr/local/bin/mc
chmod +x /usr/local/bin/mc
fi
# Refuse to proceed when the backup name equals the backup bucket: the database
# dumps live inside the backup bucket, so removing it would lose every dump.
if [[ "$backup_name" == "$s3_backup_bucket" ]]; then
echo "Backup name and backup bucket cannot be the same."
echo "Database backup is stored in backup bucket."
echo "This will result in database backup lost."
exit 1
fi
mc alias set s3backup "$s3_backup_endpoint" "$s3_backup_access" "$s3_backup_secret"
# Delete both in parallel; errors such as "not found" are intentionally ignored
# ('wait' with no operands always returns 0).
mc rm "s3backup/${s3_backup_bucket}/${backup_name}.sql.gz" &
mc rb "s3backup/${backup_name}" --force &
wait
```
````bash
# shellcheck shell=bash
# List backups: database dump objects in the backup bucket plus per-bucket sizes.
# Prints one JSON object on stdout: {"bucket": [...], "database": [...]}
#
# Args:
#   $1-$3 S3 endpoint/access/secret   $4 backup bucket holding the database dumps
set -eo pipefail
s3_backup_endpoint="$1"
s3_backup_access="$2"
s3_backup_secret="$3"
s3_backup_bucket="$4"
if ! command -v mc >/dev/null; then
# -f: fail on HTTP errors instead of saving an error page as the binary
curl -fsSL https://dl.min.io/client/mc/release/linux-amd64/mc -o /usr/local/bin/mc
chmod +x /usr/local/bin/mc
fi
mc alias set s3backup "$s3_backup_endpoint" "$s3_backup_access" "$s3_backup_secret"
# jq -s collects mc's newline-delimited JSON records into a proper array and,
# unlike the previous sed hack, yields [] (valid JSON) when there is no output.
list_database=$(mc ls --json "s3backup/${s3_backup_bucket}" | jq -c -s .)
list_bucket=$(mc du --depth 2 --json "s3backup" | jq -c -s .)
printf '{"bucket": %s, "database": %s}' "$list_bucket" "$list_database" | jq -c
````