Update backup.sh

Use `mariadb-dump` instead of `mysqldump`.

Purge backups older than 7 days.
Author: Xiufeng Guo
Date: 2024-06-12 03:58:14 +09:00
Parent: a953beb11a
Commit: b6da0d6f97
Signed by: showfom (GPG Key ID: 29A7777777777777)
2 changed files with 11 additions and 9 deletions

Changed file: backup.sh

```diff
@@ -11,7 +11,7 @@ backup_script=$(dirname "$realpath $0")
 rclone_config_name=""
 rclone_config_pass=""
 s3_bucket_name=""
-backup_folder_name="$(date +"%Y_%m_%d_%I_%M_%p")"
+backup_folder_name="$(date +"%Y_%m_%d")"
 # Website and Database
 website_folder=""
```
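The new folder name drops the time component, so every run on the same day writes into the same dated folder, which is what the date-based purge further down keys on. A quick comparison of the two formats, with sample output assuming the commit's own timestamp (2024-06-12, 03:58 AM):

```bash
# Old format: includes a 12-hour timestamp, so each run creates a new folder name
date +"%Y_%m_%d_%I_%M_%p"   # e.g. 2024_06_12_03_58_AM
# New format: date only, one stable folder name per day
date +"%Y_%m_%d"            # e.g. 2024_06_12
```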
```diff
@@ -26,8 +26,7 @@ rm /opt/backup/website-*
 # Generating a database dump backup
 # if $database_name exists, then dump the database, else skip
 if [ -n "$database_name" ]; then
-mysqldump -u root $database_name > dbbackup-$backup_folder_name.sql
-tar -zcvf dbbackup-$backup_folder_name.tar.gz dbbackup-$backup_folder_name.sql
+mariadb-dump -u root $database_name | gzip -c > dbbackup-$backup_folder_name.sql.gz
 fi
 # if $website_folder_exclude exists, then tar the website folder excluding the $website_folder_exclude
```
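With the dump piped straight through gzip, there is no intermediate .sql file or tarball to unpack when restoring; a single pipeline brings the database back. A minimal sketch, assuming passwordless root access and a dump produced on 2024_06_12 (the file name is illustrative):

```bash
# Decompress the gzip-compressed dump and feed it back into the server in one pipeline
gunzip -c /opt/backup/dbbackup-2024_06_12.sql.gz | mariadb -u root "$database_name"
```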
```diff
@@ -47,4 +46,7 @@ if [ -n "$database_name" ]; then
 rclone copy /opt/backup/dbbackup-$backup_folder_name.tar.gz $rclone_config_name:$s3_bucket_name/$backup_folder_name
 fi
 rclone copy /opt/backup/website-$backup_folder_name.tar.gz $rclone_config_name:$s3_bucket_name/$backup_folder_name
+# Purge old backups for more than 7 days
+rclone purge $rclone_config_name:$s3_bucket_name/$(date -d "7 days ago" "+%Y_%m_%d")
```
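The purge path is built with the same %Y_%m_%d format as the upload folder, shifted back seven days via GNU date's `-d` option, so the command deletes the remote folder dated exactly one week before the current run. Roughly, on the commit date and with placeholder remote and bucket names, it expands to:

```bash
# date -d "7 days ago" "+%Y_%m_%d" prints 2024_06_05 when run on 2024-06-12,
# so the folder uploaded seven days earlier is removed recursively
rclone purge myremote:mybucket/2024_06_05
```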

Second changed file:

```diff
@@ -469,11 +469,11 @@ function add_update_sh() {
 printf "\E[0m"
 cat > /root/update.sh << EOF
 #!/bin/bash
-apt-get update
-apt-get upgrade -y
-apt-get dist-upgrade -y
-apt-get autoclean
-apt-get autoremove -y
+apt update
+apt upgrade -y
+apt dist-upgrade -y
+apt autoclean
+apt autoremove -y
 EOF
 chmod +x /root/update.sh
 /root/update.sh
```
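The generated script runs once at the end of setup; if the system should also stay updated afterwards, it could be scheduled. A hypothetical cron entry, not part of this commit:

```bash
# Hypothetical: run the generated update script every Sunday at 04:00
echo '0 4 * * 0 root /root/update.sh' > /etc/cron.d/auto-update
chmod 644 /etc/cron.d/auto-update
```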