bash/backup.sh

#!/bin/bash
# Make a new folder at /opt/backup: mkdir -p /opt/backup && touch /opt/backup/backup.log && cd /opt/backup
# Save this file at /opt/backup/backup.sh
# Set a cronjob: 0 3 * * * /bin/bash /opt/backup/backup.sh > /opt/backup/backup.log 2>&1
# Resolve the absolute path of the directory containing this script
backup_script=$(dirname "$(realpath "$0")")
# Loading the Config
rclone_config_name=""
rclone_config_pass=""
s3_bucket_name=""
backup_folder_name="$(date +"%Y_%m_%d")"
# Website and Database
website_folder=""
website_folder_exclude=""
database_name=""
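# Note (assumption, not part of the original script): the remote named in
# $rclone_config_name is expected to be an S3-type remote created beforehand
# with the interactive `rclone config` wizard, and $rclone_config_pass is the
# optional password protecting that rclone config file.
# Illustrative check that the remote exists:
#   rclone listremotes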
# Remove local dumps and archives left over from the previous run
cd /opt/backup || exit 1
rm -f /opt/backup/dbbackup-*
rm -f /opt/backup/website-*
# Generate a database dump, but only if $database_name is set
if [ -n "$database_name" ]; then
    mariadb-dump -u root "$database_name" | gzip -c > "dbbackup-$backup_folder_name.sql.gz"
fi
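# Restore sketch (assumes the target database already exists and root has access):
#   gunzip -c dbbackup-YYYY_MM_DD.sql.gz | mariadb -u root database_name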
# Archive the website folder, excluding $website_folder_exclude if it is set
if [ -n "$website_folder_exclude" ]; then
    tar --exclude "$website_folder_exclude" -zcvf "website-$backup_folder_name.tar.gz" "$website_folder"
else
    tar -zcvf "website-$backup_folder_name.tar.gz" "$website_folder"
fi
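# Restore sketch: GNU tar strips the leading "/" when creating the archive, so
# extracting with -C / puts the files back at their original absolute paths:
#   tar -xzf website-YYYY_MM_DD.tar.gz -C /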
# If the rclone config is encrypted, export its password so rclone can read it
if [ -n "$rclone_config_pass" ]; then
    export RCLONE_CONFIG_PASS="$rclone_config_pass"
fi
# Upload the database and website backups to the S3 bucket
if [ -n "$database_name" ]; then
    rclone copy "/opt/backup/dbbackup-$backup_folder_name.sql.gz" "$rclone_config_name:$s3_bucket_name/$backup_folder_name" -P --s3-no-check-bucket
fi
rclone copy "/opt/backup/website-$backup_folder_name.tar.gz" "$rclone_config_name:$s3_bucket_name/$backup_folder_name" -P --s3-no-check-bucket
# Purge the remote folder from exactly 7 days ago (a rolling 7-day retention when run daily)
rclone purge "$rclone_config_name:$s3_bucket_name/$(date -d "7 days ago" "+%Y_%m_%d")"
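# Optional verification: list what actually landed in today's remote folder
#   rclone ls "$rclone_config_name:$s3_bucket_name/$backup_folder_name"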