Xiufeng Guo 2022-12-19 20:18:51 +11:00
parent a78825fa1a
commit fdc57f0daa
Signed by: showfom
GPG Key ID: 29A7777777777777
2 changed files with 159 additions and 0 deletions

backup.rb (new file, 109 lines)

@@ -0,0 +1,109 @@
#!/usr/bin/env ruby
require 'logger'
require 'rake'
require 'fileutils'
# apt install ruby genisoimage
# mkdir -p /opt/backup/isoupload && touch /opt/backup/backup.log
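# Example cron entry (assumed schedule, mirroring the one in backup.sh):
#   0 3 * * * /usr/bin/ruby /opt/backup/backup.rb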
# Store all backup files
$backup_dir = '/opt/backup/isoupload'
# Delete all backup files older than $keep_days
$keep_days = 14
# Generate missing backup files newer than $check_days
$check_days = 7
# Path to pack
$pack_path = '/var/www/%<year>d/%<month>02d/%<day>02d'
# Path in ISO
$path_in_iso = '/%<year>d/%<month>02d/%<day>02d'
# ISO file name
$iso_name = '%<year>d/%<month>02d/%<year>d-%<month>02d-%<day>02d.iso'
# ISO volume name (CD label)
$iso_volume = '%<year>d-%<month>02d-%<day>02d'
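# Example: for 2022-12-19 the templates above expand to
#   pack_path  => /var/www/2022/12/19
#   iso_name   => 2022/12/2022-12-19.iso
#   iso_volume => 2022-12-19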
$s3_subdir = ''
$log_file = '/opt/backup/backup.log'
# $log_file = STDOUT
# Generate backup file for given date
def generate(date)
  date_info = {
    year: date.year,
    month: date.month,
    day: date.day,
    hour: date.hour,
    min: date.min
  }
  pack_path = $pack_path % date_info
  path_in_iso = $path_in_iso % date_info
  iso_name = $iso_name % date_info
  iso_volume = $iso_volume % date_info
  unless File.exist?(pack_path)
    $logger.warn "#{pack_path} is missing, skipping."
    return
  end
  Dir.chdir($backup_dir) do
    if File.exist?(iso_name)
      $logger.info "#{iso_name} exists, skipping."
      return
    end
    FileUtils.mkdir_p(File.dirname(iso_name))
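    # genisoimage flags, for reference: -U allows untranslated filenames, -J adds
    # Joliet names for Windows, -V sets the volume label, and -graft-points places
    # pack_path at path_in_iso inside the image.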
    Rake.sh 'ionice', 'genisoimage', '-U', '-J', '-quiet',
            '-dir-mode', '555', '-file-mode', '444',
            '-uid', '0', '-gid', '0',
            '-o', iso_name,
            '-V', iso_volume,
            '-graft-points', "#{path_in_iso}=#{pack_path}"
    $logger.info "#{iso_name} generated." if File.exist?(iso_name)
  end
end
def upload
  # Requires a pre-configured /root/.rclone.conf with a remote named "scaleway"
  Rake.sh 'ionice', 'rclone', 'copy', $backup_dir, "scaleway:#{$s3_subdir}", '--transfers', '1'
end
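# A minimal /root/.rclone.conf stanza this expects (assumed Scaleway values;
# substitute your own region, endpoint, and credentials):
#   [scaleway]
#   type = s3
#   provider = Scaleway
#   region = fr-par
#   endpoint = s3.fr-par.scw.cloud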
def purge
  purge_time = Time.now - 86400 * $keep_days
  Dir.chdir($backup_dir) do
    # Strictly match only *.iso and *.xz
    Dir.glob('**/{*.iso,*.xz}') do |file|
      mtime = File.mtime(file)
      mtime_string = mtime.strftime('%Y-%m-%d %H:%M:%S')
      if mtime < purge_time
        $logger.info "Deleting #{file} (#{mtime_string})"
        # Dry run: uncomment the next line to actually delete old files.
        # File.unlink(file)
      end
    end
  end
end
raise "check_days must be smaller than keep_days" unless $check_days < $keep_days
raise "check_days must be greater than 0" unless $check_days > 0
$logger = Logger.new($log_file)
$logger.info "Keep #{$keep_days} days, check #{$check_days} days"
# Loop through days
now = Time.now
$check_days.downto(1) do |day|
  generate(now - 86400 * day)
end
upload
purge

backup.sh (new file, 50 lines)

@@ -0,0 +1,50 @@
#!/bin/bash
# Make a new folder at /opt/backup: mkdir -p /opt/backup && touch /opt/backup/backup.log && cd /opt/backup
# Save this file at /opt/backup/backup.sh
# Set a cronjob: 0 3 * * * /bin/bash /opt/backup/backup.sh > /opt/backup/backup.log 2>&1
# Get the absolute directory of the backup script
backup_script=$(dirname "$(realpath "$0")")
# Load the config
rclone_config_name=""
rclone_config_pass=""
s3_bucket_name=""
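# e.g. rclone_config_name="scaleway" (a remote created with `rclone config`, as used in backup.rb)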
backup_folder_name="$(date +"%Y_%m_%d_%I_%M_%p")"
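# e.g. 2022_12_19_03_00_AM for a run at 03:00 on 2022-12-19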
# Website and Database
website_folder=""
website_folder_exclude=""
database_name=""
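# e.g. website_folder="/var/www/html", website_folder_exclude="/var/www/html/cache",
#      database_name="wordpress" (hypothetical values)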
# Remove old local dumps (-f so missing files are not an error)
cd /opt/backup || exit 1
rm -f /opt/backup/dbbackup-*
rm -f /opt/backup/website-*
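# Note: the dump below runs as root with no password, which assumes
# socket authentication or credentials in ~/.my.cnf.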
# Generate a database dump if $database_name is set, else skip
if [ -n "$database_name" ]; then
    mysqldump -u root "$database_name" > "dbbackup-$backup_folder_name.sql"
    tar -zcvf "dbbackup-$backup_folder_name.tar.gz" "dbbackup-$backup_folder_name.sql"
fi
# If $website_folder_exclude is set, tar the website folder excluding it
if [ -n "$website_folder_exclude" ]; then
    tar --exclude "$website_folder_exclude" -zcvf "website-$backup_folder_name.tar.gz" "$website_folder"
else
    tar -zcvf "website-$backup_folder_name.tar.gz" "$website_folder"
fi
# If $rclone_config_pass is set, let rclone decrypt its encrypted config
if [ -n "$rclone_config_pass" ]; then
    export RCLONE_CONFIG_PASS="$rclone_config_pass"
fi
# Copy the database and website backups to the S3 bucket
if [ -n "$database_name" ]; then
    rclone copy "/opt/backup/dbbackup-$backup_folder_name.tar.gz" "$rclone_config_name:$s3_bucket_name/$backup_folder_name"
fi
rclone copy "/opt/backup/website-$backup_folder_name.tar.gz" "$rclone_config_name:$s3_bucket_name/$backup_folder_name"