#!/bin/bash
# Create archived backups of zfs datasets
# Usage: backup-zfs-dataset [OPTIONS] [<dataset> <destination>]
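# Example invocation (hostname, dataset, and tag are illustrative):
#   backup-zfs-dataset.sh --base --tag weekly --max-size 1G tank/data backup.example.com/zfs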
# This script makes use of SFTP and authenticates using the .netrc file in the user's home directory.
# You should configure your system accordingly.
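# A minimal ~/.netrc entry might look like this (host and credentials are
# illustrative):
#   machine backup.example.com
#   login backupuser
#   password s3cret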
usage() {
  >&2 printf "Usage: %s [OPTIONS] [<dataset> <destination>]\n" "$0"
  >&2 printf "Options:\n"
  >&2 printf "\t-s --dataset <dataset name> \t Specify dataset name\n"
  >&2 printf "\t-d --destination <path to directory> \t Specify destination\n"
  >&2 printf "\t-b --base \t Create a new base backup\n"
  >&2 printf "\t-i --incremental \t (Default) Create a new incremental backup\n"
  >&2 printf "\t-c --compression-level <level> \t Specify compression level (integer)\n"
  >&2 printf "\t-m --max-size <size> \t Specify maximum size of archive parts\n"
  >&2 printf "\t-t --tag <tag> \t Provide a name to tag the archive\n"
  exit "${1:-1}"
}
# Get options.
while [[ $# -gt 0 ]]; do
  case "${1}" in
    -s | --dataset)
      # An empty value is not a valid dataset name.
      if [ -z "${2}" ]; then
        >&2 printf "Error: Invalid dataset: '%s'\n" "${2}"
        usage
      fi
      dataset="${2}"
      shift 2
      ;;
    -d | --destination)
      destination="${2}"
      shift 2
      ;;
    -b | --base)
      # ${create_base:=1} sets the flag if it is still unset; error out if
      # --incremental was given first.
      if [ "${create_base:=1}" -ne 1 ]; then
        >&2 printf "Error: Cannot create a base backup: an incremental backup was already requested.\n"
        usage
      fi
      shift 1
      ;;
    -i | --incremental)
      if [ "${create_base:=0}" -ne 0 ]; then
        >&2 printf "Error: Cannot create an incremental backup: a base backup was already requested.\n"
        usage
      fi
      shift 1
      ;;
    -c | --compression-level)
      # Accept only a plain integer.
      if ! [[ "${2}" =~ ^[[:digit:]]+$ ]]; then
        >&2 printf "Error: Invalid compression level: '%s'\n" "${2}"
        usage
      fi
      compression_level="${2}"
      shift 2
      ;;
    -m | --max-size)
      # Accept sizes such as 500M or 2G (split(1) suffixes).
      if ! [[ "${2}" =~ ^[[:digit:]]+(K|M|G)$ ]]; then
        >&2 printf "Error: Invalid maximum size: '%s'\n" "${2}"
        usage
      fi
      max_size="${2}"
      shift 2
      ;;
    -t | --tag)
      tag="${2}-"
      shift 2
      ;;
    -*)
      >&2 printf "Error: Invalid option: '%s'\n" "${1}"
      usage
      ;;
    *)
      # First positional argument: stop option parsing so that
      # <dataset> and <destination> can be read below.
      break
      ;;
  esac
done
# Check arguments.
if [[ -z "${dataset:=${1}}" || -z "${destination:=${2}}" ]]; then
  >&2 printf "Error: You need to specify a dataset and a destination.\n"
  usage
fi
# Set defaults.
compression_level="${compression_level:-1}"
max_size="${max_size:-2G}"
# Check if we need to make a new base backup.
if [ "${create_base:=0}" -eq 1 ]; then
  snapshot_name="manual-$( date +%Y-%m-%d_%H-%M )"
  output_filename="${destination}/${tag:=}${snapshot_name}.gz"
  # Create a recursive ZFS snapshot.
  printf "Creating snapshot\n"
  sudo zfs snapshot -r "${dataset}@${snapshot_name}"
  # Send, compress, and upload the stream in parts of at most ${max_size}.
  printf "Backing up now\n"
  sudo zfs send --verbose -R "${dataset}@${snapshot_name}" \
    | gzip "-${compression_level}" --verbose --rsyncable \
    | split - --verbose -b "${max_size}" \
      --filter "curl --netrc -kaT - ftp://${output_filename}"
  # Same as curl --netrc --insecure --append --upload-file
  printf "Base backup written to: '%s'.\n" "${output_filename}"
  printf "Done!\n"
  exit 0
fi
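# To restore a base backup (a sketch; the remote path is illustrative, since
# each part is appended to a single remote file):
#   curl --netrc "ftp://backup.example.com/zfs/manual-2024-01-01_00-00.gz" \
#     | gunzip | sudo zfs receive -F tank/data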
# Working snapshots
# Find snapshots, sorted chronologically by the timestamp in their names:
# sed copies the YYYY-MM-DD_HH-MM stamp to the front of each path so that
# sort orders by date, and cut strips the stamp off again.
snapshots="$( find "/mnt/${dataset}/.zfs/snapshot"/* -maxdepth 0 -type d | sed -E 's/.*-([0-9]{4}-[0-9]{2}-[0-9]{2}_[0-9]{2}-[0-9]{2})/\1\t&/' | sort -n | cut -f 2- | xargs -n1 basename )"
# Use the two latest snapshots.
from_snapshot="$( tail -n2 <<< "${snapshots}" | head -n1 )"
to_snapshot="$( tail -n2 <<< "${snapshots}" | tail -n1 )"
# Check that two distinct snapshots exist.
if [ -z "${from_snapshot}" ] || [ -z "${to_snapshot}" ] || [ "${from_snapshot}" = "${to_snapshot}" ]; then
  >&2 printf "Error: Fewer than two snapshots could be found:\n"
  >&2 printf " From: '%s'\n" "${from_snapshot}"
  >&2 printf " To: '%s'\n" "${to_snapshot}"
  exit 2
fi
# Abort the entire script if anything fails, including any stage of a pipeline.
set -e -o pipefail
# Backups
# Incremental backup.
printf "Creating incremental backup between '%s' and '%s'\n" "${from_snapshot}" "${to_snapshot}"
output_filename="${destination}/${tag:=}${from_snapshot}-${to_snapshot}.gz"
sudo zfs send --verbose -R -i "@${from_snapshot}" "${dataset}@${to_snapshot}" \
  | gzip "-${compression_level}" --verbose \
  | split - --verbose -b "${max_size}" \
    --filter "curl --netrc -kaT - ftp://${output_filename}"
# Same as curl --netrc --insecure --append --upload-file
printf "Incremental backup written to: %s\n" "${output_filename}"
# TODO Cleanup
printf "Done!\n"