From 342a619d15b8b0fb8afef8eb41f1abf891a5778f Mon Sep 17 00:00:00 2001
From: Tibo De Peuter
Date: Sat, 30 Dec 2023 17:44:37 +0100
Subject: [PATCH] Improve backup script

---
 scripts/backup-zfs-dataset.sh | 49 ++++++++++++++++-------------------
 1 file changed, 22 insertions(+), 27 deletions(-)

diff --git a/scripts/backup-zfs-dataset.sh b/scripts/backup-zfs-dataset.sh
index db9fbd3..54eaa57 100755
--- a/scripts/backup-zfs-dataset.sh
+++ b/scripts/backup-zfs-dataset.sh
@@ -2,6 +2,9 @@
 # Create archived backups of zfs datasets
 # Usage: backup-zfs-dataset [OPTIONS] [<dataset> <destination>]
 
+# This script makes use of SFTP and authenticates using the .netrc file in the user's home directory.
+# You should configure your system accordingly.
+
 usage() {
     >&2 printf "Usage: %s [OPTIONS] [<dataset> <destination>]\n" "$0"
     >&2 printf "Options:\n"
@@ -28,22 +31,18 @@ while [[ $# -gt 0 ]]; do
             shift 2
             ;;
         -d | --destination)
-            if ! [ -d "${2}" ]; then
-                >&2 printf "Error: Specified destination does not exist: '%s'\n" "${2}"
-                usage
-            fi
             destination="${2}"
             shift 2
             ;;
         -b | --base)
-            if [ "${create_base:=1}" -neq 1 ]; then
+            if [ "${create_base:=1}" -ne 1 ]; then
                 >&2 printf "Error: Cannot create base backup when specifying differently.\n"
                 usage
             fi
             shift 1
             ;;
         -i | --incremental)
-            if [ "${create_base:=0}" -neq 0 ]; then
+            if [ "${create_base:=0}" -ne 0 ]; then
                 >&2 printf "Error: Cannot create incremental backup when specifying differently.\n"
                 usage
             fi
@@ -84,9 +83,6 @@ if [[ -z "${dataset:=${1}}" || -z "${destination:=${2}}" ]]; then
 elif [ -z "${dataset}" ]; then
     >&2 printf "Error: Invalid dataset: '%s'\n" "${1}"
     usage
-elif ! [ -d "${destination}" ]; then
-    >&2 printf "Error: Specified destination does not exist: '%s'\n" "${2}"
-    usage
 fi
 
 # Set defaults
@@ -99,12 +95,13 @@ if [ "${create_base:=0}" -eq 1 ]; then
     output_filename="${destination}/${tag:=}${snapshot_name}.gz"
     # Create ZFS snapshot
     printf "Creating snapshot\n"
-    sudo zfs snapshot "${dataset}@${snapshot_name}"
+    sudo zfs snapshot -r "${dataset}@${snapshot_name}"
     # Compress it
     printf "Backing up now\n"
-    sudo zfs send --verbose "${dataset}@${snapshot_name}" \
+    sudo zfs send --verbose -R "${dataset}@${snapshot_name}" \
         | gzip "-${compression_level}" --verbose --rsyncable \
-        | split - --verbose -b "${max_size}" "${output_filename}.part."
+        | split - --verbose -b "${max_size}" \
+            --filter "curl --netrc -kaT - sftp://${output_filename}"
     printf "Written base backup to: '%s'.\n" "${output_filename}"
     printf "Done!\n"
     exit 0
@@ -113,21 +110,18 @@ fi
 # Working snapshots
 
 # Find snapshots
-snapshot_location="/mnt/${dataset}/.zfs/snapshot"
-latest_auto="$( find "${snapshot_location}"/* -maxdepth 0 -name 'auto*' -type d | sort -n | tail -n1 | xargs -n1 basename )"
-latest_manual="$( find "${snapshot_location}"/* -maxdepth 0 -name 'manual*' -type d | sort -n | tail -n1 | xargs -n1 basename )"
+snapshots="$( find "/mnt/${dataset}/.zfs/snapshot"/* -maxdepth 0 -name 'auto*' -type d | sed -E 's/.*-([0-9]{4}-[0-9]{2}-[0-9]{2}_[0-9]{2}-[0-9]{2})/\1\t&/' | sort -n | cut -f 2- | xargs -n1 basename )"
+# Use the two latest snapshots
+from_snapshot="$( tail -n2 <<< "${snapshots}" | head -n1 )"
+to_snapshot="$( tail -n2 <<< "${snapshots}" | tail -n1 )"
 
 # Check snapshots existance
-if ! [ -n "${latest_manual}" ]; then
-    >&2 printf "Error: No manual snapshot could be found!\n"
+if [ -z "${from_snapshot}" ] || [ -z "${to_snapshot}" ]; then
+    >&2 printf "Error: Less than two snapshots could be found:\n"
+    >&2 printf "  From: '%s'\n" "${from_snapshot}"
+    >&2 printf "  To: '%s'\n" "${to_snapshot}"
     exit 2
 fi
-if ! [ -n "${latest_auto}" ]; then
-    >&2 printf "Error: No automatic snapshot could be found!\n"
-    exit 2
-fi
-
-printf "Latest manual snapshot: %s\nLatest auto snapshot: %s\n" "${latest_manual}" "${latest_auto}"
 
 # Abort entire script if anything fails.
 set -e
@@ -135,11 +129,12 @@ set -e
 # Backups
 
 # Incremental incremental backup.
-printf "Creating incremental backup between %s and %s\n" "${latest_manual}" "${latest_auto}"
-output_filename="${destination}/${tag}${latest_manual}-${latest_auto}.gz"
-sudo zfs send --verbose -i "@${latest_manual}" "${dataset}@${latest_auto}" \
+printf "Creating incremental backup between '%s' and '%s'\n" "${from_snapshot}" "${to_snapshot}"
+output_filename="${destination}/${tag}${from_snapshot}-${to_snapshot}.gz"
+sudo zfs send --verbose -R -i "@${from_snapshot}" "${dataset}@${to_snapshot}" \
    | gzip "-${compression_level}" --verbose \
-    | split - --verbose -b "${max_size}" "${output_filename}.part."
+    | split - --verbose -b "${max_size}" \
+        --filter "curl --netrc -kaT - sftp://${output_filename}"
 printf "Written incremental backup to: %s\n" "${output_filename}"
 
 # TODO Cleanup
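
Note on using the new SFTP upload path (this note is not part of the patch): curl's
--netrc flag makes it read the credentials for the target host from ~/.netrc, which is
why the added header comment asks you to configure that file. A minimal sketch, with a
hypothetical host "backup.example.com", user "backupuser" and remote path
"backups/tank" standing in as placeholders:

    # ~/.netrc -- credentials for the SFTP destination (example values only)
    machine backup.example.com
    login backupuser
    password s3cr3t

    # Keep the file private; it contains a plain-text password.
    chmod 600 ~/.netrc

Because the script now builds the upload URL as "sftp://<destination>/<archive name>",
the destination argument should start with the remote host, for example
"-d backup.example.com/backups/tank" (again hypothetical), rather than a local
directory; accordingly, the patch also drops the old check that the destination exists
as a local path.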