From 33b1bd70f2c69f8d3b9ac2e94ccc4ab86f5dc058 Mon Sep 17 00:00:00 2001
From: tdpeuter
Date: Sat, 7 Oct 2023 11:06:10 +0200
Subject: [PATCH] Add ZFS dataset backup script #1 and #22

---
 scripts/backup-zfs-dataset.sh | 137 ++++++++++++++++++++++++++++++++++
 1 file changed, 137 insertions(+)
 create mode 100755 scripts/backup-zfs-dataset.sh

diff --git a/scripts/backup-zfs-dataset.sh b/scripts/backup-zfs-dataset.sh
new file mode 100755
index 0000000..f93cbf3
--- /dev/null
+++ b/scripts/backup-zfs-dataset.sh
@@ -0,0 +1,137 @@
+#!/bin/bash
+# Create archived backups of ZFS datasets, with support for incremental backups from automated snapshots.
+# Usage: backup-zfs-dataset [OPTIONS]
+
+usage() {
+    >&2 printf "Usage: %s [OPTIONS]\n" "$0"
+    >&2 printf "Options:\n"
+    >&2 printf "\t-c --compression-level \t Specify gzip compression level (e.g. -9)\n"
+    >&2 printf "\t-s --dataset \t\t Specify dataset name\n"
+    >&2 printf "\t-d --destination \t Specify destination directory\n"
+    >&2 printf "\t-m --max-size \t\t Specify maximum size of archive parts (e.g. 2G)\n"
+    >&2 printf "\t-f --force \t\t Recreate the manual backup even if one already exists\n"
+    exit "${1:-1}"
+}
+
+# Get options.
+
+while [[ $# -gt 0 ]]; do
+    case "${1}" in
+        --compression-level | -c)
+            if ! [[ "${2}" =~ ^-[[:digit:]]$ ]]; then
+                >&2 printf "Error: Invalid compression level: %s\n" "${2}"
+                usage
+            fi
+            compression_level="${2}"
+            shift 2
+            ;;
+        --dataset | -s)
+            if [ -z "${2}" ]; then
+                >&2 printf "Error: Invalid dataset: %s\n" "${2}"
+                usage
+            fi
+            dataset="${2}"
+            shift 2
+            ;;
+        --destination | -d)
+            if ! [ -d "${2}" ]; then
+                >&2 printf "Error: Destination directory does not exist: %s\n" "${2}"
+                usage
+            fi
+            destination="${2}"
+            shift 2
+            ;;
+        --max-size | -m)
+            if ! [[ "${2}" =~ ^[[:digit:]]+[KMG]$ ]]; then
+                >&2 printf "Error: Invalid maximum size: %s\n" "${2}"
+                usage
+            fi
+            max_size="${2}"
+            shift 2
+            ;;
+        --force | -f)
+            force_create_manual_backup=1
+            shift
+            ;;
+        *)
+            >&2 printf "Error: Invalid option: %s\n" "${1}"
+            usage
+            ;;
+    esac
+done
+
+# Check arguments.
+
+if [ -z "${dataset}" ]; then
+    >&2 printf "Error: You need to specify a dataset.\n"
+    usage
+fi
+if [ -z "${destination}" ]; then
+    >&2 printf "Error: You need to specify a destination.\n"
+    usage
+fi
+
+# Defaults.
+: "${compression_level:=-1}"
+: "${max_size:=2G}"
+
+# Working snapshots
+
+# Find snapshots
+snapshot_location="/mnt/${dataset}/.zfs/snapshot"
+latest_auto="$( find "${snapshot_location}"/* -maxdepth 0 -name 'auto*' -type d 2>/dev/null | sort -n | tail -n1 | xargs -n1 basename 2>/dev/null )"
+latest_manual="$( find "${snapshot_location}"/* -maxdepth 0 -name 'manual*' -type d 2>/dev/null | sort -n | tail -n1 | xargs -n1 basename 2>/dev/null )"
+
+# Check snapshots existence
+if [ -z "${latest_manual}" ]; then
+    >&2 printf "Error: No manual snapshot could be found!\n"
+    exit 2
+fi
+if [ -z "${latest_auto}" ]; then
+    >&2 printf "Error: No automatic snapshot could be found!\n"
+    exit 2
+fi
+
+printf "Latest auto snapshot: %s\nLatest manual snapshot: %s\n" "${latest_auto}" "${latest_manual}"
+
+# Abort entire script if anything fails.
+set -e
+
+# Backups
+
+# Base backup.
+output_filename="${destination}/${latest_manual}.gz"
+existing_backup="$( find "${destination}" -type f -name "${latest_manual}.gz.part.[a-z][a-z]" -print )" # -quit if you don't need to know every file.
+if [ -z "${existing_backup}" ]; then
+    printf "Info: If you've manually created a new snapshot, you might want to remove the old backups.\n"
+    printf "Latest manual snapshot was not yet backed up, backing up now.\n"
+    sudo zfs send --verbose "${dataset}@${latest_manual}" \
+        | gzip "${compression_level}" --verbose --rsyncable \
+        | split --verbose -b "${max_size}" - "${output_filename}.part."
+ printf "Written manual backup to: %s\n" "${output_filename}" +elif [ "${force_create_manual_backup}" ]; then + # TODO What if the new backup is smaller than the previous? + printf "Removing previous backup files.\n" + rm "${existing_backup}" + printf "Backing up manual snapshot.\n" + sudo zfs send --verbose "${dataset}@${latest_manual}" \ + | gzip "${compression_level:='-1'}" --verbose --rsyncable \ + | split - --verbose -b "${max_size}" "${output_filename}.part." + printf "Written manual backup to: %s\n" "${output_filename}" +else + printf "Found existing backup of manual snapshot: %s\n" "${existing_backup}" +fi + +# Incremental incremental backup. +printf "Creating incremental backup between %s and %s\n" "${latest_manual}" "${latest_auto}" +output_filename="${destination}/${latest_manual}-${latest_auto}.gz" +sudo zfs send -i "@${latest_manual}" "${dataset}@${latest_auto}" \ + | gzip "${compression_level}" --verbose \ + | split - --verbose -b "${max_size:='2G'}" "${output_filename}.part." +printf "Written incremental backup to: %s\n" "${output_filename}" + +# TODO Cleanup + +printf "Done!\n"