#!/bin/bash
|
|
# Create archived backups of zfs datasets
|
|
# Usage: backup-zfs-dataset [OPTIONS] [<dataset> <destination>]
|
|
|
|
# This script uploads with curl over FTP/FTPS and authenticates using the .netrc file in the user's home directory.
|
|
# You should configure your system accordingly.
|
|
|
|
# Print the usage/help text to stderr and terminate.
# $1 - optional exit status (defaults to 1, i.e. "called on error").
usage() {
  {
    printf "Usage: %s [OPTIONS] [<dataset> <destination>]\n" "$0"
    printf "Options:\n"
    printf "\t-s --dataset <dataset name> \t Specify dataset name\n"
    printf "\t-d --destination <path to directory> \t Specify destination\n"
    printf "\t-b --base \t Create a new base\n"
    printf "\t-i --incremental \t (Default) Create a new incremental backup\n"
    printf "\t-c --compression-level <level> \t Specify compression level (integer)\n"
    printf "\t-m --max-size <size> \t Specify maximum size of archive parts\n"
    printf "\t-t --tag <tag> \t Provide a name to tag the archive\n"
  } >&2
  exit "${1:-1}"
}
|
|
|
|
# Get options.
#
# Each handler shifts the arguments it consumed.  Non-option arguments stop
# the loop so they survive as the positional <dataset> <destination> pair
# consumed by the argument check below.
while [[ $# -gt 0 ]]; do
  case "${1}" in
    -s | --dataset)
      if [ -z "${2}" ]; then
        >&2 printf "Error: Invalid dataset: '%s'\n" "${2}"
        usage
      fi
      dataset="${2}"
      shift 2
      ;;
    -d | --destination)
      # Reject a missing value: "shift 2" with only one argument left would
      # not shift at all and the loop would spin forever.
      if [ -z "${2}" ]; then
        >&2 printf "Error: Invalid destination: '%s'\n" "${2}"
        usage
      fi
      destination="${2}"
      shift 2
      ;;
    -b | --base)
      # ${create_base:=1} claims "base" if the mode is still unset; a value
      # already forced to 0 by -i means the two options conflict.
      if [ "${create_base:=1}" -ne 1 ]; then
        >&2 printf "Error: Cannot create base backup when specifying differently.\n"
        usage
      fi
      shift 1
      ;;
    -i | --incremental)
      if [ "${create_base:=0}" -ne 0 ]; then
        >&2 printf "Error: Cannot create incremental backup when specifying differently.\n"
        usage
      fi
      shift 1
      ;;
    -c | --compression-level | --compression_level)
      # Accept the documented --compression-level spelling; the historical
      # underscore spelling is kept for backward compatibility.
      # Anchored pattern: the value is passed to gzip as "-<level>", so a
      # string that merely CONTAINS a digit (e.g. "x9x") must be rejected.
      if ! [[ "${2}" =~ ^[[:digit:]]+$ ]]; then
        >&2 printf "Error: Invalid compression level: '%s'\n" "${2}"
        usage
      fi
      compression_level="${2}"
      shift 2
      ;;
    -m | --max-size)
      # split -b size: one or more digits followed by a K/M/G suffix.
      if ! [[ "${2}" =~ ^[[:digit:]]+(K|M|G)$ ]]; then
        >&2 printf "Error: Invalid maximum size: '%s'\n" "${2}"
        usage
      fi
      max_size="${2}"
      shift 2
      ;;
    -t | --tag)
      # The trailing dash separates the tag from the snapshot name in the
      # archive file name.
      tag="${2}-"
      shift 2
      ;;
    -h | --help)
      usage 0
      ;;
    --)
      # Conventional end-of-options marker.
      shift
      break
      ;;
    -*)
      >&2 printf "Error: Invalid option: '%s'\n" "${1}"
      usage
      ;;
    *)
      # Positional <dataset> <destination>; leave them in place for the
      # ${dataset:=${1}} / ${destination:=${2}} fallback below.
      break
      ;;
  esac
done
|
|
|
|
# Check arguments.
#
# Options take precedence; otherwise fall back to the positional
# <dataset> <destination> arguments.  (The previous revision carried an
# `elif [ -z "${dataset}" ]` branch here that was unreachable: an empty
# dataset already triggers the first condition.)
if [[ -z "${dataset:=${1}}" || -z "${destination:=${2}}" ]]; then
  >&2 printf "Error: You need to specify a dataset and a destination.\n"
  usage
fi
|
|
|
|
# Set defaults (no-op when the options above already provided values).
: "${compression_level:=1}"
: "${max_size:=2G}"
|
|
|
|
# Check if you need to make a new base backup
# A base (full) backup is taken only when -b/--base was given
# (create_base=1); otherwise execution falls through to the incremental
# path below.
if [ "${create_base:=0}" -eq 1 ]; then
# Timestamped snapshot name, e.g. "manual-2024-01-31_12-00".
snapshot_name="manual-$( date +%Y-%m-%d_%H-%M )"
# ${tag:=} merely defaults tag to empty here.  The remote path is
# "<destination>/<tag><snapshot>.gz"; destination is prefixed with
# "ftp://" below, so it presumably includes the FTP host — TODO confirm.
output_filename="${destination}/${tag:=}${snapshot_name}.gz"
# Create ZFS snapshot
printf "Creating snapshot\n"
sudo zfs snapshot -r "${dataset}@${snapshot_name}"
# Compress it
printf "Backing up now\n"
# Stream the full recursive send through gzip, chop the stream into
# max_size chunks, and upload each chunk as it is produced.  Because the
# curl --filter command uses a fixed URL with -a (append), every chunk is
# appended to the SAME remote file rather than written as separate part
# files — presumably to bound the data in flight per transfer rather than
# to produce a multi-part archive; TODO confirm intent.
# --rsyncable makes the gzip stream delta-friendly for later rsync runs.
sudo zfs send --verbose -R "${dataset}@${snapshot_name}" \
| gzip "-${compression_level}" --verbose --rsyncable \
| split - --verbose -b "${max_size}" \
--filter "curl --netrc -kaT - ftp://${output_filename}"
# Same as curl --netrc --insecure --append --upload-file
printf "Written base backup to: '%s'.\n" "${output_filename}"
printf "Done!\n"
exit 0
fi
|
|
|
|
# Working snapshots

# Find snapshots
# List the dataset's snapshot directories and order them chronologically:
# sed copies the "YYYY-MM-DD_HH-MM" timestamp embedded in each name into a
# leading tab-separated sort key, sort -n orders on it, cut strips the key
# again, and basename reduces each path to the bare snapshot name.
# NOTE(review): the glob under .zfs/snapshot makes `find` error out when
# the dataset has no snapshots at all, and the path hard-codes datasets
# being mounted under /mnt — confirm both assumptions.
snapshots="$( find "/mnt/${dataset}/.zfs/snapshot"/* -maxdepth 0 -type d | sed -E 's/.*-([0-9]{4}-[0-9]{2}-[0-9]{2}_[0-9]{2}-[0-9]{2})/\1\t&/' | sort -n | cut -f 2- | xargs -n1 basename )"
# Use the two latest snapshots
from_snapshot="$( tail -n2 <<< "${snapshots}" | head -n1 )"
to_snapshot="$( tail -n2 <<< "${snapshots}" | tail -n1 )"
|
|
|
|
# Check snapshots existence
|
|
# An incremental backup needs two distinct snapshots.  With exactly ONE
# snapshot the tail/head reads above both return that same name (tail -n2
# on one line yields that line), so an emptiness check alone is not
# enough — also reject from == to.
if [ -z "${from_snapshot}" ] || [ -z "${to_snapshot}" ] \
  || [ "${from_snapshot}" = "${to_snapshot}" ]; then
  >&2 printf "Error: Less than two snapshots could be found:\n"
  >&2 printf " From: '%s'\n" "${from_snapshot}"
  >&2 printf " To: '%s'\n" "${to_snapshot}"
  exit 2
fi
|
|
|
|
# Abort entire script if anything fails from here on.
# pipefail extends -e to the backup pipeline below: without it a failure in
# `zfs send` or `gzip` would be masked by the exit status of `split`, the
# last stage of the pipeline.
set -e -o pipefail
|
|
|
|
# Backups

# Incremental backup.
printf "Creating incremental backup between '%s' and '%s'\n" "${from_snapshot}" "${to_snapshot}"
# The remote file name records the snapshot range covered by this increment.
output_filename="${destination}/${tag}${from_snapshot}-${to_snapshot}.gz"
# Send only the delta between the two newest snapshots; the stream is
# compressed, split into max_size chunks, and each chunk is appended
# (curl -a, fixed URL) to one remote file using .netrc credentials.
# NOTE(review): unlike the base-backup path, this gzip call lacks
# --rsyncable — looks unintentional; confirm before aligning the two.
sudo zfs send --verbose -R -i "@${from_snapshot}" "${dataset}@${to_snapshot}" \
| gzip "-${compression_level}" --verbose \
| split - --verbose -b "${max_size}" \
--filter "curl --netrc -kaT - ftp://${output_filename}"
# Same as curl --netrc --insecure --append --upload-file
printf "Written incremental backup to: %s\n" "${output_filename}"
|
|
|
|
# TODO Cleanup

printf '%s\n' 'Done!'
|