From 243b7dacc893521a1d8aa3a7cc01c21e8aa16bde Mon Sep 17 00:00:00 2001
From: Tibo De Peuter
Date: Tue, 10 Oct 2023 22:33:53 +0200
Subject: [PATCH 1/3] Add environment files to gitignore

---
 .gitignore | 1 +
 1 file changed, 1 insertion(+)

diff --git a/.gitignore b/.gitignore
index 09d5c8d..1d26785 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1 +1,2 @@
 *_pass.txt
+*.env

From 6b1078057a3ca70cb7852df9e210577cac3521a2 Mon Sep 17 00:00:00 2001
From: Tibo De Peuter
Date: Tue, 10 Oct 2023 22:34:14 +0200
Subject: [PATCH 2/3] Fix backup issues

Validate the compression level as a plain digit and pass it to gzip as
an actual flag, set the compression-level and max-size defaults in one
place, match existing backups against the split part files rather than
the full output path, quote the emptiness test, drop the misplaced
'>2 /dev/null' redirections that wrote a file named '2', and shift the
parsed options off the argument list in the Nextcloud script.

---
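
Note on restoring: the backups are gzip streams chopped into part files
by split, so a restore has to recombine the parts in suffix order before
decompressing. A minimal, untested sketch (pool, snapshot and path names
are placeholders, not values from this repo):

    # Recombine the parts (split's default suffixes sort correctly),
    # decompress, and replay the full stream into the target dataset.
    cat /mnt/backups/manual-20231010.gz.part.* \
        | gunzip \
        | sudo zfs receive -F tank/dataset

    # An incremental stream replays the same way, on top of the
    # matching base snapshot.
    cat /mnt/backups/manual-20231010-auto-20231011.gz.part.* \
        | gunzip \
        | sudo zfs receive tank/dataset
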
printf "Removing previous backup files.\n" - rm "${existing_backup}" + find "${destination}" -type f -name "${latest_manual}.gz.part.[a-z][a-z]" -print -delete printf "Backing up manual snapshot.\n" sudo zfs send --verbose "${dataset}@${latest_manual}" \ - | gzip "${compression_level:='-1'}" --verbose --rsyncable \ + | gzip "-${compression_level}" --verbose --rsyncable \ | split - --verbose -b "${max_size}" "${output_filename}.part." printf "Written manual backup to: %s\n" "${output_filename}" else @@ -123,9 +126,9 @@ fi # Incremental incremental backup. printf "Creating incremental backup between %s and %s\n" "${latest_manual}" "${latest_auto}" output_filename="${destination}/${latest_manual}-${latest_auto}.gz" -sudo zfs send -i "@${latest_manual}" "${dataset}@${latest_auto}" \ - | gzip "${compression_level}" --verbose \ - | split - --verbose -b "${max_size:='2G'}" "${output_filename}.part." +sudo zfs send --verbose -i "@${latest_manual}" "${dataset}@${latest_auto}" \ + | gzip "-${compression_level}" --verbose \ + | split - --verbose -b "${max_size}" "${output_filename}.part." printf "Written incremental backup to: %s\n" "${output_filename}" # TODO Cleanup diff --git a/scripts/nextcloud/backup-database.sh b/scripts/nextcloud/backup-database.sh index cd4a934..6105468 100755 --- a/scripts/nextcloud/backup-database.sh +++ b/scripts/nextcloud/backup-database.sh @@ -25,6 +25,7 @@ while getopts ":e:" option; do ;; esac done +shift $(( OPTIND - 1 )) # Check arguments. @@ -59,11 +60,11 @@ docker exec "${base_container}" php occ maintenance:mode --on # Database backup echo 'Backing up database' host_database_backupfile="${destination}/${database_backupfile}" -docker exec --env-file "${env_file:='./nextcloud.env'}" "${database_container}" pg_dump 'nextcloud' -cwv -h 'localhost' -U 'nextcloud' > "${host_database_backupfile}" +docker exec --env-file "${env_file:=.env}" "${database_container}" pg_dump 'nextcloud' -cwv -h 'localhost' -U 'nextcloud' > "${host_database_backupfile}" # Files backup for file in 'config' 'themes'; do - printf 'Copying %s\n' "${file}" + printf "Copying %s\n" "${file}" docker cp -a "${base_container}":"/var/www/html/${file}" "${destination}" done @@ -72,7 +73,7 @@ docker exec "${base_container}" php occ maintenance:mode --off # Backup cleanup # Only keep 30 days of backups -printf 'Clean up old database backups' +printf "Clean up old database backups.\n" find "${destination}" -name '*sqlbkp*' -type f -mtime +30 -print -delete -printf 'Done' +printf "Done\n" From dc37b97e919e67935b0bc44452b05b941d5556f8 Mon Sep 17 00:00:00 2001 From: Tibo De Peuter Date: Tue, 10 Oct 2023 22:36:57 +0200 Subject: [PATCH 3/3] Move and improve vaultwarden backup --- backups/create_vaultwarden_backup.sh | 36 ------------ scripts/vaultwarden/backup_database.sh | 77 ++++++++++++++++++++++++++ 2 files changed, 77 insertions(+), 36 deletions(-) delete mode 100755 backups/create_vaultwarden_backup.sh create mode 100755 scripts/vaultwarden/backup_database.sh diff --git a/backups/create_vaultwarden_backup.sh b/backups/create_vaultwarden_backup.sh deleted file mode 100755 index 652da10..0000000 --- a/backups/create_vaultwarden_backup.sh +++ /dev/null @@ -1,36 +0,0 @@ -#!/bin/bash -# Backup script for Vaultwarden in a kubernetes cluster - -BACKUP_DEST='/mnt/PRIVATE_DOCS/BACKUPS/vaultwarden' -PASSFILE='./vaultwarden_pass.txt' - -# Create filename for database -database_backupfile="vaultwarden-sqlbkp_$(date +'%Y%m%d').bak" - -# Retrieve container names -base_container="$( docker ps --format '{{.Names}}' | grep 
 backups/create_vaultwarden_backup.sh   | 36 ------------
 scripts/vaultwarden/backup_database.sh | 79 +++++++++++++++++++++++++
 2 files changed, 79 insertions(+), 36 deletions(-)
 delete mode 100755 backups/create_vaultwarden_backup.sh
 create mode 100755 scripts/vaultwarden/backup_database.sh

diff --git a/backups/create_vaultwarden_backup.sh b/backups/create_vaultwarden_backup.sh
deleted file mode 100755
index 652da10..0000000
--- a/backups/create_vaultwarden_backup.sh
+++ /dev/null
@@ -1,36 +0,0 @@
-#!/bin/bash
-# Backup script for Vaultwarden in a kubernetes cluster
-
-BACKUP_DEST='/mnt/PRIVATE_DOCS/BACKUPS/vaultwarden'
-PASSFILE='./vaultwarden_pass.txt'
-
-# Create filename for database
-database_backupfile="vaultwarden-sqlbkp_$(date +'%Y%m%d').bak"
-
-# Retrieve container names
-base_container="$( docker ps --format '{{.Names}}' | grep vaultwarden_vaultwarden )"
-database_container="$( docker ps --format '{{.Names}}' | grep vaultwarden-postgresql_vaultwarden-postgresql )"
-
-# Abort entire script if any command fails
-set -e
-
-# Database backup
->&2 echo 'Backing up database'
-internal_database_backupfile="/tmp/${database_backupfile}"
-# Create backup file in docker container
-docker exec --env-file "${PASSFILE}" "${database_container}" pg_dump 'vaultwarden' -cwv -h 'localhost' -U 'vaultwarden' -f "${internal_database_backupfile}"
-# Copy backup outside container
-docker cp "${database_container}":"${internal_database_backupfile}" "${BACKUP_DEST}"
-
-# Files backup
-for file in 'attachments' 'sends' 'config.json' 'rsa_key.pem' 'rsa_key.pub.pem'; do
-    >&2 printf 'Copying %s\n' "${file}"
-    docker cp -a "${base_container}":"/data/${file}" "${BACKUP_DEST}"
-done
-
-# Backup cleanup
-# Only keep 30 days of backups, seems about right.
->&2 echo 'Cleaning up old database backups'
-find "${BACKUP_DEST}" -name '*sqlbkp*' -type f -mtime +30 -print -delete
-
->&2 echo 'Done'
diff --git a/scripts/vaultwarden/backup_database.sh b/scripts/vaultwarden/backup_database.sh
new file mode 100755
index 0000000..0b0c640
--- /dev/null
+++ b/scripts/vaultwarden/backup_database.sh
@@ -0,0 +1,79 @@
+#!/bin/bash
+# Backup Vaultwarden database in a Kubernetes environment.
+# Usage: backup-database [OPTIONS] <destination>
+
+usage() {
+    >&2 printf "Usage: %s <destination>\n" "${0}"
+    >&2 printf "Options:\n"
+    >&2 printf "\t-e <file>\t Specify the environment file to use\n"
+    exit "${1:-1}"
+}
+
+# Get options
+
+while getopts ":e:" option; do
+    case "${option}" in
+        e)
+            if ! [ -f "${OPTARG}" ]; then
+                >&2 printf "Error: Specified environment file does not exist: '%s'.\n" "${OPTARG}"
+                usage
+            elif ! [ -r "${OPTARG}" ]; then
+                >&2 printf "Error: Specified environment file is not readable: '%s'.\n" "${OPTARG}"
+                usage
+            fi
+            env_file="${OPTARG}"
+            ;;
+        *)
+            >&2 printf "Error: Invalid option: '%s'.\n" "${OPTARG}"
+            usage
+            ;;
+    esac
+done
+shift $(( OPTIND - 1 ))
+
+# Check arguments
+
+if [ $# -ne 1 ]; then
+    >&2 printf "Error: You need to specify a destination.\n"
+    usage
+elif ! [ -d "${1}" ]; then
+    >&2 printf "Error: Specified destination does not exist or is not readable: '%s'.\n" "${1}"
+    usage
+else
+    destination="${1}"
+fi
+
+# Retrieve container names
+base_container="$( docker ps --format '{{.Names}}' | grep -E 'vaultwarden-2_vaultwarden-2-[0-9a-z]{10}-[0-9a-z]{5}' )"
+database_container="$( docker ps --format '{{.Names}}' | grep postgres_vaultwarden-2-cnpg-main-1 )"
+
+if ! [[ -n "${base_container}" && -n "${database_container}" ]]; then
+    >&2 printf "Error: Not all containers could be found.\n"
+    exit 2
+fi
+
+# Abort entire script if any command fails
+set -e
+
+# Database backup
+
+# Filename for database backup
+database_backupfile="vaultwarden-sqlbkp_$(date +'%Y%m%d').bak"
+host_database_backupfile="${destination}/${database_backupfile}"
+
+# Dump the database straight to a file on the host
+echo 'Backing up database'
+docker exec --env-file "${env_file:=.env}" "${database_container}" pg_dump 'vaultwarden' -cwv -h 'localhost' -U 'vaultwarden' > "${host_database_backupfile}"
+
+# Files backup
+for file in 'attachments' 'sends' 'rsa_key.pem' 'rsa_key.pub.pem'; do # 'config.json'
+    printf 'Copying %s\n' "${file}"
+    docker cp -a "${base_container}":"/data/${file}" "${destination}"
+done
+
+# Backup cleanup
+# Only keep 30 days of backups, seems about right.
+echo 'Cleaning up old database backups'
+find "${destination}" -name '*sqlbkp*' -type f -mtime +30 -print -delete
+
+echo 'Done'
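
Usage sketch for the new script (example invocation; the env-file name
is a placeholder, the destination is the directory the old script
backed up to):

    ./scripts/vaultwarden/backup_database.sh -e ./vaultwarden.env /mnt/PRIVATE_DOCS/BACKUPS/vaultwarden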