Compare commits

..

No commits in common. "dc37b97e919e67935b0bc44452b05b941d5556f8" and "8fc130d0a979c2b7ae488a83c698765d95dea316" have entirely different histories.

5 changed files with 53 additions and 99 deletions

1
.gitignore vendored
View file

@@ -1,2 +1 @@
*_pass.txt *_pass.txt
*.env

View file

@@ -0,0 +1,36 @@
#!/bin/bash
# Backup script for Vaultwarden in a kubernetes cluster
# Host directory that receives all backup artefacts.
BACKUP_DEST='/mnt/PRIVATE_DOCS/BACKUPS/vaultwarden'
# Env file handed to the database container for pg_dump
# (presumably supplies PGPASSWORD — TODO confirm).
PASSFILE='./vaultwarden_pass.txt'
# Create date-stamped filename for the database dump.
database_backupfile="vaultwarden-sqlbkp_$(date +'%Y%m%d').bak"
# Retrieve container names from the running docker daemon.
base_container="$( docker ps --format '{{.Names}}' | grep vaultwarden_vaultwarden )"
database_container="$( docker ps --format '{{.Names}}' | grep vaultwarden-postgresql_vaultwarden-postgresql )"
# Fail fast when either container is not running; without this guard the
# docker commands below would target an empty container name.
if ! [[ -n "${base_container}" && -n "${database_container}" ]]; then
  >&2 printf 'Error: Not all containers could be found.\n'
  exit 2
fi
# Abort entire script if any command fails
set -e
# Database backup
>&2 echo 'Backing up database'
internal_database_backupfile="/tmp/${database_backupfile}"
# Create backup file inside the database container...
docker exec --env-file "${PASSFILE}" "${database_container}" pg_dump 'vaultwarden' -cwv -h 'localhost' -U 'vaultwarden' -f "${internal_database_backupfile}"
# ...then copy it out of the container to the host.
docker cp "${database_container}":"${internal_database_backupfile}" "${BACKUP_DEST}"
# Files backup: copy Vaultwarden's data files out of the app container.
for file in 'attachments' 'sends' 'config.json' 'rsa_key.pem' 'rsa_key.pub.pem'; do
  >&2 printf 'Copying %s\n' "${file}"
  docker cp -a "${base_container}":"/data/${file}" "${BACKUP_DEST}"
done
# Backup cleanup
# Only keep 30 days of backups, seems about right.
>&2 echo 'Cleaning up old database backups'
find "${BACKUP_DEST}" -name '*sqlbkp*' -type f -mtime +30 -print -delete
>&2 echo 'Done'

View file

@ -18,7 +18,7 @@ usage() {
while [[ $# -gt 0 ]]; do while [[ $# -gt 0 ]]; do
case "${1}" in case "${1}" in
-c | --compression_level) -c | --compression_level)
if ! [[ "${2}" =~ [[:digit:]] ]]; then if ! [[ "${2}" =~ -[[:digit:]] ]]; then
>&2 printf "Error: Invalid compression level: '%s'\n" "${2}" >&2 printf "Error: Invalid compression level: '%s'\n" "${2}"
usage usage
fi fi
@ -73,16 +73,12 @@ elif ! [ -d "${destination}" ]; then
usage usage
fi fi
# Set defaults
compression_level="${compression_level:=1}"
max_size="${max_size:=2G}"
# Working snapshots # Working snapshots
# Find snapshots # Find snapshots
snapshot_location="/mnt/${dataset}/.zfs/snapshot" snapshot_location="/mnt/${dataset}/.zfs/snapshot"
latest_auto="$( find "${snapshot_location}"/* -maxdepth 0 -name 'auto*' -type d | sort -n | tail -n1 | xargs -n1 basename )" latest_auto="$( find "${snapshot_location}"/* -maxdepth 0 -name 'auto*' -type d | sort -n | tail -n1 | xargs -n1 basename >2 /dev/null )"
latest_manual="$( find "${snapshot_location}"/* -maxdepth 0 -name 'manual*' -type d | sort -n | tail -n1 | xargs -n1 basename )" latest_manual="$( find "${snapshot_location}"/* -maxdepth 0 -name 'manual*' -type d | sort -n | tail -n1 | xargs -n1 basename >2 /dev/null )"
# Check snapshots existance # Check snapshots existance
if ! [ -n "${latest_manual}" ]; then if ! [ -n "${latest_manual}" ]; then
@ -94,7 +90,7 @@ if ! [ -n "${latest_auto}" ]; then
exit 2 exit 2
fi fi
printf "Latest manual snapshot: %s\nLatest auto snapshot: %s\n" "${latest_manual}" "${latest_auto}" printf "Latest auto snapshot: %s\nLatest manual snapshot: %s\n" "${latest_auto}" "${latest_manual}"
# Abort entire script if anything fails. # Abort entire script if anything fails.
set -e set -e
@ -103,20 +99,21 @@ set -e
# Base backup. # Base backup.
output_filename="${destination}/${latest_manual}.gz" output_filename="${destination}/${latest_manual}.gz"
existing_backup="$( find "${destination}" -type f -name "${latest_manual}.gz.part.[a-z][a-z]" -print -quit )" existing_backup="$( find "${destination}" -type f -name "${output_filename}.part.[a-z][a-z]" -print )" # -quit if you don't need to know every file.
if [ -z ${existing_backup} ]; then if ! [ ${existing_backup} ]; then
printf "Info: If you've manually created a new snapshot, you might want to remove the old backups.\n" printf "Info: If you've manually created a new snapshot, you might want to remove the old backups.\n"
printf "Latest manual snapshot was not yet backed up, backing up now.\n" printf "Latest manual snapshot was not yet backed up, backing up now.\n"
sudo zfs send --verbose "${dataset}@${latest_manual}" \ sudo zfs send --verbose "${dataset}@${latest_manual}" \
| gzip "-${compression_level}" --verbose --rsyncable \ | gzip "${compression_level:='-1'}" --verbose --rsyncable \
| split - --verbose -b "${max_size}" "${output_filename}.part." | split - --verbose -b "${max_size}" "${output_filename}.part."
printf "Written manual backup to: %s\n" "${output_filename}" printf "Written manual backup to: %s\n" "${output_filename}"
elif [ "${force_create_manual_backup}" ]; then elif [ "${force_create_manual_backup}" ]; then
# TODO What if the new backup is smaller than the previous?
printf "Removing previous backup files.\n" printf "Removing previous backup files.\n"
find "${destination}" -type f -name "${latest_manual}.gz.part.[a-z][a-z]" -print -delete rm "${existing_backup}"
printf "Backing up manual snapshot.\n" printf "Backing up manual snapshot.\n"
sudo zfs send --verbose "${dataset}@${latest_manual}" \ sudo zfs send --verbose "${dataset}@${latest_manual}" \
| gzip "-${compression_level}" --verbose --rsyncable \ | gzip "${compression_level:='-1'}" --verbose --rsyncable \
| split - --verbose -b "${max_size}" "${output_filename}.part." | split - --verbose -b "${max_size}" "${output_filename}.part."
printf "Written manual backup to: %s\n" "${output_filename}" printf "Written manual backup to: %s\n" "${output_filename}"
else else
@ -126,9 +123,9 @@ fi
# Incremental incremental backup. # Incremental incremental backup.
printf "Creating incremental backup between %s and %s\n" "${latest_manual}" "${latest_auto}" printf "Creating incremental backup between %s and %s\n" "${latest_manual}" "${latest_auto}"
output_filename="${destination}/${latest_manual}-${latest_auto}.gz" output_filename="${destination}/${latest_manual}-${latest_auto}.gz"
sudo zfs send --verbose -i "@${latest_manual}" "${dataset}@${latest_auto}" \ sudo zfs send -i "@${latest_manual}" "${dataset}@${latest_auto}" \
| gzip "-${compression_level}" --verbose \ | gzip "${compression_level}" --verbose \
| split - --verbose -b "${max_size}" "${output_filename}.part." | split - --verbose -b "${max_size:='2G'}" "${output_filename}.part."
printf "Written incremental backup to: %s\n" "${output_filename}" printf "Written incremental backup to: %s\n" "${output_filename}"
# TODO Cleanup # TODO Cleanup

View file

@ -25,7 +25,6 @@ while getopts ":e:" option; do
;; ;;
esac esac
done done
shift $(( OPTIND - 1 ))
# Check arguments. # Check arguments.
@ -60,11 +59,11 @@ docker exec "${base_container}" php occ maintenance:mode --on
# Database backup # Database backup
echo 'Backing up database' echo 'Backing up database'
host_database_backupfile="${destination}/${database_backupfile}" host_database_backupfile="${destination}/${database_backupfile}"
docker exec --env-file "${env_file:=.env}" "${database_container}" pg_dump 'nextcloud' -cwv -h 'localhost' -U 'nextcloud' > "${host_database_backupfile}" docker exec --env-file "${env_file:='./nextcloud.env'}" "${database_container}" pg_dump 'nextcloud' -cwv -h 'localhost' -U 'nextcloud' > "${host_database_backupfile}"
# Files backup # Files backup
for file in 'config' 'themes'; do for file in 'config' 'themes'; do
printf "Copying %s\n" "${file}" printf 'Copying %s\n' "${file}"
docker cp -a "${base_container}":"/var/www/html/${file}" "${destination}" docker cp -a "${base_container}":"/var/www/html/${file}" "${destination}"
done done
@ -73,7 +72,7 @@ docker exec "${base_container}" php occ maintenance:mode --off
# Backup cleanup # Backup cleanup
# Only keep 30 days of backups # Only keep 30 days of backups
printf "Clean up old database backups.\n" printf 'Clean up old database backups'
find "${destination}" -name '*sqlbkp*' -type f -mtime +30 -print -delete find "${destination}" -name '*sqlbkp*' -type f -mtime +30 -print -delete
printf "Done\n" printf 'Done'

View file

@@ -1,77 +0,0 @@
#!/bin/bash
# Backup Vaultwarden database in a Kubernetes environment.
# Usage: backup-database [OPTIONS] <destination>
# Print usage information on stderr, then terminate.
# $1: exit status to use (defaults to 1 when omitted).
usage() {
  {
    printf "Usage: %s <destination>\n" "${0}"
    printf "Options:\n"
    printf "\t-e \t Specify the environment file to use\n"
  } >&2
  exit "${1:-1}"
}
# Get options. -e points the database step at an alternative env file.
while getopts ":e:" option; do
  case "${option}" in
    e)
      if ! [ -f "${OPTARG}" ]; then
        >&2 printf "Error: Specified environment file does not exist: '%s'.\n" "${OPTARG}"
        # Abort instead of silently continuing with a broken env file.
        usage
      elif ! [ -r "${OPTARG}" ]; then
        >&2 printf "Error: Specified environment file is not readable: '%s'.\n" "${OPTARG}"
        usage
      fi
      env_file="${OPTARG}"
      ;;
    *)
      # NOTE: getopts sets option to '?' for unknown flags; the offending
      # character itself is in OPTARG.
      >&2 printf "Error: Invalid option: '%s'.\n" "${option}"
      usage
      ;;
  esac
done
shift $(( OPTIND - 1 ))
# Check arguments: exactly one positional argument, an existing directory.
if [ $# -ne 1 ]; then
  >&2 printf "Error: You need to specify a destination.\n"
  usage
elif ! [ -d "${1}" ]; then
  >&2 printf "Error: Specified destination does not exist or is not readable : '%s'.\n" "${1}"
  usage
else
  destination="${1}"
fi
# Retrieve container names (kubernetes pod naming: deployment hash + pod suffix).
base_container="$( docker ps --format '{{.Names}}' | grep -E 'vaultwarden-2_vaultwarden-2-[0-9a-z]{10}-[0-9a-z]{5}' )"
database_container="$( docker ps --format '{{.Names}}' | grep postgres_vaultwarden-2-cnpg-main-1 )"
# Fail fast if either container is missing; the docker commands below
# would otherwise run against an empty name.
if ! [[ -n "${base_container}" && -n "${database_container}" ]]; then
  >&2 printf "Error: Not all containers could be found.\n"
  exit 2
fi
# Abort entire script if any command fails
set -e
# Database backup
# Filename for database backup (date-stamped so old dumps can be aged out).
database_backupfile="vaultwarden-sqlbkp_$(date +'%Y%m%d').bak"
host_database_backupfile="${destination}/${database_backupfile}"
# Create backup file in docker container; the dump is streamed straight
# to a file on the host via stdout redirection.
echo 'Backing up database'
docker exec --env-file "${env_file:=.env}" "${database_container}" pg_dump 'vaultwarden' -cwv -h 'localhost' -U 'vaultwarden' > "${host_database_backupfile}"
# Files backup: copy Vaultwarden's data files out of the app container.
for file in 'attachments' 'sends' 'rsa_key.pem' 'rsa_key.pub.pem'; do # 'config.json'
  printf 'Copying %s\n' "${file}"
  docker cp -a "${base_container}":"/data/${file}" "${destination}"
done
# Backup cleanup
# Only keep 30 days of backups, seems about right.
echo 'Cleaning up old database backups'
find "${destination}" -name '*sqlbkp*' -type f -mtime +30 -print -delete
echo 'Done'