Compare commits
No commits in common. "33b1bd70f2c69f8d3b9ac2e94ccc4ab86f5dc058" and "75d4055d78b29fd223983e445746f728ef4fba44" have entirely different histories.
33b1bd70f2
...
75d4055d78
5 changed files with 40 additions and 200 deletions
40
backups/create_nextcloud_backup.sh
Executable file
40
backups/create_nextcloud_backup.sh
Executable file
|
@ -0,0 +1,40 @@
|
|||
#!/bin/bash
# Backup script for Nextcloud in a kubernetes cluster
#
# Puts the instance into maintenance mode, dumps the 'nextcloud' PostgreSQL
# database into BACKUP_DEST with a dated filename, copies the 'config' and
# 'themes' directories out of the Nextcloud container, then leaves maintenance
# mode and prunes database backups older than 30 days.

# Abort the entire script if any command fails, an unset variable is used, or
# any stage of a pipeline fails. This must come BEFORE the container lookups:
# previously 'set -e' was enabled only after them, so a stopped container left
# the variables empty and the script carried on with bogus 'docker exec' calls.
set -euo pipefail

BACKUP_DEST='/mnt/PRIVATE_DOCS/home/backup'
# Environment file with the database credentials, passed to pg_dump's container.
# NOTE(review): relative path — assumes the script is run from its own directory.
PASSFILE='./nextcloud_pass.txt'

# Create filename for database backup, e.g. nextcloud-sqlbkp_20240101.bak
database_backupfile="nextcloud-sqlbkp_$(date +'%Y%m%d').bak"

# Retrieve container names. With pipefail active, a non-matching grep makes the
# whole substitution (and thus the script) fail instead of continuing silently.
base_container="$( docker ps --format '{{.Names}}' | grep hugocloud-nextcloud_hugocloud-nextcloud )"
database_container="$( docker ps --format '{{.Names}}' | grep hugocloud-postgresql_hugocloud-postgresql )"

# Turn on maintenance mode so files and database stay consistent during backup.
docker exec --user www-data "${base_container}" php occ maintenance:mode --on

# Database backup: dump inside the container, then copy the dump to the host.
>&2 echo 'Backing up database'
internal_database_backupfile="/tmp/${database_backupfile}"
docker exec --env-file "${PASSFILE}" "${database_container}" pg_dump 'nextcloud' -cwv -h 'localhost' -U 'nextcloud' -f "${internal_database_backupfile}"
docker cp "${database_container}":"${internal_database_backupfile}" "${BACKUP_DEST}"

# Files backup
for file in 'config' 'themes'; do
  >&2 printf 'Copying %s\n' "${file}"
  docker cp -a "${base_container}":"/var/www/html/${file}" "${BACKUP_DEST}"
done

# Turn off maintenance mode
docker exec --user www-data "${base_container}" php occ maintenance:mode --off

# Backup cleanup
# Only keep 30 days of backups
>&2 echo 'Clean up old database backups'
find "${BACKUP_DEST}" -name '*sqlbkp*' -type f -mtime +30 -print -delete

>&2 echo 'Done'
|
Before Width: | Height: | Size: 6.2 KiB After Width: | Height: | Size: 6.2 KiB |
Before Width: | Height: | Size: 9.9 KiB After Width: | Height: | Size: 9.9 KiB |
|
@ -1,122 +0,0 @@
|
|||
#!/bin/bash
# Create archived backups of zfs datasets, with support for incremental backups
# from automated snapshots.
# Usage: backup-zfs-dataset [OPTIONS] <dataset>

# Print usage to stderr and exit (status from $1, default 1).
usage() {
  >&2 printf "Usage: %s [OPTIONS] <dataset>\n" "$0"
  >&2 printf "Options:\n"
  >&2 printf "\t-c --compression-level <level> \t Specify compression level (integer)\n"
  >&2 printf "\t-s --dataset <dataset name> \t Specify dataset name\n"
  >&2 printf "\t-d --destination <path to directory> \t Specify destination\n"
  >&2 printf "\t-m --max-size <size> \t Specify maximum size of archive parts\n"
  exit "${1:-1}"
}

# Get options.

while [[ $# -gt 0 ]]; do
  case "${1}" in
    # BUG FIX: usage advertises --compression-level but only the underscore
    # spelling was accepted; accept both for backward compatibility.
    --compression-level | --compression_level | -c)
      if ! [[ "${2}" =~ -[[:digit:]] ]]; then
        >&2 printf "Error: Invalid compression level: %s\n" "${2}"
        usage
      fi
      compression_level="${2}"
      shift 2
      ;;
    --dataset | -s)
      if [ -z "${2}" ]; then
        >&2 printf "Error: Invalid dataset: %s\n" "${2}"
        usage
      fi
      dataset="${2}"
      shift 2
      ;;
    --destination | -d)
      if ! [ -d "${2}" ]; then
        >&2 printf "Error: Destination directory does not exist: %s\n" "${2}"
        usage
      fi
      destination="${2}"
      shift 2
      ;;
    --max-size | -m)
      if ! [[ "${2}" =~ [[:digit:]](K|M|G) ]]; then
        >&2 printf "Error: Invalid maximum size: %s\n" "${2}"
        usage
      fi
      max_size="${2}"
      shift 2
      ;;
    *)
      >&2 printf "Error: Invalid option: %s\n" "${1}"
      usage
      ;;
  esac
done

# Check arguments.

if [ -z "${dataset}" ]; then
  >&2 printf "Error: You need to specify a dataset.\n"
  usage
fi
# Without a destination the archives would silently land under '/'.
if [ -z "${destination}" ]; then
  >&2 printf "Error: You need to specify a destination.\n"
  usage
fi

# Apply defaults up front. Previously '-1' and '2G' were assigned ad hoc at
# individual use sites, so the base backup ran 'split -b ""' when -m was omitted.
compression_level="${compression_level:--1}"
max_size="${max_size:-2G}"

# Working snapshots

# Find the newest automatic and manual snapshots by name.
# BUG FIX: the original ended these pipelines with '>2 /dev/null', which
# redirected stdout to a file literally named '2' (and fed /dev/null to
# basename), so the variables always ended up empty. Use '2>/dev/null'.
snapshot_location="/mnt/${dataset}/.zfs/snapshot"
latest_auto="$( find "${snapshot_location}"/* -maxdepth 0 -name 'auto*' -type d 2>/dev/null | sort -n | tail -n1 | xargs -n1 basename )"
latest_manual="$( find "${snapshot_location}"/* -maxdepth 0 -name 'manual*' -type d 2>/dev/null | sort -n | tail -n1 | xargs -n1 basename )"

# Check snapshots existence
if [ -z "${latest_manual}" ]; then
  >&2 printf "Error: No manual snapshot could be found!\n"
  exit 2
fi
if [ -z "${latest_auto}" ]; then
  >&2 printf "Error: No automatic snapshot could be found!\n"
  exit 2
fi

printf "Latest auto snapshot: %s\nLatest manual snapshot: %s\n" "${latest_auto}" "${latest_manual}"

# Abort entire script if anything fails.
set -e

# Compress stdin with gzip and split the stream into parts named "$1.part.*".
# Extracted so the three backup call sites share one pipeline ('--rsyncable'
# was previously missing from the incremental branch — an inconsistency).
compress_and_split() {
  gzip "${compression_level}" --verbose --rsyncable \
    | split - --verbose -b "${max_size}" "${1}.part."
}

# Backups

# Base backup.
output_filename="${destination}/${latest_manual}.gz"
# BUG FIX: 'find -name' matches basenames only; the original pattern included
# the destination path and therefore never matched, so every run re-created the
# base backup. Match on the basename pattern instead.
existing_backup="$( find "${destination}" -type f -name "${latest_manual}.gz.part.[a-z][a-z]" -print )" # -quit if you don't need to know every file.
if [ -z "${existing_backup}" ]; then
  printf "Info: If you've manually created a new snapshot, you might want to remove the old backups.\n"
  printf "Latest manual snapshot was not yet backed up, backing up now.\n"
  sudo zfs send --verbose "${dataset}@${latest_manual}" | compress_and_split "${output_filename}"
  printf "Written manual backup to: %s\n" "${output_filename}"
elif [ -n "${force_create_manual_backup:-}" ]; then
  # TODO What if the new backup is smaller than the previous?
  printf "Removing previous backup files.\n"
  # existing_backup may hold several newline-separated part files; remove each
  # individually instead of passing them to rm as one mangled argument.
  printf '%s\n' "${existing_backup}" | xargs -n1 rm
  printf "Backing up manual snapshot.\n"
  sudo zfs send --verbose "${dataset}@${latest_manual}" | compress_and_split "${output_filename}"
  printf "Written manual backup to: %s\n" "${output_filename}"
else
  printf "Found existing backup of manual snapshot: %s\n" "${existing_backup}"
fi

# Incremental backup from the manual baseline up to the newest auto snapshot.
printf "Creating incremental backup between %s and %s\n" "${latest_manual}" "${latest_auto}"
output_filename="${destination}/${latest_manual}-${latest_auto}.gz"
sudo zfs send -i "@${latest_manual}" "${dataset}@${latest_auto}" | compress_and_split "${output_filename}"
printf "Written incremental backup to: %s\n" "${output_filename}"

# TODO Cleanup

printf "Done!\n"
|
|
@ -1,78 +0,0 @@
|
|||
#!/bin/bash
# Backup Nextcloud database in a Kubernetes environment.
# Usage: backup-database [-e <env file>] <destination>

# Print usage to stderr and exit (status from $1, default 1).
usage() {
  >&2 printf "Usage: %s [-e <env file>] <destination>\n" "${0}"
  exit "${1:-1}"
}

# Get options

while getopts ":e:" option; do
  case "${option}" in
    e)
      # BUG FIX: an invalid env file used to print an error yet fall through
      # and be used anyway; abort instead.
      if ! [ -f "${OPTARG}" ]; then
        >&2 printf "Error: Specified environment file does not exist: '%s'.\n" "${OPTARG}"
        usage
      elif ! [ -r "${OPTARG}" ]; then
        >&2 printf "Error: Specified environment file is not readable: '%s'.\n" "${OPTARG}"
        usage
      fi
      env_file="${OPTARG}"
      ;;
    *)
      >&2 printf "Error: Invalid option: '%s'.\n" "${option}"
      usage
      ;;
  esac
done
# BUG FIX: discard the parsed options so "$1" is the positional <destination>.
# The original omitted this shift, so invoking '-e <file> <dest>' always failed
# the argument-count check below.
shift "$((OPTIND - 1))"

# Check arguments.

if [ $# -ne 1 ]; then
  >&2 printf "Error: You need to specify a destination.\n"
  usage
elif ! [ -d "${1}" ]; then
  >&2 printf "Error: Specified destination does not exist or is not readable : '%s'.\n" "${1}"
  usage
else
  destination="${1}"
fi

# Filename for database backup, e.g. nextcloud-sqlbkp_20240101.bak
database_backupfile="nextcloud-sqlbkp_$(date +'%Y%m%d').bak"

# Retrieve container names
base_container="$( docker ps --format '{{.Names}}' | grep -E 'nextcloud-2_nextcloud-2-[0-9a-z]{10}-[0-9a-z]{5}' )"
database_container="$( docker ps --format '{{.Names}}' | grep postgres_nextcloud-2-cnpg-main-1 )"

if ! [[ -n "${base_container}" && -n "${database_container}" ]]; then
  >&2 printf "Error: Not all containers could be found.\n"
  exit 2
fi

# Abort entire script if any command fails
set -e

# Turn on maintenance mode so files and database stay consistent during backup.
docker exec "${base_container}" php occ maintenance:mode --on

# Database backup: stream the dump straight to a file on the host.
echo 'Backing up database'
host_database_backupfile="${destination}/${database_backupfile}"
docker exec --env-file "${env_file:='./nextcloud.env'}" "${database_container}" pg_dump 'nextcloud' -cwv -h 'localhost' -U 'nextcloud' > "${host_database_backupfile}"

# Files backup
for file in 'config' 'themes'; do
  printf 'Copying %s\n' "${file}"
  docker cp -a "${base_container}":"/var/www/html/${file}" "${destination}"
done

# Turn off maintenance mode
docker exec "${base_container}" php occ maintenance:mode --off

# Backup cleanup
# Only keep 30 days of backups
# BUG FIX: the final printf calls were missing their trailing newlines.
printf 'Clean up old database backups\n'
find "${destination}" -name '*sqlbkp*' -type f -mtime +30 -print -delete

printf 'Done\n'
|
Loading…
Reference in a new issue