# app-backup/usr-local-sbin/app-backup.sh
# NOTE(review): the lines above/around this header were a file-browser export
# artifact ("Files", "442 lines", "16 KiB", ...), not shell code. They have been
# commented out; for the shebang below to take effect the file must start at
# "#!/usr/bin/env bash" — strip this header entirely when deploying.
#!/usr/bin/env bash
set -Eeuo pipefail
umask 027
# ==============================================================================
# app-backup.sh
# - Separate archives per component (db / wordpress / nextcloud-code / nextcloud-data / gitea)
# - rclone tuned for large files (OneDrive chunking + timeouts + conservative concurrency)
# - Nextcloud "data" excluded from code backup (layout: ${NC_DIR}/data)
# - Gitea native install (systemd), data path configurable (default: /var/lib/gitea/data)
# ==============================================================================
# ---------- Logging ----------
LOG_DIR="/var/log/app-backup"
mkdir -p "$LOG_DIR"
# Per-run timestamp; reused for log names, staging dir and archive names.
ts="$(date '+%Y-%m-%d_%H-%M-%S')"
LOG_FILE="${LOG_DIR}/app-backup_${ts}.log"
# Mirror everything (stdout + stderr) into the log file AND the systemd
# journal (tag "app-backup") via a tee | systemd-cat process substitution.
exec > >(tee -a "$LOG_FILE" | systemd-cat -t app-backup -p info) 2>&1
# ---------- Config ----------
# Site-specific settings (paths, DB names/credentials, ENABLE_* toggles) live
# in this file; everything it does not set falls back to the defaults below.
CONFIG_FILE="/etc/app-backup/app-backup.conf"
if [[ -r "$CONFIG_FILE" ]]; then
# shellcheck disable=SC1090
source "$CONFIG_FILE"
else
echo "ERROR: Config not found/readable: $CONFIG_FILE"
exit 2
fi
# ---------- Defaults (if not set in conf) ----------
: "${WORKDIR:=/var/backups/app-backup}"
: "${STAGING_ROOT:=${WORKDIR}/staging}"
: "${ARCHIVE_DIR:=${WORKDIR}/archives}"
: "${LOCAL_RETENTION_DAYS:=7}"
: "${COMPRESSOR:=zstd}" # zstd|gzip
: "${ARCHIVE_PREFIX:=appbackup}" # file prefix
# Feature/service defaults
# FIX: ENABLE_DB_DUMPS is read unconditionally later (tool checks, dump phase);
# without a default an unset value aborts the whole run via `set -u`.
# "false" matches the inline `:-false` convention of the other ENABLE_* flags.
: "${ENABLE_DB_DUMPS:=false}"
# FIX: GITEA_SERVICE_NAME is expanded unguarded when ENABLE_GITEA=true;
# default to the unit name of a native install.
: "${GITEA_SERVICE_NAME:=gitea}"
# rclone
: "${RCLONE_BIN:=rclone}"
: "${RCLONE_REMOTE_BASE:=OneDrive:Sicherung/JRITServerBackups/$(hostname -s)}" # remote folder
: "${RCLONE_RETRIES:=10}"
: "${RCLONE_LOW_LEVEL_RETRIES:=40}"
: "${RCLONE_RETRIES_SLEEP:=30s}"
: "${RCLONE_STATS:=1m}"
: "${RCLONE_BWLIMIT:=0}" # "0" = no limit
: "${ENABLE_UPLOAD:=true}"
# large-file stability (OneDrive)
: "${RCLONE_ONEDRIVE_CHUNK_SIZE:=64M}"
: "${RCLONE_TIMEOUT:=1h}"
: "${RCLONE_CONTIMEOUT:=30s}"
: "${RCLONE_TRANSFERS:=2}"
: "${RCLONE_CHECKERS:=4}"
: "${REMOTE_RETENTION_DAYS:=30}"
: "${ENABLE_REMOTE_RETENTION:=true}"
# Disk-space safety
: "${MIN_FREE_GB:=12}"
# Process niceness
: "${NICE_LEVEL:=10}"
: "${IONICE_CLASS:=2}"
: "${IONICE_LEVEL:=6}"
# Mail reporting
: "${ENABLE_MAIL_REPORT:=true}"
: "${MAIL_TO:=johannes}"
: "${MAIL_FROM:=app-backup@$(hostname -f 2>/dev/null || hostname)}"
: "${MAIL_SUBJECT_PREFIX:=[app-backup]}"
: "${MAIL_INCLUDE_LOG_TAIL_LINES:=200}"
# ---------- State for report ----------
START_EPOCH="$(date +%s)"    # for the duration line in the mail report
STATUS="SUCCESS"             # flipped to FAIL by the ERR trap
ERROR_SUMMARY=""
RCLONE_STATUS="SKIPPED"      # SKIPPED|RUNNING|OK|FAIL
RCLONE_OUTPUT_FILE=""
SIZES_FILE=""
# ---------- Helpers ----------
# Abort the run with a message (goes to the log via the global redirection).
die() {
  echo "ERROR: $*"
  exit 1
}

# True when the named command exists in PATH.
have() {
  command -v "$1" >/dev/null 2>&1
}

# Render a byte count human-readably (IEC units when numfmt is available).
human_bytes() {
  local bytes="${1:-0}"
  if have numfmt; then
    numfmt --to=iec-i --suffix=B "$bytes"
  else
    echo "${bytes}B"
  fi
}

# Total apparent size in bytes of a path; prints 0 when it is missing or
# when both du variants fail. Relies on the global pipefail so a failing
# `du -sb` pipeline falls back to `du -sB1`.
bytes_of_path() {
  local target="$1"
  if [[ ! -e "$target" ]]; then
    echo 0
    return
  fi
  (du -sb "$target" 2>/dev/null | awk '{print $1}' || du -sB1 "$target" | awk '{print $1}') || echo 0
}

# Available bytes on the filesystem that holds WORKDIR (POSIX df, 1-byte blocks).
free_bytes_workdir_fs() {
  df -PB1 "$WORKDIR" | awk 'NR==2{print $4}'
}
# Abort unless the filesystem behind WORKDIR has at least MIN_FREE_GB GiB free.
ensure_min_free_space() {
  local required_bytes available_bytes
  required_bytes=$((MIN_FREE_GB * 1024 * 1024 * 1024))
  available_bytes="$(free_bytes_workdir_fs)"
  echo "-- Free space on WORKDIR filesystem: $(human_bytes "$available_bytes") (min required: ${MIN_FREE_GB}GiB)"
  if [[ "$available_bytes" -lt "$required_bytes" ]]; then
    die "Not enough free space on WORKDIR filesystem (need >= ${MIN_FREE_GB}GiB)."
  fi
}
# Prune local archives older than LOCAL_RETENTION_DAYS from ARCHIVE_DIR.
# Best effort: find errors (e.g. permissions) never fail the run.
cleanup_old_local_archives() {
  local name_pattern="${ARCHIVE_PREFIX}_*.tar.*"
  mkdir -p "$ARCHIVE_DIR"
  echo "-- Local retention: deleting archives older than ${LOCAL_RETENTION_DAYS} day(s) from ${ARCHIVE_DIR}"
  find "$ARCHIVE_DIR" -type f -name "$name_pattern" \
    -mtime "+${LOCAL_RETENTION_DAYS}" -print -delete 2>/dev/null || true
}
# Compose and send the plain-text run report via the local sendmail binary.
# Best effort: a missing binary or a failed send only logs a warning.
# Reads globals: ENABLE_MAIL_REPORT, MAIL_*, STATUS, ERROR_SUMMARY, START_EPOCH,
# ts, CONFIG_FILE, LOG_FILE, WORKDIR, ARCHIVE_DIR, COMPRESSOR, ENABLE_UPLOAD,
# RCLONE_REMOTE_BASE, RCLONE_STATUS, RCLONE_OUTPUT_FILE, SIZES_FILE.
send_report_mail() {
[[ "${ENABLE_MAIL_REPORT}" == "true" ]] || return 0
local SENDMAIL_BIN="/usr/sbin/sendmail"
[[ -x "$SENDMAIL_BIN" ]] || { echo "WARN: sendmail missing at $SENDMAIL_BIN"; return 0; }
local end_epoch now duration subject host
end_epoch="$(date +%s)"
now="$(date -Is)"
duration="$((end_epoch - START_EPOCH))"
host="$(hostname -f 2>/dev/null || hostname)"
subject="${MAIL_SUBJECT_PREFIX} ${STATUS} ${host} ${ts}"
# Headers + body are piped as one message; `sendmail -t` takes the recipient
# from the To: header below.
{
echo "From: ${MAIL_FROM}"
echo "To: ${MAIL_TO}"
echo "Subject: ${subject}"
echo "Date: $(date -R)"
echo "MIME-Version: 1.0"
echo "Content-Type: text/plain; charset=UTF-8"
echo
echo "Backup Report"
echo "============="
echo "Zeit: ${now}"
echo "Host: ${host}"
echo "Status: ${STATUS}"
[[ -n "${ERROR_SUMMARY}" ]] && echo "Fehler: ${ERROR_SUMMARY}"
echo "Dauer: ${duration}s"
echo
echo "Config: ${CONFIG_FILE}"
echo "Log: ${LOG_FILE}"
echo "Workdir: ${WORKDIR}"
echo "Archive dir: ${ARCHIVE_DIR}"
echo "Kompression: ${COMPRESSOR}"
echo
echo "Remote"
echo "------"
echo "Upload: ${ENABLE_UPLOAD}"
echo "Remote base: ${RCLONE_REMOTE_BASE}"
echo "Upload Status: ${RCLONE_STATUS}"
# Include the tail of the rclone log only when an upload actually ran.
[[ -n "${RCLONE_OUTPUT_FILE}" && -f "${RCLONE_OUTPUT_FILE}" ]] && { echo; echo "rclone Tail:"; tail -n 60 "${RCLONE_OUTPUT_FILE}" || true; }
echo
echo "Größen"
echo "------"
[[ -n "${SIZES_FILE}" && -f "${SIZES_FILE}" ]] && cat "${SIZES_FILE}" || echo "(keine Größeninfos verfügbar)"
echo
echo "Log-Auszug (Tail)"
echo "-----------------"
tail -n "${MAIL_INCLUDE_LOG_TAIL_LINES}" "${LOG_FILE}" || true
} | "$SENDMAIL_BIN" -t || echo "WARN: sending mail failed"
}
# Remove the per-run staging tree.
# FIX: the former `[[ ... ]] && rm -rf ...` one-liner returned status 1 when
# STAGING_DIR was unset/missing (e.g. after lock contention), which under
# `set -e` could abort the EXIT trap and replace the intended exit code.
# The ${..:?} guard prevents an accidental `rm -rf /` on an empty value.
cleanup_staging() {
  if [[ -n "${STAGING_DIR:-}" && -d "${STAGING_DIR:-}" ]]; then
    rm -rf -- "${STAGING_DIR:?}"
  fi
}
# Nextcloud maintenance-mode safety trap.
# Tracks whether THIS run switched maintenance mode on, so the EXIT path
# never touches a mode some admin enabled on purpose.
NC_MAINTENANCE_ON=false

# Lift Nextcloud maintenance mode again if we enabled it (best effort).
nc_maintenance_off() {
  [[ "${NC_MAINTENANCE_ON}" == "true" ]] || return 0
  echo "-- Nextcloud maintenance mode OFF (trap)..."
  sudo -u "${NC_OCC_USER}" php "${NC_DIR}/occ" maintenance:mode --off || true
  NC_MAINTENANCE_ON=false
}
# Gitea service safety trap.
# Only set to true after we stopped the unit ourselves, so the EXIT path
# never starts a service the admin had deliberately left stopped.
GITEA_WAS_STOPPED=false

# Restart Gitea if this run stopped it (best effort).
gitea_service_start() {
  [[ "${GITEA_WAS_STOPPED}" == "true" ]] || return 0
  echo "-- Starting Gitea service (trap)..."
  systemctl start "${GITEA_SERVICE_NAME}" || true
  GITEA_WAS_STOPPED=false
}
# ERR trap: record the failure for the report; actual teardown is in on_exit.
on_error() { local ec=$?; STATUS="FAIL"; ERROR_SUMMARY="Exit code ${ec} (see log)"; return 0; }
# EXIT trap.
# FIX: release the services FIRST (maintenance mode off, Gitea up, staging
# removed) and send the mail LAST — the original mailed first, which kept
# Nextcloud in maintenance mode and Gitea stopped for the whole mail delivery
# and reported a log that was missing the cleanup steps.
on_exit() { local ec=$?; nc_maintenance_off; gitea_service_start; cleanup_staging; send_report_mail; exit "${ec}"; }
trap on_error ERR
trap on_exit EXIT
# ---------- Start ----------
echo "== app-backup start: ${ts} =="
echo "-- Host: $(hostname -f 2>/dev/null || hostname)"
echo "-- Config: ${CONFIG_FILE}"
# ---------- Preconditions ----------
# Root is needed for rsync -aHAX (owners/ACLs/xattrs), systemctl and the dirs below.
[[ $EUID -eq 0 ]] || die "Must run as root."
mkdir -p "$WORKDIR" "$ARCHIVE_DIR" "$STAGING_ROOT" "$LOG_DIR"
# ---------- Locking ----------
# Non-blocking flock on FD 9: a second concurrent run fails fast with exit 3.
# The FD (and thus the lock) is released automatically on process exit.
LOCKFILE="/run/app-backup.lock"
exec 9>"$LOCKFILE"
if ! flock -n 9; then
STATUS="FAIL"
ERROR_SUMMARY="Another backup already running (lock: $LOCKFILE)"
exit 3
fi
# ---------- Tools ----------
# Fail early if any required binary is missing, before touching services/data.
for t in tar rsync flock df find stat; do have "$t" || die "Missing required tool: $t"; done
if [[ "$COMPRESSOR" == "zstd" ]]; then have zstd || die "COMPRESSOR=zstd but zstd is missing"
elif [[ "$COMPRESSOR" == "gzip" ]]; then have gzip || die "COMPRESSOR=gzip but gzip is missing"
else die "Unsupported COMPRESSOR=$COMPRESSOR (use zstd or gzip)"; fi
# NOTE(review): relies on ENABLE_DB_DUMPS being defined (config or defaults);
# with `set -u` an unset value would abort the script right here.
if [[ "${ENABLE_DB_DUMPS}" == "true" ]]; then
have mysqldump || die "ENABLE_DB_DUMPS=true but mysqldump missing"
have mysql || die "ENABLE_DB_DUMPS=true but mysql client missing"
fi
have "$RCLONE_BIN" || die "rclone not installed (missing: $RCLONE_BIN)"
# ---------- Disk safety ----------
# Prune first so retention can free space that the check below then counts.
cleanup_old_local_archives
ensure_min_free_space
# ---------- Staging ----------
# Fresh per-run staging tree; meta/ records provenance and rides along in
# the meta archive.
STAGING_DIR="${STAGING_ROOT}/run_${ts}"
mkdir -p "$STAGING_DIR"/{db,files,meta}
echo "$(date -Is)" > "$STAGING_DIR/meta/created_at.txt"
echo "$(hostname -f 2>/dev/null || hostname)" > "$STAGING_DIR/meta/hostname.txt"
echo "${ts}" > "$STAGING_DIR/meta/timestamp.txt"
# ---------- Services consistency ----------
# Stop Gitea so its on-disk state matches the DB dump; it is restarted either
# explicitly before the upload phase or by the EXIT trap on failure.
if [[ "${ENABLE_GITEA:-false}" == "true" && "${ENABLE_GITEA_SERVICE_STOP:-true}" == "true" ]]; then
if systemctl is-active --quiet "${GITEA_SERVICE_NAME}"; then
echo "-- Stopping Gitea service for consistent backup: ${GITEA_SERVICE_NAME}"
systemctl stop "${GITEA_SERVICE_NAME}"
GITEA_WAS_STOPPED=true
fi
fi
# ---------- DB Dumps ----------
if [[ "${ENABLE_DB_DUMPS}" == "true" ]]; then
  echo "-- DB dumps enabled"

  # Dump one MySQL/MariaDB database to a .sql file.
  # $1 cnf: credentials file for --defaults-extra-file (must be readable)
  # $2 db:  database name
  # $3 out: destination path
  # --single-transaction gives a consistent InnoDB snapshot without locking.
  dump_mysql_db() {
    local cnf="$1" db="$2" out="$3"
    [[ -r "$cnf" ]] || die "DB CNF not readable: $cnf"
    echo "-- Dump MySQL/MariaDB DB: ${db}"
    mysqldump --defaults-extra-file="$cnf" --single-transaction --routines --triggers --hex-blob "$db" > "$out"
  }

  # WordPress DB.
  # FIX: the original `[[ ... ]] && dump ... || true` swallowed mysqldump
  # failures under set -e; a plain `if` lets a failed dump abort the run.
  if [[ -n "${WP_DB_NAME:-}" ]]; then
    dump_mysql_db "${WP_DB_CNF}" "${WP_DB_NAME}" "$STAGING_DIR/db/wordpress_${ts}.sql"
  fi

  # Nextcloud DB, optionally wrapped in maintenance mode for consistency.
  if [[ -n "${NC_DB_NAME:-}" ]]; then
    if [[ "${ENABLE_NEXTCLOUD_MAINTENANCE:-true}" == "true" ]]; then
      echo "-- Nextcloud maintenance mode ON..."
      sudo -u "${NC_OCC_USER}" php "${NC_DIR}/occ" maintenance:mode --on
      NC_MAINTENANCE_ON=true
    fi
    dump_mysql_db "${NC_DB_CNF}" "${NC_DB_NAME}" "$STAGING_DIR/db/nextcloud_${ts}.sql"
    if [[ "${ENABLE_NEXTCLOUD_MAINTENANCE:-true}" == "true" ]]; then
      echo "-- Nextcloud maintenance mode OFF..."
      sudo -u "${NC_OCC_USER}" php "${NC_DIR}/occ" maintenance:mode --off || true
      NC_MAINTENANCE_ON=false
    fi
  fi

  # Gitea DB (native install with MariaDB).
  # FIX: same `&& ... ||` hazard as above - a failing dump used to print the
  # WARN line and continue; now only a missing DB name is a warning.
  if [[ "${ENABLE_GITEA:-false}" == "true" ]]; then
    if [[ -n "${GITEA_DB_NAME:-}" ]]; then
      dump_mysql_db "${GITEA_DB_CNF}" "${GITEA_DB_NAME}" "$STAGING_DIR/db/gitea_${ts}.sql"
    else
      echo "WARN: ENABLE_GITEA=true but GITEA_DB_NAME empty - skipping gitea DB"
    fi
  fi
else
  echo "-- DB dumps disabled"
fi
# ---------- File copies ----------
echo "-- Collecting files via rsync..."
# Mirror src/ into dst/ preserving hardlinks, ACLs and xattrs.
# $1 src, $2 dst, remaining args: rsync exclude patterns (relative to src).
# Dies when src is missing; --delete keeps a reused staging dir exact.
rsync_dir() {
  local src="$1"
  local dst="$2"
  shift 2 || true
  [[ -d "$src" ]] || die "Source directory missing: $src"
  mkdir -p "$dst"
  local excludes=()
  while [[ $# -gt 0 ]]; do excludes+=("--exclude=$1"); shift; done
  # FIX: ${excludes[@]+...} expands a possibly-empty array safely; a bare
  # "${excludes[@]}" aborts under `set -u` on bash < 4.4 when no excludes
  # were passed (several call sites pass none).
  rsync -aHAX --numeric-ids --delete --info=stats2 ${excludes[@]+"${excludes[@]}"} "$src"/ "$dst"/
}
# WordPress webroot: exclude nextcloud/ if it lives below WP_DIR
if [[ "${ENABLE_WORDPRESS:-false}" == "true" ]]; then
echo "-- WordPress files: ${WP_DIR}"
wp_excludes=()
if [[ "${ENABLE_NEXTCLOUD:-false}" == "true" ]]; then
# Strip trailing slashes before comparing the two configured paths.
wp="${WP_DIR%/}"; nc="${NC_DIR%/}"
if [[ "$nc" == "$wp/nextcloud" ]]; then
# Nextcloud is nested inside the WP webroot - it gets its own archives below.
wp_excludes+=("nextcloud/")
fi
fi
if [[ "${#wp_excludes[@]}" -gt 0 ]]; then
echo "-- WordPress excludes: ${wp_excludes[*]}"
rsync_dir "${WP_DIR}" "$STAGING_DIR/files/wordpress" "${wp_excludes[@]}"
else
rsync_dir "${WP_DIR}" "$STAGING_DIR/files/wordpress"
fi
fi
# Nextcloud code: exclude data/
if [[ "${ENABLE_NEXTCLOUD:-false}" == "true" ]]; then
echo "-- Nextcloud code: ${NC_DIR} (excluding data/)"
rsync_dir "${NC_DIR}" "$STAGING_DIR/files/nextcloud" "data/"
# Data dir defaults to ${NC_DIR}/data unless the config overrides NC_DATA_DIR.
: "${NC_DATA_DIR:=${NC_DIR%/}/data}"
if [[ "${ENABLE_NEXTCLOUD_DATA:-true}" == "true" ]]; then
echo "-- Nextcloud data: ${NC_DATA_DIR}"
rsync_dir "${NC_DATA_DIR}" "$STAGING_DIR/files/nextcloud-data"
fi
fi
# Gitea files (based on app.ini APP_DATA_PATH)
if [[ "${ENABLE_GITEA:-false}" == "true" ]]; then
: "${GITEA_DATA_DIR:=/var/lib/gitea/data}"
echo "-- Gitea data dir: ${GITEA_DATA_DIR}"
rsync_dir "${GITEA_DATA_DIR}" "$STAGING_DIR/files/gitea-data"
# Config dir (app.ini etc.) is copied too when present.
: "${GITEA_ETC_DIR:=/etc/gitea}"
if [[ -n "${GITEA_ETC_DIR}" && -d "${GITEA_ETC_DIR}" ]]; then
echo "-- Gitea config dir: ${GITEA_ETC_DIR}"
rsync_dir "${GITEA_ETC_DIR}" "$STAGING_DIR/files/gitea-etc"
fi
fi
# ---------- Size summary ----------
# Written into meta/ so it ships inside the meta archive and the mail report.
SIZES_FILE="${STAGING_DIR}/meta/sizes.txt"
{
echo "DB dumps staged: $(human_bytes "$(bytes_of_path "$STAGING_DIR/db")")"
echo "WordPress staged: $(human_bytes "$(bytes_of_path "$STAGING_DIR/files/wordpress")")"
echo "Nextcloud code staged: $(human_bytes "$(bytes_of_path "$STAGING_DIR/files/nextcloud")")"
echo "Nextcloud data staged: $(human_bytes "$(bytes_of_path "$STAGING_DIR/files/nextcloud-data")")"
echo "Gitea data staged: $(human_bytes "$(bytes_of_path "$STAGING_DIR/files/gitea-data")")"
echo "Gitea etc staged: $(human_bytes "$(bytes_of_path "$STAGING_DIR/files/gitea-etc")")"
echo "Staging total: $(human_bytes "$(bytes_of_path "$STAGING_DIR")")"
} > "$SIZES_FILE" || true
# Re-check free space now that staging holds a full copy of the data.
ensure_min_free_space
# ---------- Create separate archives ----------
# Create one tar archive from $STAGING_DIR/<src_rel>, compress and verify it,
# then print the resulting file path as the ONLY stdout output.
# FIX: callers capture stdout via ARCHIVES+=("$(make_archive ...)"), but the
# original echoed its progress messages to stdout as well, so every captured
# "path" also contained those lines — corrupting the later du/upload steps.
# Progress now goes to stderr (still reaching the log via the global 2>&1).
# $1 label: archive name suffix, $2 src_rel: path relative to STAGING_DIR.
make_archive() {
  local label="$1" src_rel="$2"
  local tar_file="${ARCHIVE_DIR}/${ARCHIVE_PREFIX}_${ts}_${label}.tar"
  local out_file
  echo "-- Creating archive (${label}): ${tar_file}" >&2
  (
    cd "$STAGING_DIR"
    tar --numeric-owner --xattrs --acls -cf "$tar_file" "$src_rel"
  )
  if [[ "$COMPRESSOR" == "zstd" ]]; then
    out_file="${tar_file}.zst"
    echo "-- Compressing (zstd): ${out_file}" >&2
    # -T0: all cores; -19: high ratio; --rm deletes the tar after success.
    ionice -c "${IONICE_CLASS}" -n "${IONICE_LEVEL}" nice -n "${NICE_LEVEL}" zstd -T0 -19 --rm "$tar_file"
    zstd -t "$out_file"
  else
    out_file="${tar_file}.gz"
    echo "-- Compressing (gzip): ${out_file}" >&2
    ionice -c "${IONICE_CLASS}" -n "${IONICE_LEVEL}" nice -n "${NICE_LEVEL}" gzip -9 "$tar_file"
    gzip -t "$out_file"
  fi
  # Sole stdout output: the archive path for the caller to collect.
  echo "$out_file"
}
# Build one archive per component; ARCHIVES collects the resulting paths.
# FIX: the original used `cond && ARCHIVES+=("$(make_archive ...)") || true`,
# which silently swallowed make_archive failures (a broken/partial archive
# would be kept and uploaded). Plain `if` blocks let set -e abort instead.
ARCHIVES=()
ARCHIVES+=("$(make_archive "meta" "meta")")
# Only archive db/ when at least one dump was actually produced.
if [[ -d "$STAGING_DIR/db" && -n "$(ls -A "$STAGING_DIR/db" 2>/dev/null || true)" ]]; then
  ARCHIVES+=("$(make_archive "db" "db")")
fi
if [[ "${ENABLE_WORDPRESS:-false}" == "true" ]]; then
  ARCHIVES+=("$(make_archive "wordpress" "files/wordpress")")
fi
if [[ "${ENABLE_NEXTCLOUD:-false}" == "true" ]]; then
  ARCHIVES+=("$(make_archive "nextcloud" "files/nextcloud")")
  if [[ "${ENABLE_NEXTCLOUD_DATA:-true}" == "true" ]]; then
    ARCHIVES+=("$(make_archive "nextcloud-data" "files/nextcloud-data")")
  fi
fi
if [[ "${ENABLE_GITEA:-false}" == "true" ]]; then
  ARCHIVES+=("$(make_archive "gitea" "files/gitea-data")")
  if [[ -d "$STAGING_DIR/files/gitea-etc" && -n "$(ls -A "$STAGING_DIR/files/gitea-etc" 2>/dev/null || true)" ]]; then
    ARCHIVES+=("$(make_archive "gitea-etc" "files/gitea-etc")")
  fi
fi
echo "-- Archives created:"
for f in "${ARCHIVES[@]}"; do
  echo " - $f ($(du -h "$f" | awk '{print $1}'))"
done
# Restart Gitea before the (potentially long) upload phase.
gitea_service_start
# ---------- Upload via rclone ----------
if [[ "${ENABLE_UPLOAD}" == "true" ]]; then
RCLONE_OUTPUT_FILE="${LOG_DIR}/rclone_${ts}.log"
RCLONE_STATUS="RUNNING"
# Each run uploads into its own timestamped remote subfolder.
remote_run="${RCLONE_REMOTE_BASE}/${ARCHIVE_PREFIX}_${ts}"
echo "-- rclone remote check: ${RCLONE_REMOTE_BASE}"
# NOTE(review): this lsf dies when the base folder does not exist yet
# (fresh remote) - the base must be created once on first deploy; verify.
"$RCLONE_BIN" lsf "${RCLONE_REMOTE_BASE}" --max-depth 1 >/dev/null 2>&1 || die "Remote not reachable: ${RCLONE_REMOTE_BASE}"
echo "-- Creating remote folder: ${remote_run}"
"$RCLONE_BIN" mkdir "${remote_run}" >/dev/null 2>&1 || true
# Conservative, retry-heavy settings tuned for large archives over OneDrive.
common_args=(
"--checksum"
"--retries" "${RCLONE_RETRIES}"
"--low-level-retries" "${RCLONE_LOW_LEVEL_RETRIES}"
"--retries-sleep" "${RCLONE_RETRIES_SLEEP}"
"--stats" "${RCLONE_STATS}"
"--stats-one-line"
"--log-level" "INFO"
"--transfers" "${RCLONE_TRANSFERS}"
"--checkers" "${RCLONE_CHECKERS}"
"--timeout" "${RCLONE_TIMEOUT}"
"--contimeout" "${RCLONE_CONTIMEOUT}"
"--onedrive-chunk-size" "${RCLONE_ONEDRIVE_CHUNK_SIZE}"
)
[[ "${RCLONE_BWLIMIT}" != "0" ]] && common_args+=("--bwlimit" "${RCLONE_BWLIMIT}") || true
echo "-- Uploading archives to: ${remote_run} (log: ${RCLONE_OUTPUT_FILE})"
# Sequential uploads; global pipefail makes the rclone|tee pipeline fail
# (and die) when rclone itself fails.
for f in "${ARCHIVES[@]}"; do
echo "-- Upload: $(basename "$f")"
if ionice -c "${IONICE_CLASS}" -n "${IONICE_LEVEL}" nice -n "${NICE_LEVEL}" "$RCLONE_BIN" copy "$f" "${remote_run}" "${common_args[@]}" | tee -a "$RCLONE_OUTPUT_FILE"
then
:
else
RCLONE_STATUS="FAIL"
die "Upload failed for $(basename "$f") (see ${RCLONE_OUTPUT_FILE})"
fi
done
RCLONE_STATUS="OK"
if [[ "${ENABLE_REMOTE_RETENTION}" == "true" ]]; then
echo "-- Remote retention: delete objects older than ${REMOTE_RETENTION_DAYS}d (best effort)"
# Deletes files only; emptied per-run folders may remain on the remote.
"$RCLONE_BIN" delete "${RCLONE_REMOTE_BASE}" --min-age "${REMOTE_RETENTION_DAYS}d" --log-level INFO || true
fi
else
echo "-- Upload disabled (ENABLE_UPLOAD=false)"
fi
# Second prune: also catches archives that aged past retention during this run.
cleanup_old_local_archives
echo "== app-backup done: ${ts} =="