Added support for encryption and moved from one big backup file to smaller ones for each application.

This commit is contained in:
2026-02-16 07:49:41 +01:00
parent cb7891337e
commit 099688ea48
6 changed files with 252 additions and 227 deletions

View File

@@ -28,6 +28,15 @@ else
fi
# ---------- Defaults (if not set in conf) ----------
: "${ENABLE_MAIL:=false}"
: "${MAIL_BACKUP_MODE:=live}" # live|stop (stop briefly stops postfix/dovecot)
: "${MAIL_DIR:=/var/vmail}"
: "${POSTFIX_DIR:=/etc/postfix}"
: "${DOVECOT_DIR:=/etc/dovecot}"
: "${ENABLE_MAIL_HOME_DIRS:=true}"
: "${MAIL_HOME_DIRS:=/home/johannes /home/luciana}"
: "${MAIL_HOME_EXCLUDES:=--exclude=OneDrive/ --exclude=.cache/ --exclude=.ccache/ --exclude=.local/share/Trash/}"
: "${WORKDIR:=/var/backups/app-backup}"
: "${STAGING_ROOT:=${WORKDIR}/staging}"
: "${ARCHIVE_DIR:=${WORKDIR}/archives}"
@@ -47,7 +56,7 @@ fi
: "${ENABLE_UPLOAD:=true}"
# large-file stability (OneDrive)
: "${RCLONE_ONEDRIVE_CHUNK_SIZE:=64M}"
: "${RCLONE_ONEDRIVE_CHUNK_SIZE:=80Mi}"
: "${RCLONE_TIMEOUT:=1h}"
: "${RCLONE_CONTIMEOUT:=30s}"
: "${RCLONE_TRANSFERS:=2}"
@@ -80,9 +89,25 @@ RCLONE_OUTPUT_FILE=""
SIZES_FILE=""
# ---------- Helpers ----------
die() { echo "ERROR: $*"; exit 1; }
# Record failure state for the summary report, print the error to stderr, and abort.
# Sent to stderr (not stdout) so it cannot pollute command substitutions
# such as make_archive's captured output.
die() { STATUS="FAIL"; ERROR_SUMMARY="${1:-Unknown error}"; echo "ERROR: $*" >&2; exit 1; }
have() {
  # True when the named command resolves in the current shell
  # (external binary, builtin, or function).
  command -v "$1" >/dev/null 2>&1
}
require_crypt_remote() {
  # Abort unless the given rclone remote exists, is of type "crypt", and
  # does not explicitly disable filename/directory-name encryption.
  local remote_name="$1"
  local remote_cfg
  remote_cfg="$("$RCLONE_BIN" config show "$remote_name" 2>/dev/null || true)"
  if [[ -z "$remote_cfg" ]]; then
    die "rclone remote \"$remote_name\" not found in config"
  fi
  if ! grep -Eq "^type\s*=\s*crypt\s*$" <<<"$remote_cfg"; then
    die "Remote \"$remote_name\" is not a crypt remote (type!=crypt). Refusing to upload unencrypted."
  fi
  # These options may be omitted if defaults are used. If present and set to disable encryption, fail.
  if grep -Eq "^filename_encryption\s*=\s*off\s*$" <<<"$remote_cfg"; then
    die "Remote \"$remote_name\" has filename_encryption=off. Filenames would be visible in OneDrive."
  fi
  if grep -Eq "^directory_name_encryption\s*=\s*(false|off)\s*$" <<<"$remote_cfg"; then
    die "Remote \"$remote_name\" has directory_name_encryption disabled. Directory names would be visible in OneDrive."
  fi
}
human_bytes() {
  # Render a byte count in human-readable IEC form via numfmt,
  # falling back to a raw "<N>B" string when numfmt is unavailable.
  local byte_count="${1:-0}"
  if have numfmt; then
    numfmt --to=iec-i --suffix=B "$byte_count"
  else
    echo "${byte_count}B"
  fi
}
bytes_of_path() {
  # Print the size in bytes of a file/dir, or 0 when it is missing or unreadable.
  # Note: the original "du -sb | awk || du -sB1 | awk" tested awk's exit status
  # (which succeeds even when du fails), so the -sB1 fallback was unreachable;
  # here the fallback runs whenever the first form produced no output.
  local p="$1" size=""
  if [[ -e "$p" ]]; then
    # du -sb (apparent size) is GNU-specific; retry with -sB1 if empty.
    size="$(du -sb "$p" 2>/dev/null | awk '{print $1}')"
    if [[ -z "$size" ]]; then
      size="$(du -sB1 "$p" 2>/dev/null | awk '{print $1}')"
    fi
  fi
  echo "${size:-0}"
}
free_bytes_workdir_fs() {
  # Report free bytes on the filesystem backing $WORKDIR
  # (column 4 of POSIX-format df with 1-byte blocks).
  local df_report
  df_report="$(df -PB1 "$WORKDIR")"
  awk 'NR==2{print $4}' <<<"$df_report"
}
@@ -321,9 +346,72 @@ if [[ "${ENABLE_GITEA:-false}" == "true" ]]; then
fi
fi
\
# ---------- Mail backup (vmail store, postfix/dovecot config, home mail dirs) ----------
if [[ "${ENABLE_MAIL}" == "true" ]]; then
echo "-- Mail files..."
mkdir -p "$STAGING_DIR/files/mail" "$STAGING_DIR/files/postfix" "$STAGING_DIR/files/dovecot" "$STAGING_DIR/files/mail-home"
# Optional short stop for maximum consistency
if [[ "${MAIL_BACKUP_MODE}" == "stop" ]]; then
echo "-- Stopping postfix/dovecot for mail backup..."
systemctl stop postfix || true
systemctl stop dovecot || true
fi
# Stage each source dir if configured and present; "|| true" keeps set -e
# from aborting when a dir is absent (each source is optional).
[[ -n "${MAIL_DIR:-}" && -d "${MAIL_DIR}" ]] && rsync_dir "${MAIL_DIR}" "$STAGING_DIR/files/mail" || true
[[ -n "${POSTFIX_DIR:-}" && -d "${POSTFIX_DIR}" ]] && rsync_dir "${POSTFIX_DIR}" "$STAGING_DIR/files/postfix" || true
[[ -n "${DOVECOT_DIR:-}" && -d "${DOVECOT_DIR}" ]] && rsync_dir "${DOVECOT_DIR}" "$STAGING_DIR/files/dovecot" || true
# Home mail dirs (per-user mail files under /home)
if [[ "${ENABLE_MAIL_HOME_DIRS}" == "true" ]]; then
# MAIL_HOME_DIRS is a space-separated list; word-splitting here is intentional.
for h in ${MAIL_HOME_DIRS}; do
if [[ -d "${h}" ]]; then
user="$(basename "${h}")"
echo "-- Mail (home): ${h}"
dest="$STAGING_DIR/files/mail-home/${user}"
mkdir -p "$dest"
# Some home subdirs (e.g. FUSE mounts like OneDrive) may deny access or cause
# noise during backup. We keep this resilient:
# - skip if the user does not exist
# - run rsync as root (so we can write into the root-owned staging dir)
# - prevent crossing filesystem boundaries (skip mounts)
# - exclude noisy/unwanted dirs via MAIL_HOME_EXCLUDES
if ! id "${user}" >/dev/null 2>&1; then
echo "WARNING: mail-home user '${user}' does not exist (skipping ${h})" >&2
continue
fi
chown root:root "$dest" 2>/dev/null || true
chmod 0750 "$dest" 2>/dev/null || true
# Temporarily disable errexit: a partial rsync (e.g. permission-denied
# files) must only warn, not abort the whole backup run.
set +e
rsync -aHAX --numeric-ids --info=stats2 --one-file-system ${MAIL_HOME_EXCLUDES} "${h}/" "${dest}/"
rc=$?
set -e
if [[ $rc -ne 0 ]]; then
echo "WARNING: rsync for ${h} returned code ${rc} (continuing)" >&2
fi
fi
done
fi
# Restart services if we stopped them (dovecot first, then postfix).
if [[ "${MAIL_BACKUP_MODE}" == "stop" ]]; then
echo "-- Starting postfix/dovecot after mail backup..."
systemctl start dovecot || true
systemctl start postfix || true
fi
fi
# ---------- Size summary ----------
SIZES_FILE="${STAGING_DIR}/meta/sizes.txt"
{
echo "Mail-home staged: $(human_bytes "$(bytes_of_path "$STAGING_DIR/files/mail-home")")"
echo "Mail staged: $(human_bytes "$(bytes_of_path "$STAGING_DIR/files/mail")")"
echo "Postfix staged: $(human_bytes "$(bytes_of_path "$STAGING_DIR/files/postfix")")"
echo "Dovecot staged: $(human_bytes "$(bytes_of_path "$STAGING_DIR/files/dovecot")")"
echo "DB dumps staged: $(human_bytes "$(bytes_of_path "$STAGING_DIR/db")")"
echo "WordPress staged: $(human_bytes "$(bytes_of_path "$STAGING_DIR/files/wordpress")")"
echo "Nextcloud code staged: $(human_bytes "$(bytes_of_path "$STAGING_DIR/files/nextcloud")")"
@@ -340,8 +428,7 @@ make_archive() {
local label="$1" src_rel="$2"
local tar_file="${ARCHIVE_DIR}/${ARCHIVE_PREFIX}_${ts}_${label}.tar"
local out_file
echo "-- Creating archive (${label}): ${tar_file}"
echo "-- Creating archive (${label}): ${tar_file}" >&2
(
cd "$STAGING_DIR"
tar --numeric-owner --xattrs --acls -cf "$tar_file" "$src_rel"
@@ -349,16 +436,16 @@ make_archive() {
if [[ "$COMPRESSOR" == "zstd" ]]; then
out_file="${tar_file}.zst"
echo "-- Compressing (zstd): ${out_file}"
ionice -c "${IONICE_CLASS}" -n "${IONICE_LEVEL}" nice -n "${NICE_LEVEL}" zstd -T0 -19 --rm "$tar_file"
zstd -t "$out_file"
echo "-- Compressing (zstd): ${out_file}" >&2
ionice -c "${IONICE_CLASS}" -n "${IONICE_LEVEL}" nice -n "${NICE_LEVEL}" zstd -T0 -19 --rm -q "$tar_file" 1>&2
zstd -t "$out_file" 1>&2
else
out_file="${tar_file}.gz"
echo "-- Compressing (gzip): ${out_file}"
echo "-- Compressing (gzip): ${out_file}" >&2
ionice -c "${IONICE_CLASS}" -n "${IONICE_LEVEL}" nice -n "${NICE_LEVEL}" gzip -9 "$tar_file"
gzip -t "$out_file"
fi
echo "$out_file" >&2
echo "$out_file"
}
@@ -380,6 +467,17 @@ if [[ "${ENABLE_GITEA:-false}" == "true" ]]; then
fi
fi
# Filter empty archive entries
ARCHIVES=($(for a in "${ARCHIVES[@]:-}"; do [[ -n "$a" ]] && echo "$a"; done))
# Mail archives (vmail/postfix/dovecot + optional /home mail dirs)
if [[ "${ENABLE_MAIL}" == "true" ]]; then
ARCHIVES+=("$(make_archive "mail" "files/mail")")
ARCHIVES+=("$(make_archive "mail-home" "files/mail-home")")
ARCHIVES+=("$(make_archive "postfix" "files/postfix")")
ARCHIVES+=("$(make_archive "dovecot" "files/dovecot")")
fi
echo "-- Archives created:"
for f in "${ARCHIVES[@]}"; do
echo " - $f ($(du -h "$f" | awk '{print $1}'))"
@@ -394,8 +492,20 @@ if [[ "${ENABLE_UPLOAD}" == "true" ]]; then
RCLONE_STATUS="RUNNING"
remote_run="${RCLONE_REMOTE_BASE}/${ARCHIVE_PREFIX}_${ts}"
echo "-- rclone remote check: ${RCLONE_REMOTE_BASE}"
"$RCLONE_BIN" lsf "${RCLONE_REMOTE_BASE}" --max-depth 1 >/dev/null 2>&1 || die "Remote not reachable: ${RCLONE_REMOTE_BASE}"
# Split "Remote:path" into remote name + path. Some rclone subcommands require only the remote name.
[[ "${RCLONE_REMOTE_BASE}" == *:* ]] || die "RCLONE_REMOTE_BASE must be in the form Remote:path (got: ${RCLONE_REMOTE_BASE})"
remote_name="${RCLONE_REMOTE_BASE%%:*}"
remote_base_path="${RCLONE_REMOTE_BASE#*:}"
echo "-- rclone remote check (crypt enforced): ${remote_name}"
require_crypt_remote "${remote_name}"
# Check remote root reachability (works even if base folder doesn't exist yet)
"$RCLONE_BIN" lsd "${remote_name}:" --max-depth 1 >/dev/null 2>&1 || die "Remote not reachable: ${remote_name}:"
# Ensure base folder exists (best effort)
echo "-- Ensuring remote base folder exists: ${RCLONE_REMOTE_BASE}"
"$RCLONE_BIN" mkdir "${RCLONE_REMOTE_BASE}" >/dev/null 2>&1 || true
echo "-- Creating remote folder: ${remote_run}"
"$RCLONE_BIN" mkdir "${remote_run}" >/dev/null 2>&1 || true
@@ -418,6 +528,7 @@ if [[ "${ENABLE_UPLOAD}" == "true" ]]; then
echo "-- Uploading archives to: ${remote_run} (log: ${RCLONE_OUTPUT_FILE})"
for f in "${ARCHIVES[@]}"; do
[[ -n "$f" ]] || continue
echo "-- Upload: $(basename "$f")"
if ionice -c "${IONICE_CLASS}" -n "${IONICE_LEVEL}" nice -n "${NICE_LEVEL}" "$RCLONE_BIN" copy "$f" "${remote_run}" "${common_args[@]}" | tee -a "$RCLONE_OUTPUT_FILE"
then
@@ -429,6 +540,10 @@ if [[ "${ENABLE_UPLOAD}" == "true" ]]; then
done
RCLONE_STATUS="OK"
# Show what landed on the remote for this run (small folder: archives only)
echo "-- Remote listing (this run): ${remote_run}"
"$RCLONE_BIN" lsl "${remote_run}" --max-depth 1 --log-level INFO || echo "WARNING: Could not list remote run folder: ${remote_run}"
if [[ "${ENABLE_REMOTE_RETENTION}" == "true" ]]; then
echo "-- Remote retention: delete objects older than ${REMOTE_RETENTION_DAYS}d (best effort)"
"$RCLONE_BIN" delete "${RCLONE_REMOTE_BASE}" --min-age "${REMOTE_RETENTION_DAYS}d" --log-level INFO || true

View File

@@ -28,7 +28,41 @@ fi
: "${ARCHIVE_PREFIX:=appbackup}"
: "${RCLONE_BIN:=rclone}"
: "${RCLONE_REMOTE_BASE:=OneDrive:Sicherung/JRITServerBackups/$(hostname -s)}"
: "${RCLONE_REMOTE_BASE:=}"
# --- Remote base resolution & crypt enforcement ---
# Prefer explicit RCLONE_REMOTE_BASE from config.
# If only RCLONE_REMOTE is provided (e.g. "OneDriveCrypt:Sicherung"), derive the host folder.
: "${RCLONE_REMOTE:=}"
if [[ -z "${RCLONE_REMOTE_BASE}" ]]; then
if [[ -n "${RCLONE_REMOTE}" ]]; then
RCLONE_REMOTE_BASE="${RCLONE_REMOTE%/}/JRITServerBackups/$(hostname -s)"
fi
fi
require_crypt_remote() {
  # Hard gate before touching the remote: the remote half of "$1"
  # (Remote:path) must be an rclone crypt remote with standard filename
  # encryption and encrypted directory names, so nothing appears on the
  # cloud backend in cleartext.
  local spec="$1"
  local name="${spec%%:*}"
  if [[ -z "${name}" || "${name}" == "${spec}" ]]; then
    die "RCLONE_REMOTE_BASE must include an rclone remote prefix like 'OneDriveCrypt:...'. Got: ${spec}"
  fi
  have "${RCLONE_BIN}" || die "rclone missing"
  # rclone prints full config; we only need the remote block.
  local remote_cfg
  remote_cfg="$("${RCLONE_BIN}" config show "${name}" 2>/dev/null || true)"
  if [[ -z "${remote_cfg}" ]]; then
    die "rclone remote '${name}' not found. Check 'rclone listremotes' and your config."
  fi
  local remote_type
  remote_type="$(printf "%s\n" "${remote_cfg}" | awk -F' = ' '$1=="type"{print $2; exit}')"
  if [[ "${remote_type}" != "crypt" ]]; then
    die "Remote '${name}' is type='${remote_type:-?}', not 'crypt'. Refusing to restore from non-crypt remote."
  fi
  # Enforce filename/directory encryption so OneDrive web does not show cleartext names.
  local fname_enc dir_enc
  fname_enc="$(printf "%s\n" "${remote_cfg}" | awk -F' = ' '$1=="filename_encryption"{print $2; exit}')"
  dir_enc="$(printf "%s\n" "${remote_cfg}" | awk -F' = ' '$1=="directory_name_encryption"{print $2; exit}')"
  [[ "${fname_enc}" == "standard" ]] || die "Remote '${name}': filename_encryption='${fname_enc:-?}' (expected 'standard'). Otherwise filenames can appear in cleartext."
  [[ "${dir_enc}" == "true" ]] || die "Remote '${name}': directory_name_encryption='${dir_enc:-?}' (expected 'true'). Otherwise directory names can appear in cleartext."
}
: "${DRY_RUN:=false}"
: "${RESTORE_DB:=true}"
@@ -68,182 +102,6 @@ gitea_start() { [[ "${GITEA_WAS_STOPPED}" == "true" ]] && { echo "-- Starting gi
httpd_start() { [[ "${HTTPD_WAS_STOPPED}" == "true" ]] && { echo "-- Starting httpd (trap)"; run_cmd systemctl start "${HTTPD_SERVICE_NAME}" || true; HTTPD_WAS_STOPPED=false; }; }
phpfpm_start(){ [[ "${PHPFPM_WAS_STOPPED}" == "true" ]] && { echo "-- Starting php-fpm (trap)"; run_cmd systemctl start "${PHPFPM_SERVICE_NAME}" || true; PHPFPM_WAS_STOPPED=false; }; }
on_exit() { local ec=$?; nc_maintenance_off; gitea_start; httpd_start; phpfpm_start; exit "${ec}"; }
trap on_exit EXIT
[[ $EUID -eq 0 ]] || die "Must run as root."
for t in tar rsync flock df find stat; do have "$t" || die "Missing required tool: $t"; done
mkdir -p "$WORKDIR" "$RESTORE_ROOT" "$LOG_DIR"
LOCKFILE="/run/app-backup.lock"
exec 9>"$LOCKFILE"
flock -n 9 || die "Another backup/restore already running (lock: $LOCKFILE)"
usage() {
# Print CLI usage to stdout; the caller decides the exit code.
cat <<EOF
Usage:
$0 --remote-run <run_folder_name> # e.g. ${ARCHIVE_PREFIX}_2026-02-11_02-31-28
$0 --local-run <path_to_run_dir> # directory containing archives
Options:
--dry-run
--no-db
--no-files
EOF
}
REMOTE_RUN=""
LOCAL_RUN=""
while [[ $# -gt 0 ]]; do
case "$1" in
--remote-run) REMOTE_RUN="${2:-}"; shift 2;;
--local-run) LOCAL_RUN="${2:-}"; shift 2;;
--dry-run) DRY_RUN=true; shift;;
--no-db) RESTORE_DB=false; shift;;
--no-files) RESTORE_FILES=false; shift;;
-h|--help) usage; exit 0;;
*) die "Unknown arg: $1";;
esac
done
[[ -z "${REMOTE_RUN}" && -z "${LOCAL_RUN}" ]] && { usage; exit 2; }
RUN_DIR="${RESTORE_ROOT}/run_${ts}"
DOWNLOAD_DIR="${RUN_DIR}/downloads"
EXTRACT_DIR="${RUN_DIR}/extract"
mkdir -p "$DOWNLOAD_DIR" "$EXTRACT_DIR"
if [[ -n "${REMOTE_RUN}" ]]; then
have "$RCLONE_BIN" || die "rclone missing but --remote-run used"
remote_path="${RCLONE_REMOTE_BASE}/${REMOTE_RUN}"
echo "-- Fetching archives from remote: ${remote_path} -> ${DOWNLOAD_DIR}"
run_cmd "$RCLONE_BIN" copy "${remote_path}" "${DOWNLOAD_DIR}" --checksum --log-level INFO
SRC_DIR="${DOWNLOAD_DIR}"
else
[[ -d "${LOCAL_RUN}" ]] || die "Local run dir not found: ${LOCAL_RUN}"
SRC_DIR="${LOCAL_RUN}"
fi
echo "== app-restore start: ${ts} =="
echo "-- Source dir: ${SRC_DIR}"
echo "-- DRY_RUN: ${DRY_RUN}"
detect_tar_flags() {
  # Map an archive filename suffix to the matching tar decompression flag;
  # aborts on anything this restore script does not produce.
  case "$1" in
    *.tar.zst) echo "--zstd" ;;
    *.tar.gz) echo "-z" ;;
    *) die "Unsupported archive: $1" ;;
  esac
}
extract_archive() {
  # Extract one run archive into EXTRACT_DIR, or only echo the command in
  # dry-run mode. Rewritten from the fragile "cond && A || B" one-liner
  # (which would also run B if A ever failed) to an explicit if/else.
  local f="$1" flags
  flags="$(detect_tar_flags "$f")"
  echo "-- Extract: $(basename "$f") -> ${EXTRACT_DIR}"
  if [[ "${DRY_RUN}" == "true" ]]; then
    echo "[DRY_RUN] tar ${flags} -xf $f -C ${EXTRACT_DIR}"
  else
    # flags intentionally unquoted: it holds a single tar option word
    tar ${flags} -xf "$f" -C "$EXTRACT_DIR"
  fi
}
pick_one() {
  # Print the last (lexically sorted) path matching glob pattern $1 inside
  # SRC_DIR, or nothing when there is no match. Replaces the ls-parsing
  # one-liner (SC2012): ls output breaks on unusual filenames and, for a
  # directory match, listed the directory's *contents* instead of its path.
  local f
  local -a found=()
  for f in "${SRC_DIR}"/$1; do
    if [[ -e "$f" ]]; then
      found+=("$f")
    fi
  done
  if (( ${#found[@]} > 0 )); then
    printf '%s\n' "${found[@]}" | sort | tail -n 1
  fi
}
# stop services (optional)
if [[ "${ENABLE_HTTPD_STOP}" == "true" ]] && systemctl is-active --quiet "${HTTPD_SERVICE_NAME}"; then
echo "-- Stopping httpd for restore: ${HTTPD_SERVICE_NAME}"
run_cmd systemctl stop "${HTTPD_SERVICE_NAME}"; HTTPD_WAS_STOPPED=true
fi
if [[ "${ENABLE_PHPFPM_STOP}" == "true" ]] && systemctl is-active --quiet "${PHPFPM_SERVICE_NAME}"; then
echo "-- Stopping php-fpm for restore: ${PHPFPM_SERVICE_NAME}"
run_cmd systemctl stop "${PHPFPM_SERVICE_NAME}"; PHPFPM_WAS_STOPPED=true
fi
if [[ "${ENABLE_GITEA:-false}" == "true" && "${ENABLE_GITEA_SERVICE_STOP}" == "true" ]] && systemctl is-active --quiet "${GITEA_SERVICE_NAME}"; then
echo "-- Stopping gitea for restore: ${GITEA_SERVICE_NAME}"
run_cmd systemctl stop "${GITEA_SERVICE_NAME}"; GITEA_WAS_STOPPED=true
fi
# nextcloud maintenance
if [[ "${ENABLE_NEXTCLOUD:-false}" == "true" && "${ENABLE_NEXTCLOUD_MAINTENANCE}" == "true" ]] && [[ -d "${NC_DIR}" && -f "${NC_DIR}/occ" ]]; then
echo "-- Nextcloud maintenance mode ON..."
run_cmd sudo -u "${NC_OCC_USER}" php "${NC_DIR}/occ" maintenance:mode --on
NC_MAINTENANCE_ON=true
fi
# extract archives
meta_arc="$(pick_one "${ARCHIVE_PREFIX}_*_meta.tar.*")"; [[ -n "$meta_arc" ]] && extract_archive "$meta_arc" || true
db_arc="$(pick_one "${ARCHIVE_PREFIX}_*_db.tar.*")"
wp_arc="$(pick_one "${ARCHIVE_PREFIX}_*_wordpress.tar.*")"
nc_arc="$(pick_one "${ARCHIVE_PREFIX}_*_nextcloud.tar.*")"
ncd_arc="$(pick_one "${ARCHIVE_PREFIX}_*_nextcloud-data.tar.*")"
g_arc="$(pick_one "${ARCHIVE_PREFIX}_*_gitea.tar.*")"
g_etc_arc="$(pick_one "${ARCHIVE_PREFIX}_*_gitea-etc.tar.*")"
[[ -n "$db_arc" ]] && extract_archive "$db_arc" || true
[[ -n "$wp_arc" && "${RESTORE_FILES}" == "true" ]] && extract_archive "$wp_arc" || true
[[ -n "$nc_arc" && "${RESTORE_FILES}" == "true" ]] && extract_archive "$nc_arc" || true
[[ -n "$ncd_arc" && "${RESTORE_FILES}" == "true" ]] && extract_archive "$ncd_arc" || true
[[ -n "$g_arc" && "${RESTORE_FILES}" == "true" ]] && extract_archive "$g_arc" || true
[[ -n "$g_etc_arc" && "${RESTORE_FILES}" == "true" ]] && extract_archive "$g_etc_arc" || true
rsync_restore_dir() {
  # Sync extracted backup content from "$1" into the live directory "$2",
  # preserving hardlinks/ACLs/xattrs and numeric ownership. --delete is
  # added only when RESTORE_STRICT_DELETE=true.
  local source_dir="$1" target_dir="$2"
  [[ -d "$source_dir" ]] || die "Restore source missing: $source_dir"
  mkdir -p "$target_dir"
  local extra_opts=()
  if [[ "${RESTORE_STRICT_DELETE}" == "true" ]]; then
    extra_opts=(--delete)
  fi
  run_cmd rsync -aHAX --numeric-ids --info=stats2 "${extra_opts[@]}" "$source_dir"/ "$target_dir"/
}
if [[ "${RESTORE_FILES}" == "true" ]]; then
echo "-- Restoring files..."
if [[ -d "${EXTRACT_DIR}/files/wordpress" && "${ENABLE_WORDPRESS:-false}" == "true" ]]; then
echo "-- WordPress -> ${WP_DIR}"
rsync_restore_dir "${EXTRACT_DIR}/files/wordpress" "${WP_DIR}"
fi
if [[ "${ENABLE_NEXTCLOUD:-false}" == "true" ]]; then
[[ -d "${EXTRACT_DIR}/files/nextcloud" ]] && { echo "-- Nextcloud code -> ${NC_DIR}"; rsync_restore_dir "${EXTRACT_DIR}/files/nextcloud" "${NC_DIR}"; }
: "${NC_DATA_DIR:=${NC_DIR%/}/data}"
[[ -d "${EXTRACT_DIR}/files/nextcloud-data" && "${ENABLE_NEXTCLOUD_DATA:-true}" == "true" ]] && { echo "-- Nextcloud data -> ${NC_DATA_DIR}"; rsync_restore_dir "${EXTRACT_DIR}/files/nextcloud-data" "${NC_DATA_DIR}"; }
fi
if [[ "${ENABLE_GITEA:-false}" == "true" ]]; then
: "${GITEA_DATA_DIR:=/var/lib/gitea/data}"
[[ -d "${EXTRACT_DIR}/files/gitea-data" ]] && { echo "-- Gitea data -> ${GITEA_DATA_DIR}"; rsync_restore_dir "${EXTRACT_DIR}/files/gitea-data" "${GITEA_DATA_DIR}"; }
: "${GITEA_ETC_DIR:=/etc/gitea}"
[[ -d "${EXTRACT_DIR}/files/gitea-etc" && -n "${GITEA_ETC_DIR:-}" ]] && { echo "-- Gitea etc -> ${GITEA_ETC_DIR}"; rsync_restore_dir "${EXTRACT_DIR}/files/gitea-etc" "${GITEA_ETC_DIR}"; }
fi
else
echo "-- RESTORE_FILES=false (skipping)"
fi
mysql_restore_sql() {
# Import a SQL dump ($3) into database $2 using the client credentials file $1.
# Aborts (die) on unreadable inputs or a missing mysql client.
local cnf="$1" db="$2" sql="$3"
[[ -r "$cnf" ]] || die "DB CNF not readable: $cnf"
[[ -r "$sql" ]] || die "SQL not readable: $sql"
have mysql || die "mysql client missing"
echo "-- Import MySQL/MariaDB DB: ${db} from $(basename "$sql")"
# run_cmd is the script's exec wrapper (presumably honors DRY_RUN — defined elsewhere);
# the dump is streamed in on stdin.
run_cmd mysql --defaults-extra-file="$cnf" "$db" < "$sql"
}
if [[ "${RESTORE_DB}" == "true" && -d "${EXTRACT_DIR}/db" ]]; then
echo "-- Restoring databases..."
wp_sql="$(ls -1 "${EXTRACT_DIR}/db"/wordpress_*.sql 2>/dev/null | sort | tail -n 1 || true)"
nc_sql="$(ls -1 "${EXTRACT_DIR}/db"/nextcloud_*.sql 2>/dev/null | sort | tail -n 1 || true)"
g_sql="$(ls -1 "${EXTRACT_DIR}/db"/gitea_*.sql 2>/dev/null | sort | tail -n 1 || true)"
[[ -n "${WP_DB_NAME:-}" && -n "$wp_sql" ]] && mysql_restore_sql "${WP_DB_CNF}" "${WP_DB_NAME}" "$wp_sql" || echo "WARN: WP DB dump missing"
[[ -n "${NC_DB_NAME:-}" && -n "$nc_sql" ]] && mysql_restore_sql "${NC_DB_CNF}" "${NC_DB_NAME}" "$nc_sql" || echo "WARN: NC DB dump missing"
[[ "${ENABLE_GITEA:-false}" == "true" && -n "${GITEA_DB_NAME:-}" && -n "$g_sql" ]] && mysql_restore_sql "${GITEA_DB_CNF}" "${GITEA_DB_NAME}" "$g_sql" || true
else
echo "-- RESTORE_DB=false or no db dump present (skipping)"
fi
if [[ "${ENABLE_NEXTCLOUD:-false}" == "true" && -d "${NC_DIR}" && -f "${NC_DIR}/occ" ]]; then
echo "-- Nextcloud post-restore: maintenance:repair"
run_cmd sudo -u "${NC_OCC_USER}" php "${NC_DIR}/occ" maintenance:repair || true
if [[ "${NC_FILES_SCAN_AFTER_RESTORE}" == "true" ]]; then
echo "-- Nextcloud post-restore: files:scan --all"
run_cmd sudo -u "${NC_OCC_USER}" php "${NC_DIR}/occ" files:scan --all || true
fi
if [[ "${ENABLE_NEXTCLOUD_MAINTENANCE}" == "true" ]]; then
echo "-- Nextcloud maintenance mode OFF..."
run_cmd sudo -u "${NC_OCC_USER}" php "${NC_DIR}/occ" maintenance:mode --off
NC_MAINTENANCE_ON=false
fi
fi
gitea_start
phpfpm_start
httpd_start
echo "== app-restore done: ${ts} =="
echo "-- Working dir: ${RUN_DIR}"
echo "-- Log: ${LOG_FILE}"