add backup for multiple schemas

2025-09-09 19:30:11 +02:00
parent 17ed3b8e9d
commit 37f789f2e2
2 changed files with 33 additions and 28 deletions


@@ -3,8 +3,8 @@
 # ==============================================================================
 # SAP HANA Backup Script
 #
-# Performs schema exports and/or tenant backups for a SAP HANA database.
-# Designed to be executed via a cronjob.
+# Performs schema exports for one or more schemas and/or tenant backups for a
+# SAP HANA database. Designed to be executed via a cronjob.
 # Reads all settings from the backup.conf file in the same directory.
 # ==============================================================================
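The script reads its settings from backup.conf. A minimal sketch of that file, using the variable names the script actually references — every path and value below is an illustrative assumption, not the project's defaults:

# backup.conf — illustrative values only
HDBSQL_PATH="/usr/sap/HDB/HDB00/exe/hdbsql"   # assumed location of the hdbsql binary
USER_KEY="BACKUPKEY"                          # hdbuserstore key used for the connection
BACKUP_BASE_DIR="/backup/hana"                # root directory for all backup output
BACKUP_TYPE="all"                             # schema | tenant | all
SCHEMA_NAMES="SAPABAP1 SAPHANADB"             # NEW: space-separated list of schemas to export
THREADS=4                                     # parallelism for the EXPORT statement
COMPRESS_SCHEMA="true"                        # tar.gz each schema export
COMPRESS_TENANT="false"                       # tar.gz the tenant backup files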
@@ -38,41 +38,42 @@ fi
 # --- Functions ---
 # Performs a binary export of a specific schema.
+# Accepts the schema name as its first argument.
 perform_schema_export() {
-    echo "⬇️ Starting schema export for '${SCHEMA_NAME}'..."
+    local schema_name="$1"
+    if [[ -z "$schema_name" ]]; then
+        echo " ❌ Error: No schema name provided to perform_schema_export function."
+        return 1
+    fi
+    echo "⬇️ Starting schema export for '${schema_name}'..."
     local timestamp
     timestamp=$(date +%Y%m%d_%H%M%S)
     local export_base_dir="${BACKUP_BASE_DIR}/schema"
-    local export_path="${export_base_dir}/${SCHEMA_NAME}_${timestamp}"
+    local export_path="${export_base_dir}/${schema_name}_${timestamp}"
     local query_export_path="$export_path" # Default path for the EXPORT query
     # NEW: If compression is enabled, export to a temporary directory
     if [[ "$COMPRESS_SCHEMA" == "true" ]]; then
-        export_path="${export_base_dir}/tmp/${SCHEMA_NAME}_${timestamp}"
+        export_path="${export_base_dir}/tmp/${schema_name}_${timestamp}"
         query_export_path="$export_path"
         echo " Compression enabled. Using temporary export path: ${export_path}"
     fi
-    local archive_file="${export_base_dir}/${SCHEMA_NAME}_${timestamp}.tar.gz"
+    local archive_file="${export_base_dir}/${schema_name}_${timestamp}.tar.gz"
     # Create the target directory if it doesn't exist
     mkdir -p "$(dirname "$export_path")"
     # Construct and execute the EXPORT query
-    local query="EXPORT \"${SCHEMA_NAME}\".\"*\" AS BINARY INTO '${query_export_path}' WITH REPLACE THREADS ${THREADS};"
+    local query="EXPORT \"${schema_name}\".\"*\" AS BINARY INTO '${query_export_path}' WITH REPLACE THREADS ${THREADS};"
     # We redirect stdout and stderr to /dev/null for cleaner cron logs.
     "$HDBSQL_PATH" -U "$USER_KEY" "$query" > /dev/null 2>&1
     local exit_code=$?
     if [[ "$exit_code" -eq 0 ]]; then
-        echo " ✅ Successfully exported schema '${SCHEMA_NAME}'."
+        echo " ✅ Successfully exported schema '${schema_name}'."
         # NEW: Conditional compression logic
         if [[ "$COMPRESS_SCHEMA" == "true" ]]; then
             echo " 🗜️ Compressing exported files..."
             # Use -C to change directory, ensuring the archive doesn't contain the 'tmp' path
             tar -czf "$archive_file" -C "$(dirname "$export_path")" "$(basename "$export_path")"
             local tar_exit_code=$?
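For concreteness, with illustrative values (schema SAPHANADB, BACKUP_BASE_DIR=/backup/hana, THREADS=4, compression disabled, and an hdbsql path and user-store key that are pure assumptions), the constructed call expands to roughly:

/usr/sap/HDB/HDB00/exe/hdbsql -U BACKUPKEY \
    "EXPORT \"SAPHANADB\".\"*\" AS BINARY INTO '/backup/hana/schema/SAPHANADB_20250909_193011' WITH REPLACE THREADS 4;" \
    > /dev/null 2>&1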
@@ -80,7 +81,6 @@ perform_schema_export() {
echo " ✅ Successfully created archive '${archive_file}'."
echo " 🧹 Cleaning up temporary directory..."
rm -rf "$export_path"
# Clean up the tmp parent if it's empty
rmdir --ignore-fail-on-non-empty "$(dirname "$export_path")"
echo " ✨ Cleanup complete."
else
@@ -90,10 +90,24 @@ perform_schema_export() {
echo " Compression disabled. Raw export files are located at '${export_path}'."
fi
else
echo " ❌ Error: Failed to export schema '${SCHEMA_NAME}' (hdbsql exit code: ${exit_code})."
echo " ❌ Error: Failed to export schema '${schema_name}' (hdbsql exit code: ${exit_code})."
fi
}
# NEW: Loops through the schemas in the config file and runs an export for each.
run_all_schema_exports() {
if [[ -z "$SCHEMA_NAMES" ]]; then
echo " ⚠️ Warning: SCHEMA_NAMES variable is not set in config. Skipping schema export."
return
fi
echo "🔎 Found schemas to export: ${SCHEMA_NAMES}"
for schema in $SCHEMA_NAMES; do
perform_schema_export "$schema"
echo "--------------------------------------------------"
done
}
# Performs a full backup of the tenant database.
perform_tenant_backup() {
echo "⬇️ Starting tenant backup..."
@@ -104,7 +118,6 @@ perform_tenant_backup() {
     local backup_path_prefix
     local backup_target_dir
     # NEW: Determine backup path based on compression setting
     if [[ "$COMPRESS_TENANT" == "true" ]]; then
         backup_target_dir="${backup_base_dir}/tmp"
         backup_path_prefix="${backup_target_dir}/backup_${timestamp}"
@@ -114,25 +127,19 @@ perform_tenant_backup() {
         backup_path_prefix="${backup_target_dir}/backup_${timestamp}"
     fi
     # Create the target directory if it doesn't exist
     mkdir -p "$backup_target_dir"
     # The USER_KEY must be configured to connect to the desired tenant database.
     local query="BACKUP DATA USING FILE ('${backup_path_prefix}')"
     # We redirect stdout and stderr to /dev/null for cleaner cron logs.
     "$HDBSQL_PATH" -U "$USER_KEY" "$query" > /dev/null 2>&1
     local exit_code=$?
     if [[ "$exit_code" -eq 0 ]]; then
         echo " ✅ Successfully initiated tenant backup with prefix '${backup_path_prefix}'."
         # NEW: Conditional compression logic
         if [[ "$COMPRESS_TENANT" == "true" ]]; then
             local archive_file="${backup_base_dir}/backup_${timestamp}.tar.gz"
             echo " 🗜️ Compressing backup files..."
             # The backup creates multiple files starting with the prefix. We compress the whole temp dir.
             # Using -C and '.' ensures we archive the contents of the directory, not the directory itself.
             tar -czf "$archive_file" -C "$backup_target_dir" .
             local tar_exit_code=$?
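As the comments above note, tar -C "$backup_target_dir" . archives the directory's contents rather than the directory itself, so the backup_<timestamp>* pieces sit at the top level of the archive. A sketch of what listing such an archive might show — file names follow HANA's usual <prefix>_databackup_<n>_1 pattern, and the exact set of files depends on the tenant's services:

tar -tzf backup_20250909_193011.tar.gz
./backup_20250909_193011_databackup_0_1
./backup_20250909_193011_databackup_1_1
./backup_20250909_193011_databackup_2_1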
@@ -154,19 +161,17 @@ perform_tenant_backup() {
echo "⚙️ Starting HANA backup process..."
# Ensure the base directory exists
mkdir -p "$BACKUP_BASE_DIR"
case "$BACKUP_TYPE" in
schema)
perform_schema_export
run_all_schema_exports
;;
tenant)
perform_tenant_backup
;;
all)
perform_schema_export
echo "" # Add a newline for better readability
run_all_schema_exports
perform_tenant_backup
;;
*)
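Since the script is designed to run from cron, a matching crontab entry might look like this — the install path and log file are assumptions:

# Run the HANA backup nightly at 02:00 and append all output to a log
0 2 * * * /opt/hana-backup/backup.sh >> /var/log/hana-backup.log 2>&1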