#!/bin/bash
# ==============================================================================
# SAP HANA Backup Script
#
# Performs schema exports and/or tenant backups for a SAP HANA database.
# Designed to be executed via a cronjob.
# Reads all settings from the backup.conf file in the same directory.
# ==============================================================================

# --- Configuration and Setup ---

# Find the script's own directory to locate the config file
SCRIPT_DIR=$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" &> /dev/null && pwd)
CONFIG_FILE="${SCRIPT_DIR}/backup.conf"

# Check for config file and source it
if [[ -f "$CONFIG_FILE" ]]; then
    source "$CONFIG_FILE"
else
    echo "❌ Error: Configuration file not found at '${CONFIG_FILE}'"
    exit 1
fi

# Check if hdbsql executable exists
if [[ ! -x "$HDBSQL_PATH" ]]; then
    echo "❌ Error: hdbsql not found or not executable at '${HDBSQL_PATH}'"
    exit 1
fi

# Calculate threads to use (half of the available cores, but at least 1)
TOTAL_THREADS=$(nproc --all)
THREADS=$((TOTAL_THREADS / 2))
if [[ "$THREADS" -eq 0 ]]; then
    THREADS=1
fi

# --- Functions ---

# Performs a binary export of a specific schema and compresses it.
perform_schema_export() {
    echo "⬇️ Starting schema export for '${SCHEMA_NAME}'..."
    local timestamp
    timestamp=$(date +%Y%m%d_%H%M%S)
    local export_dir="${BACKUP_BASE_DIR}/schema"
    local export_path="${export_dir}/${SCHEMA_NAME}_${timestamp}"
    local archive_file="${export_path}.tar.gz"

    # Create the target directory if it doesn't exist
    mkdir -p "$export_dir"

    # Construct and execute the EXPORT query
    local query="EXPORT \"${SCHEMA_NAME}\".\"*\" AS BINARY INTO '${export_path}' WITH REPLACE THREADS ${THREADS};"

    # We redirect stdout and stderr to /dev/null for cleaner cron logs.
    # Remove "> /dev/null 2>&1" if you need to debug connection issues.
    "$HDBSQL_PATH" -U "$USER_KEY" "$query" > /dev/null 2>&1
    local exit_code=$?

    if [[ "$exit_code" -eq 0 ]]; then
        echo " ✅ Successfully exported schema '${SCHEMA_NAME}'."

        # Compress the exported directory
        echo " 🗜️ Compressing exported files..."
        tar -czf "$archive_file" -C "$export_dir" "$(basename "$export_path")"
        local tar_exit_code=$?

        if [[ "$tar_exit_code" -eq 0 ]]; then
            echo " ✅ Successfully created archive '${archive_file}'."
            echo " 🧹 Cleaning up temporary directory..."
            rm -rf "$export_path"
            echo " ✨ Cleanup complete."
        else
            echo " ❌ Error: Failed to compress '${export_path}'."
        fi
    else
        echo " ❌ Error: Failed to export schema '${SCHEMA_NAME}' (hdbsql exit code: ${exit_code})."
    fi
}

# Performs a full backup of the tenant database.
perform_tenant_backup() {
    echo "⬇️ Starting tenant backup..."
    local timestamp
    timestamp=$(date +%Y%m%d_%H%M%S)
    local backup_dir="${BACKUP_BASE_DIR}/tenant"
    # HANA will create the final file; we just provide the location and prefix.
    local backup_path_prefix="${backup_dir}/backup_${timestamp}"

    # Create the target directory if it doesn't exist
    mkdir -p "$backup_dir"

    # The USER_KEY must be configured to connect to the desired tenant database.
    local query="BACKUP DATA USING FILE ('${backup_path_prefix}')"

    # We redirect stdout and stderr to /dev/null for cleaner cron logs.
    # Remove "> /dev/null 2>&1" if you need to debug connection issues.
    "$HDBSQL_PATH" -U "$USER_KEY" "$query" > /dev/null 2>&1
    local exit_code=$?

    if [[ "$exit_code" -eq 0 ]]; then
        echo " ✅ Successfully initiated tenant backup with prefix '${backup_path_prefix}'."
    else
        echo " ❌ Error: Failed to initiate tenant backup (hdbsql exit code: ${exit_code})."
    fi
}

# --- Main Execution ---

echo "⚙️ Starting HANA backup process..."

# Ensure the base directory exists
mkdir -p "$BACKUP_BASE_DIR"

case "$BACKUP_TYPE" in
    schema)
        perform_schema_export
        ;;
    tenant)
        perform_tenant_backup
        ;;
    all)
        perform_schema_export
        echo "" # Add a newline for better readability
        perform_tenant_backup
        ;;
    *)
        echo " ❌ Error: Invalid BACKUP_TYPE '${BACKUP_TYPE}' in config. Use 'schema', 'tenant', or 'all'."
        # Exit non-zero so cron/monitoring can detect the misconfiguration
        exit 1
        ;;
esac

echo "📦 Backup process complete."
echo "👋 Exiting."
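
# ------------------------------------------------------------------------------
# For reference, a minimal sketch of the backup.conf this script expects.
# The variable names match those read above; every value shown here is a
# placeholder and must be adapted to your system:
#
#   HDBSQL_PATH="/usr/sap/HDB/HDB00/exe/hdbsql"   # path to the hdbsql binary (placeholder)
#   USER_KEY="BACKUPKEY"                          # hdbuserstore key used for the connection (placeholder)
#   BACKUP_BASE_DIR="/backup/hana"                # root directory for the schema/ and tenant/ subfolders (placeholder)
#   SCHEMA_NAME="MYSCHEMA"                        # schema exported when BACKUP_TYPE is 'schema' or 'all' (placeholder)
#   BACKUP_TYPE="all"                             # 'schema', 'tenant', or 'all'
# ------------------------------------------------------------------------------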
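# ------------------------------------------------------------------------------
# Example cron entry for a nightly run at 02:00. The install path, script name,
# and log file are assumptions, not fixed by this script:
#
#   0 2 * * * /opt/hana-backup/hana_backup.sh >> /var/log/hana_backup.log 2>&1
# ------------------------------------------------------------------------------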