add compression to backup

This commit is contained in:
2025-09-09 17:21:49 +02:00
parent 1d9a933b73
commit 17ed3b8e9d
2 changed files with 79 additions and 31 deletions

View File

@@ -11,7 +11,6 @@ HDBSQL_PATH="/usr/sap/hdbclient/hdbsql"
# This key should be configured to connect to the target tenant database.
USER_KEY="CRONKEY"
# --- Backup Settings ---
# The base directory where all backup files and directories will be stored.
@@ -26,6 +25,13 @@ BACKUP_BASE_DIR="/hana/backups/automated"
# 'all' - Performs both the schema export and the tenant backup.
BACKUP_TYPE="all"
# Schema can be compressed after exporting, decreasing its size.
COMPRESS_SCHEMA=true
# Same as COMPRESS_SCHEMA, but for the tenant. This can take a long time.
# In testing, this saved ~80% of disk space. It takes around 5-10 minutes
# for a 40GB tenant.
COMPRESS_TENANT=true
# --- Target Identifiers ---

View File

@@ -37,43 +37,58 @@ fi
# --- Functions ---
# Performs a binary export of a specific schema and compresses it.
# Performs a binary export of a specific schema.
perform_schema_export() {
echo "⬇️ Starting schema export for '${SCHEMA_NAME}'..."
local timestamp
timestamp=$(date +%Y%m%d_%H%M%S)
local export_dir="${BACKUP_BASE_DIR}/schema"
local export_path="${export_dir}/${SCHEMA_NAME}_${timestamp}"
local archive_file="${export_path}.tar.gz"
local export_base_dir="${BACKUP_BASE_DIR}/schema"
local export_path="${export_base_dir}/${SCHEMA_NAME}_${timestamp}"
local query_export_path="$export_path" # Default path for the EXPORT query
# NEW: If compression is enabled, export to a temporary directory
if [[ "$COMPRESS_SCHEMA" == "true" ]]; then
export_path="${export_base_dir}/tmp/${SCHEMA_NAME}_${timestamp}"
query_export_path="$export_path"
echo " Compression enabled. Using temporary export path: ${export_path}"
fi
local archive_file="${export_base_dir}/${SCHEMA_NAME}_${timestamp}.tar.gz"
# Create the target directory if it doesn't exist
mkdir -p "$export_dir"
mkdir -p "$(dirname "$export_path")"
# Construct and execute the EXPORT query
local query="EXPORT \"${SCHEMA_NAME}\".\"*\" AS BINARY INTO '${export_path}' WITH REPLACE THREADS ${THREADS};"
local query="EXPORT \"${SCHEMA_NAME}\".\"*\" AS BINARY INTO '${query_export_path}' WITH REPLACE THREADS ${THREADS};"
# We redirect stdout and stderr to /dev/null for cleaner cron logs.
# Remove "> /dev/null 2>&1" if you need to debug connection issues.
"$HDBSQL_PATH" -U "$USER_KEY" "$query" > /dev/null 2>&1
local exit_code=$?
if [[ "$exit_code" -eq 0 ]]; then
echo " ✅ Successfully exported schema '${SCHEMA_NAME}'."
# Compress the exported directory
# NEW: Conditional compression logic
if [[ "$COMPRESS_SCHEMA" == "true" ]]; then
echo " 🗜️ Compressing exported files..."
tar -czf "$archive_file" -C "$export_dir" "$(basename "$export_path")"
# Use -C to change directory, ensuring the archive doesn't contain the 'tmp' path
tar -czf "$archive_file" -C "$(dirname "$export_path")" "$(basename "$export_path")"
local tar_exit_code=$?
if [[ "$tar_exit_code" -eq 0 ]]; then
echo " ✅ Successfully created archive '${archive_file}'."
echo " 🧹 Cleaning up temporary directory..."
rm -rf "$export_path"
# Clean up the tmp parent if it's empty
rmdir --ignore-fail-on-non-empty "$(dirname "$export_path")"
echo " ✨ Cleanup complete."
else
echo " ❌ Error: Failed to compress '${export_path}'."
fi
else
echo " Compression disabled. Raw export files are located at '${export_path}'."
fi
else
echo " ❌ Error: Failed to export schema '${SCHEMA_NAME}' (hdbsql exit code: ${exit_code})."
fi
@@ -85,23 +100,51 @@ perform_tenant_backup() {
local timestamp
timestamp=$(date +%Y%m%d_%H%M%S)
local backup_dir="${BACKUP_BASE_DIR}/tenant"
# HANA will create the final file; we just provide the location and prefix.
local backup_path_prefix="${backup_dir}/backup_${timestamp}"
local backup_base_dir="${BACKUP_BASE_DIR}/tenant"
local backup_path_prefix
local backup_target_dir
# NEW: Determine backup path based on compression setting
if [[ "$COMPRESS_TENANT" == "true" ]]; then
backup_target_dir="${backup_base_dir}/tmp"
backup_path_prefix="${backup_target_dir}/backup_${timestamp}"
echo " Compression enabled. Using temporary backup path: ${backup_path_prefix}"
else
backup_target_dir="$backup_base_dir"
backup_path_prefix="${backup_target_dir}/backup_${timestamp}"
fi
# Create the target directory if it doesn't exist
mkdir -p "$backup_dir"
mkdir -p "$backup_target_dir"
# The USER_KEY must be configured to connect to the desired tenant database.
local query="BACKUP DATA USING FILE ('${backup_path_prefix}')"
# We redirect stdout and stderr to /dev/null for cleaner cron logs.
# Remove "> /dev/null 2>&1" if you need to debug connection issues.
"$HDBSQL_PATH" -U "$USER_KEY" "$query" > /dev/null 2>&1
local exit_code=$?
if [[ "$exit_code" -eq 0 ]]; then
echo " ✅ Successfully initiated tenant backup with prefix '${backup_path_prefix}'."
# NEW: Conditional compression logic
if [[ "$COMPRESS_TENANT" == "true" ]]; then
local archive_file="${backup_base_dir}/backup_${timestamp}.tar.gz"
echo " 🗜️ Compressing backup files..."
# The backup creates multiple files starting with the prefix. We compress the whole temp dir.
# Using -C and '.' ensures we archive the contents of the directory, not the directory itself.
tar -czf "$archive_file" -C "$backup_target_dir" .
local tar_exit_code=$?
if [[ "$tar_exit_code" -eq 0 ]]; then
echo " ✅ Successfully created archive '${archive_file}'."
echo " 🧹 Cleaning up temporary directory..."
rm -rf "$backup_target_dir"
echo " ✨ Cleanup complete."
else
echo " ❌ Error: Failed to compress backup files in '${backup_target_dir}'."
fi
fi
else
echo " ❌ Error: Failed to initiate tenant backup (hdbsql exit code: ${exit_code})."
fi
@@ -133,4 +176,3 @@ esac
echo "📦 Backup process complete."
echo "👋 Exiting."