#!/bin/bash
# Version: 1.5.4
# ==============================================================================
# SAP HANA Schema and Tenant Management Tool (hanatool.sh)
#
# A command-line utility to quickly export/import schemas or backup a tenant.
# ==============================================================================
# --- Default Settings ---
# Each of these can be overridden by a command-line option (see usage()).
# Path to the hdbsql client binary; override with --hdbsql <path>.
HDBSQL_PATH="/usr/sap/hdbclient/hdbsql"
# Whether export/backup output is tar.gz-compressed (-c/--compress).
COMPRESS=false
THREADS=0 # 0 means auto-calculate later
# When true, print the commands that would run instead of executing them (-n/--dry-run).
DRY_RUN=false
# ntfy.sh bearer token for push notifications; empty disables them (--ntfy).
NTFY_TOKEN=""
# When true, imports use WITH REPLACE instead of IGNORE EXISTING (--replace).
IMPORT_REPLACE=false
# --- Help/Usage Function ---
# Prints the full CLI help text to stdout. $0 is expanded to the script name.
usage() {
cat <<EOF
SAP HANA Schema and Tenant Management Tool

Usage (Schema): $0 [USER_KEY] export|import [SCHEMA_NAME] [PATH] [OPTIONS]
 (Schema): $0 [USER_KEY] import-rename [SCHEMA_NAME] [NEW_SCHEMA_NAME] [PATH] [OPTIONS]
 (Tenant): $0 [USER_KEY] backup [PATH] [OPTIONS]

Actions:
 export Export a schema to a specified path.
 import Import a schema from a specified path.
 import-rename Import a schema from a path to a new schema name.
 backup Perform a full backup of the tenant.

Arguments:
 USER_KEY The user key from hdbuserstore for DB connection.
 SCHEMA_NAME The name of the source schema.
 NEW_SCHEMA_NAME (Required for import-rename only) The target schema name.
 PATH The file system path for the export/import/backup data.

Options:
 -t, --threads N Specify the number of threads (not used for 'backup').
 -c, --compress Enable tar.gz compression for exports and backups.
 -n, --dry-run Show what commands would be executed without running them.
 --ntfy <token> Send a notification via ntfy.sh upon completion/failure.
 --replace Use the 'REPLACE' option for imports instead of 'IGNORE EXISTING'.
 --hdbsql <path> Specify a custom path for the hdbsql executable.
 -h, --help Show this help message.

Examples:
 # Backup the tenant determined by MY_TENANT_KEY and compress the result
 $0 MY_TENANT_KEY backup /hana/backups -c --ntfy tk_xxxxxxxxxxxx

 # Import MYSCHEMA from a compressed archive
 $0 MY_SCHEMA_KEY import MYSCHEMA /hana/backups/MYSCHEMA_20240101.tar.gz -c

 # Import MYSCHEMA as MYSCHEMA_TEST, replacing any existing objects
 $0 MY_SCHEMA_KEY import-rename MYSCHEMA MYSCHEMA_TEST /hana/backups/temp_export --replace
EOF
}
# --- Notification Function ---
# Best-effort push notification via ntfy; no-ops when NTFY_TOKEN is empty.
#   $1 - message text to deliver
# Reads globals NTFY_TOKEN and DRY_RUN; curl output is discarded.
send_notification() {
  local msg="$1"
  local url="https://ntfy.technopunk.space/sap"
  # Nothing to do without a token.
  if [[ -z "$NTFY_TOKEN" ]]; then
    return 0
  fi
  case "$DRY_RUN" in
    true)
      echo "[DRY RUN] Would send notification: curl -H \"Authorization: Bearer ...\" -d \"$msg\" https://ntfy.technopunk.space/sap"
      ;;
    false)
      echo " Sending notification..."
      curl -s -H "Authorization: Bearer $NTFY_TOKEN" -d "$msg" "$url" > /dev/null
      ;;
  esac
}
# --- Function to get HANA tenant name ---
# Queries SYS.M_DATABASES via hdbsql and prints the tenant name on stdout.
#   $1 - hdbuserstore user key
#   $2 - path to the hdbsql executable
#   $3 - "true" for dry-run mode (prints the would-be command, returns a stub)
# Diagnostics go to stderr so callers can safely capture stdout with $(...).
# Returns 0 on success, 1 when the tenant name cannot be determined.
get_hana_tenant_name() {
  local user_key="$1"
  local hdbsql_path="$2"
  local dry_run="$3"
  local query="SELECT DATABASE_NAME FROM SYS.M_DATABASES;"
  local tenant_name=""
  if [[ "$dry_run" == "true" ]]; then
    # Informational message must go to stderr: stdout is reserved for the
    # tenant name, which callers capture via command substitution. (Printing
    # it to stdout corrupted TENANT_NAME with an extra line.)
    echo "[DRY RUN] Would execute hdbsql to get tenant name: \"$hdbsql_path\" -U \"$user_key\" \"$query\"" >&2
    tenant_name="DRYRUN_TENANT"
  else
    # Strip the header row, keep the first data row, drop whitespace/quotes.
    tenant_name=$("$hdbsql_path" -U "$user_key" "$query" | tail -n +2 | head -n 1 | tr -d '[:space:]' | tr -d '"')
    if [[ -z "$tenant_name" ]]; then
      echo "❌ Error: Could not retrieve HANA tenant name using user key '${user_key}'." >&2
      # 'exit' inside $(...) would only kill the subshell; return a status
      # instead so callers can check $? and abort themselves.
      return 1
    fi
  fi
  echo "$tenant_name"
}
# --- Argument Parsing ---
# Separates option flags from positional arguments; the positionals are
# restored into $1..$N afterwards ($1 = USER_KEY, $2 = ACTION, ...).
# NOTE: options taking a value are validated — previously a trailing
# '--threads' / '--ntfy' / '--hdbsql' without a value made 'shift 2' fail
# without shifting, looping forever on the same token.
POSITIONAL_ARGS=()
while [[ $# -gt 0 ]]; do
  case $1 in
    -t|--threads)
      if [[ $# -lt 2 ]]; then
        echo "❌ Error: Option '$1' requires a value."
        exit 1
      fi
      # Fail early on a non-numeric count instead of crashing later in
      # the [[ -eq ]] comparison of the thread auto-detection.
      if ! [[ "$2" =~ ^[0-9]+$ ]]; then
        echo "❌ Error: Option '$1' expects a numeric value, got '$2'."
        exit 1
      fi
      THREADS="$2"
      shift 2
      ;;
    -c|--compress)
      COMPRESS=true
      shift
      ;;
    -n|--dry-run)
      DRY_RUN=true
      shift
      ;;
    --ntfy)
      if [[ $# -lt 2 ]]; then
        echo "❌ Error: Option '$1' requires a value."
        exit 1
      fi
      NTFY_TOKEN="$2"
      shift 2
      ;;
    --replace)
      IMPORT_REPLACE=true
      shift
      ;;
    --hdbsql)
      if [[ $# -lt 2 ]]; then
        echo "❌ Error: Option '$1' requires a value."
        exit 1
      fi
      HDBSQL_PATH="$2"
      shift 2
      ;;
    -h|--help)
      usage
      exit 0
      ;;
    *)
      POSITIONAL_ARGS+=("$1") # save positional arg
      shift
      ;;
  esac
done
set -- "${POSITIONAL_ARGS[@]}" # restore positional parameters
# Assign common positional arguments
USER_KEY="$1"
ACTION="$2"
# --- Main Logic ---
# Announce dry-run mode up front so subsequent "[DRY RUN]" lines are expected.
if [[ "$DRY_RUN" == "true" ]]; then
  echo "⚠️ --- DRY RUN MODE ENABLED --- ⚠️"
  echo "No actual commands will be executed."
  echo "-------------------------------------"
fi
# Check for hdbsql executable
if [[ ! -x "$HDBSQL_PATH" ]]; then
  echo "❌ Error: hdbsql not found or not executable at '${HDBSQL_PATH}'"
  exit 1
fi
# Calculate default threads if not specified and action is not backup
# (half of all hardware threads, with a floor of 1).
if [[ "$THREADS" -eq 0 && "$ACTION" != "backup" ]]; then
  TOTAL_THREADS=$(nproc --all)
  THREADS=$(( TOTAL_THREADS / 2 ))
  if (( THREADS == 0 )); then
    THREADS=1
  fi
  echo " Auto-detected threads to use: ${THREADS}"
fi
# Execute action based on user input
case "$ACTION" in
backup)
  # --- Tenant backup ---
  # Runs BACKUP DATA for the tenant resolved from USER_KEY; with -c the
  # backup files land in a temp dir and are tar.gz'd into TARGET_PATH.
  TARGET_PATH="$3"
  if [[ -z "$USER_KEY" || -z "$TARGET_PATH" ]]; then
    echo "❌ Error: Missing arguments for 'backup' action."
    usage
    exit 1
  fi
  echo "⬇️ Starting tenant backup..."
  echo " - User Key: ${USER_KEY}"
  echo " - Path: ${TARGET_PATH}"
  echo " - Compress: ${COMPRESS}"
  # Abort when the tenant name cannot be resolved: an 'exit' inside the
  # helper only kills the $(...) subshell, so the substitution's status
  # must be checked explicitly here or the script would continue with a
  # bogus TENANT_NAME.
  TENANT_NAME=$(get_hana_tenant_name "$USER_KEY" "$HDBSQL_PATH" "$DRY_RUN") || exit 1
  echo " - Tenant Name: ${TENANT_NAME}"
  timestamp=$(date +%Y%m%d_%H%M%S)
  backup_target_dir="$TARGET_PATH" # Initialize with TARGET_PATH
  backup_path_prefix=""
  if [[ "$COMPRESS" == "true" ]]; then
    # Back up into a private temp dir so only this run's files get archived.
    if [[ "$DRY_RUN" == "true" ]]; then
      backup_target_dir="${TARGET_PATH}/${TENANT_NAME}_backup_DRYRUN_TEMP" # Use TARGET_PATH
    else
      backup_target_dir=$(mktemp -d "${TARGET_PATH}/${TENANT_NAME}_backup_${timestamp}_XXXXXXXX") # Use TARGET_PATH
    fi
    echo " Using temporary backup directory: ${backup_target_dir}"
  fi
  if [[ "$DRY_RUN" == "true" ]]; then
    echo "[DRY RUN] Would create directory: mkdir -p \"$backup_target_dir\""
  else
    mkdir -p "$backup_target_dir"
  fi
  backup_path_prefix="${backup_target_dir}/backup_${TENANT_NAME}_${timestamp}"
  QUERY="BACKUP DATA USING FILE ('${backup_path_prefix}')"
  EXIT_CODE=0
  if [[ "$DRY_RUN" == "true" ]]; then
    echo "[DRY RUN] Would execute hdbsql: \"$HDBSQL_PATH\" -U \"$USER_KEY\" \"$QUERY\""
  else
    "$HDBSQL_PATH" -U "$USER_KEY" "$QUERY" > /dev/null 2>&1
    EXIT_CODE=$?
  fi
  if [[ "$EXIT_CODE" -eq 0 ]]; then
    echo "✅ Successfully initiated tenant backup with prefix '${backup_path_prefix}'."
    if [[ "$COMPRESS" == "true" ]]; then
      ARCHIVE_FILE="${TARGET_PATH}/${TENANT_NAME}_backup_${timestamp}.tar.gz"
      echo "🗜️ Compressing backup files to '${ARCHIVE_FILE}'..."
      TAR_EXIT_CODE=0
      if [[ "$DRY_RUN" == "true" ]]; then
        echo "[DRY RUN] Would execute tar: tar -czf \"$ARCHIVE_FILE\" -C \"$backup_target_dir\" ."
      else
        tar -czf "$ARCHIVE_FILE" -C "$backup_target_dir" .
        TAR_EXIT_CODE=$?
      fi
      if [[ "$TAR_EXIT_CODE" -eq 0 ]]; then
        echo "✅ Successfully created archive."
        echo "🧹 Cleaning up temporary directory..."
        if [[ "$DRY_RUN" == "true" ]]; then
          echo "[DRY RUN] Would remove temp directory: rm -rf \"$backup_target_dir\""
        else
          rm -rf "$backup_target_dir"
        fi
      else
        # Temp dir is intentionally kept on tar failure so the raw backup
        # files are not lost.
        echo "❌ Error: Failed to create archive from '${backup_target_dir}'."
      fi
    fi
    send_notification "✅ HANA tenant '${TENANT_NAME}' backup completed successfully."
  else
    echo "❌ Error: Failed to initiate tenant backup (hdbsql exit code: ${EXIT_CODE})."
    send_notification "❌ HANA tenant '${TENANT_NAME}' backup FAILED."
    # Remove the (empty or partial) temp dir only in real compressed runs.
    if [[ "$COMPRESS" == "true" && "$DRY_RUN" == "false" ]]; then rm -rf "$backup_target_dir"; fi
  fi
  ;;
export)
# --- Schema export ---
# Exports all objects of SCHEMA_NAME as HANA binary format into TARGET_PATH,
# optionally compressing the result into a timestamped tar.gz.
SCHEMA_NAME="$3"
TARGET_PATH="$4"
if [[ -z "$USER_KEY" || -z "$SCHEMA_NAME" || -z "$TARGET_PATH" ]]; then
echo "❌ Error: Missing arguments for 'export' action."
usage
exit 1
fi
echo "⬇️ Starting schema export..."
echo " - User Key: ${USER_KEY}"
echo " - Schema: ${SCHEMA_NAME}"
echo " - Path: ${TARGET_PATH}"
echo " - Compress: ${COMPRESS}"
echo " - Threads: ${THREADS}"
# Without compression, export straight into TARGET_PATH; with compression,
# use a private temp dir so only this run's files end up in the archive.
EXPORT_DIR="$TARGET_PATH"
if [[ "$COMPRESS" == "true" ]]; then
if [[ "$DRY_RUN" == "true" ]]; then
EXPORT_DIR="${TARGET_PATH}/export_${SCHEMA_NAME}_DRYRUN_TEMP"
else
EXPORT_DIR=$(mktemp -d "${TARGET_PATH}/export_${SCHEMA_NAME}_XXXXXXXX")
fi
echo " Using temporary export directory: ${EXPORT_DIR}"
fi
if [[ "$DRY_RUN" == "true" ]]; then
echo "[DRY RUN] Would create directory: mkdir -p \"$EXPORT_DIR\""
else
mkdir -p "$EXPORT_DIR"
fi
# "SCHEMA"."*" exports every object in the schema; WITH REPLACE overwrites
# any previous export files at the same location.
QUERY="EXPORT \"${SCHEMA_NAME}\".\"*\" AS BINARY INTO '${EXPORT_DIR}' WITH REPLACE THREADS ${THREADS};"
EXIT_CODE=0
if [[ "$DRY_RUN" == "true" ]]; then
echo "[DRY RUN] Would execute hdbsql: \"$HDBSQL_PATH\" -U \"$USER_KEY\" \"$QUERY\""
else
"$HDBSQL_PATH" -U "$USER_KEY" "$QUERY" > /dev/null 2>&1
EXIT_CODE=$?
fi
if [[ "$EXIT_CODE" -eq 0 ]]; then
echo "✅ Successfully exported schema '${SCHEMA_NAME}' to '${EXPORT_DIR}'."
if [[ "$COMPRESS" == "true" ]]; then
ARCHIVE_FILE="${TARGET_PATH}/${SCHEMA_NAME}_$(date +%Y%m%d_%H%M%S).tar.gz"
echo "🗜️ Compressing files to '${ARCHIVE_FILE}'..."
TAR_EXIT_CODE=0
if [[ "$DRY_RUN" == "true" ]]; then
echo "[DRY RUN] Would execute tar: tar -czf \"$ARCHIVE_FILE\" -C \"$(dirname "$EXPORT_DIR")\" \"$(basename "$EXPORT_DIR")\""
else
# Archive the export dir by name (one top-level directory inside the
# tarball); compressed import strips it again with --strip-components=1.
tar -czf "$ARCHIVE_FILE" -C "$(dirname "$EXPORT_DIR")" "$(basename "$EXPORT_DIR")"
TAR_EXIT_CODE=$?
fi
if [[ "$TAR_EXIT_CODE" -eq 0 ]]; then
echo "✅ Successfully created archive."
echo "🧹 Cleaning up temporary directory..."
if [[ "$DRY_RUN" == "true" ]]; then
echo "[DRY RUN] Would remove temp directory: rm -rf \"$EXPORT_DIR\""
else
rm -rf "$EXPORT_DIR"
fi
else
# NOTE(review): on tar failure the temp dir is kept (uncompressed export
# survives) and the success notification below still fires — confirm
# whether that is intended.
echo "❌ Error: Failed to create archive from '${EXPORT_DIR}'."
fi
fi
send_notification "✅ Export of schema '${SCHEMA_NAME}' completed successfully."
else
echo "❌ Error: Failed to export schema '${SCHEMA_NAME}' (hdbsql exit code: ${EXIT_CODE})."
send_notification "❌ Export of schema '${SCHEMA_NAME}' FAILED."
if [[ "$COMPRESS" == "true" && "$DRY_RUN" == "false" ]]; then rm -rf "$EXPORT_DIR"; fi
fi
;;
import|import-rename)
# --- Schema import (optionally renaming the target schema) ---
# 'import':        $0 KEY import SCHEMA PATH
# 'import-rename': $0 KEY import-rename SCHEMA NEW_SCHEMA PATH
SCHEMA_NAME="$3"
if [[ "$ACTION" == "import" ]]; then
SOURCE_PATH="$4"
NEW_SCHEMA_NAME=""
if [[ -z "$USER_KEY" || -z "$SCHEMA_NAME" || -z "$SOURCE_PATH" ]]; then
echo "❌ Error: Missing arguments for 'import' action."
usage
exit 1
fi
else # import-rename
NEW_SCHEMA_NAME="$4"
SOURCE_PATH="$5"
if [[ -z "$USER_KEY" || -z "$SCHEMA_NAME" || -z "$NEW_SCHEMA_NAME" || -z "$SOURCE_PATH" ]]; then
echo "❌ Error: Missing arguments for 'import-rename' action."
usage
exit 1
fi
fi
echo "⬆️ Starting schema import..."
echo " - User Key: ${USER_KEY}"
echo " - Source Schema: ${SCHEMA_NAME}"
if [[ -n "$NEW_SCHEMA_NAME" ]]; then
echo " - Target Schema: ${NEW_SCHEMA_NAME}"
fi
echo " - Path: ${SOURCE_PATH}"
echo " - Compress: ${COMPRESS}"
echo " - Threads: ${THREADS}"
# With -c, SOURCE_PATH is a tar.gz that is unpacked into a temp dir first;
# otherwise it is used directly as the import directory.
IMPORT_DIR="$SOURCE_PATH"
if [[ "$COMPRESS" == "true" ]]; then
if [[ ! -f "$SOURCE_PATH" && "$DRY_RUN" == "false" ]]; then
echo "❌ Error: Source path '${SOURCE_PATH}' is not a valid file for compressed import."
exit 1
fi
if [[ "$DRY_RUN" == "true" ]]; then
IMPORT_DIR="/tmp/import_${SCHEMA_NAME}_DRYRUN_TEMP"
else
IMPORT_DIR=$(mktemp -d "/tmp/import_${SCHEMA_NAME}_XXXXXXXX")
fi
echo " Decompressing to temporary directory: ${IMPORT_DIR}"
TAR_EXIT_CODE=0
if [[ "$DRY_RUN" == "true" ]]; then
echo "[DRY RUN] Would decompress archive: tar -xzf \"$SOURCE_PATH\" -C \"$IMPORT_DIR\" --strip-components=1"
else
# --strip-components=1 assumes the archive wraps everything in a single
# top-level directory (the layout 'export -c' produces) — TODO confirm
# before importing archives created by other tools.
tar -xzf "$SOURCE_PATH" -C "$IMPORT_DIR" --strip-components=1
TAR_EXIT_CODE=$?
fi
if [[ "$TAR_EXIT_CODE" -ne 0 ]]; then
echo "❌ Error: Failed to decompress '${SOURCE_PATH}'."
if [[ "$DRY_RUN" == "false" ]]; then rm -rf "$IMPORT_DIR"; fi
exit 1
fi
fi
if [[ ! -d "$IMPORT_DIR" && "$DRY_RUN" == "false" ]]; then
echo "❌ Error: Import directory '${IMPORT_DIR}' does not exist."
exit 1
fi
# Build the WITH clause: REPLACE vs IGNORE EXISTING, plus RENAME SCHEMA
# for the import-rename action.
import_options=""
if [[ "$IMPORT_REPLACE" == "true" ]]; then
import_options="REPLACE"
echo " - Mode: REPLACE"
else
import_options="IGNORE EXISTING"
echo " - Mode: IGNORE EXISTING (default)"
fi
if [[ "$ACTION" == "import-rename" ]]; then
import_options="${import_options} RENAME SCHEMA \"${SCHEMA_NAME}\" TO \"${NEW_SCHEMA_NAME}\""
fi
QUERY="IMPORT \"${SCHEMA_NAME}\".\"*\" AS BINARY FROM '${IMPORT_DIR}' WITH ${import_options} THREADS ${THREADS};"
EXIT_CODE=0
if [[ "$DRY_RUN" == "true" ]]; then
echo "[DRY RUN] Would execute hdbsql: \"$HDBSQL_PATH\" -U \"$USER_KEY\" \"$QUERY\""
else
"$HDBSQL_PATH" -U "$USER_KEY" "$QUERY" > /dev/null 2>&1
EXIT_CODE=$?
fi
# Falls back to the source name when no rename target was given.
target_schema_name="${NEW_SCHEMA_NAME:-$SCHEMA_NAME}"
if [[ "$EXIT_CODE" -eq 0 ]]; then
echo "✅ Successfully imported schema."
send_notification "${ACTION} of schema '${SCHEMA_NAME}' to '${target_schema_name}' completed successfully."
else
echo "❌ Error: Failed to import schema (hdbsql exit code: ${EXIT_CODE})."
send_notification "${ACTION} of schema '${SCHEMA_NAME}' to '${target_schema_name}' FAILED."
fi
# The temp dir from decompression is removed regardless of import outcome.
if [[ "$COMPRESS" == "true" ]]; then
echo "🧹 Cleaning up temporary directory..."
if [[ "$DRY_RUN" == "true" ]]; then
echo "[DRY RUN] Would remove temp directory: rm -rf \"$IMPORT_DIR\""
else
rm -rf "$IMPORT_DIR"
fi
fi
;;
*)
  # Unknown action: report it, show the help text, and fail.
  printf '%s\n' "❌ Error: Invalid action '${ACTION}'."
  usage
  exit 1
  ;;
esac
echo "✅ Process complete."