#!/bin/bash
# Version: 1.3.0
# ==============================================================================
# SAP HANA Schema Management Tool (hanatool.sh)
#
# A command-line utility to quickly export, restore, or restore-and-rename
# a SAP HANA schema.
# ==============================================================================

# --- Default Settings ---
HDBSQL_PATH="/usr/sap/hdbclient/hdbsql"
COMPRESS=false
THREADS=0 # 0 means auto-calculate later
DRY_RUN=false
NTFY_TOKEN=""

# --- Help/Usage Function ---
usage() {
    echo "SAP HANA Schema Management Tool"
    echo ""
    echo "Usage: $0 <USER_KEY> <ACTION> <SCHEMA_NAME> <PATH> [OPTIONS]"
    echo "       $0 <USER_KEY> restore-rename <SCHEMA_NAME> <NEW_SCHEMA_NAME> <PATH> [OPTIONS]"
    echo ""
    echo "Actions:"
    echo "  export           Export a schema to a specified path."
    echo "  restore          Restore a schema from a specified path."
    echo "  restore-rename   Restore a schema from a path to a new schema name."
    echo ""
    echo "Arguments:"
    echo "  USER_KEY         The user key from hdbuserstore for the DB connection."
    echo "  SCHEMA_NAME      The name of the source schema."
    echo "  NEW_SCHEMA_NAME  (Required for restore-rename only) The target schema name."
    echo "  PATH             The file system path for the export/import data."
    echo ""
    echo "Options:"
    echo "  -t, --threads N  Specify the number of threads. Defaults to half of the system cores."
    echo "  -c, --compress   Enable tar.gz compression for exports and decompression for imports."
    echo "  -n, --dry-run    Show what commands would be executed without running them."
    echo "  --ntfy TOKEN     Send an ntfy notification upon completion or failure."
    echo "  --hdbsql PATH    Specify a custom path for the hdbsql executable."
    echo "  -h, --help       Show this help message."
    echo ""
    echo "Examples:"
    echo "  # Export MYSCHEMA and send a notification on completion"
    echo "  $0 MYKEY export MYSCHEMA /hana/backups -c --ntfy tk_xxxxxxxxxxxx"
    echo ""
    echo "  # Restore MYSCHEMA as MYSCHEMA_TEST using a custom hdbsql path"
    echo "  $0 MYKEY restore-rename MYSCHEMA MYSCHEMA_TEST /hana/backups/temp_export --hdbsql /sap/custom/hdbsql"
}

# --- Notification Function ---
send_notification() {
    local message="$1"
    if [[ -n "$NTFY_TOKEN" && "$DRY_RUN" == "false" ]]; then
        echo "ℹ️ Sending notification..."
        curl -s -H "Authorization: Bearer $NTFY_TOKEN" -d "$message" https://ntfy.technopunk.space/sap > /dev/null
    elif [[ -n "$NTFY_TOKEN" && "$DRY_RUN" == "true" ]]; then
        echo "[DRY RUN] Would send notification: curl -H \"Authorization: Bearer ...\" -d \"$message\" https://ntfy.technopunk.space/sap"
    fi
}

# --- Argument Parsing ---
POSITIONAL_ARGS=()
while [[ $# -gt 0 ]]; do
    case $1 in
        -t|--threads)
            THREADS="$2"
            shift 2
            ;;
        -c|--compress)
            COMPRESS=true
            shift
            ;;
        -n|--dry-run)
            DRY_RUN=true
            shift
            ;;
        --ntfy)
            NTFY_TOKEN="$2"
            shift 2
            ;;
        --hdbsql)
            HDBSQL_PATH="$2"
            shift 2
            ;;
        -h|--help)
            usage
            exit 0
            ;;
        *)
            POSITIONAL_ARGS+=("$1") # save positional arg
            shift
            ;;
    esac
done
set -- "${POSITIONAL_ARGS[@]}" # restore positional parameters

# Assign positional arguments to variables
USER_KEY="$1"
ACTION="$2"
SCHEMA_NAME="$3"
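
# Note: USER_KEY must reference an existing hdbuserstore entry on this host.
# Illustrative example only (host, port, user, and password are placeholders):
#   hdbuserstore SET MYKEY "hanahost:30015" BACKUP_USER "MyPassword"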
-x "$HDBSQL_PATH" ]]; then echo "❌ Error: hdbsql not found or not executable at '${HDBSQL_PATH}'" exit 1 fi # Calculate default threads if not specified if [[ "$THREADS" -eq 0 ]]; then TOTAL_THREADS=$(nproc --all) THREADS=$((TOTAL_THREADS / 2)) if [[ "$THREADS" -eq 0 ]]; then THREADS=1 fi echo "ℹ️ Auto-detected threads to use: ${THREADS}" fi # Execute action based on user input case "$ACTION" in export) TARGET_PATH="$4" if [[ -z "$USER_KEY" || -z "$SCHEMA_NAME" || -z "$TARGET_PATH" ]]; then echo "❌ Error: Missing arguments for 'export' action." usage exit 1 fi echo "⬇️ Starting schema export..." echo " - User Key: ${USER_KEY}" echo " - Schema: ${SCHEMA_NAME}" echo " - Path: ${TARGET_PATH}" echo " - Compress: ${COMPRESS}" echo " - Threads: ${THREADS}" EXPORT_DIR="$TARGET_PATH" if [[ "$COMPRESS" == "true" ]]; then if [[ "$DRY_RUN" == "true" ]]; then EXPORT_DIR="${TARGET_PATH}/export_${SCHEMA_NAME}_DRYRUN_TEMP" else EXPORT_DIR=$(mktemp -d "${TARGET_PATH}/export_${SCHEMA_NAME}_XXXXXXXX") fi echo "ℹ️ Using temporary export directory: ${EXPORT_DIR}" fi if [[ "$DRY_RUN" == "true" ]]; then echo "[DRY RUN] Would create directory: mkdir -p \"$EXPORT_DIR\"" else mkdir -p "$EXPORT_DIR" fi QUERY="EXPORT \"${SCHEMA_NAME}\".\"*\" AS BINARY INTO '${EXPORT_DIR}' WITH REPLACE THREADS ${THREADS};" EXIT_CODE=0 if [[ "$DRY_RUN" == "true" ]]; then echo "[DRY RUN] Would execute hdbsql: \"$HDBSQL_PATH\" -U \"$USER_KEY\" \"$QUERY\"" else "$HDBSQL_PATH" -U "$USER_KEY" "$QUERY" > /dev/null 2>&1 EXIT_CODE=$? fi if [[ "$EXIT_CODE" -eq 0 ]]; then echo "✅ Successfully exported schema '${SCHEMA_NAME}' to '${EXPORT_DIR}'." if [[ "$COMPRESS" == "true" ]]; then ARCHIVE_FILE="${TARGET_PATH}/${SCHEMA_NAME}_$(date +%Y%m%d_%H%M%S).tar.gz" echo "🗜️ Compressing files to '${ARCHIVE_FILE}'..." TAR_EXIT_CODE=0 if [[ "$DRY_RUN" == "true" ]]; then echo "[DRY RUN] Would execute tar: tar -czf \"$ARCHIVE_FILE\" -C \"$(dirname "$EXPORT_DIR")\" \"$(basename "$EXPORT_DIR")\"" else tar -czf "$ARCHIVE_FILE" -C "$(dirname "$EXPORT_DIR")" "$(basename "$EXPORT_DIR")" TAR_EXIT_CODE=$? fi if [[ "$TAR_EXIT_CODE" -eq 0 ]]; then echo "✅ Successfully created archive." echo "🧹 Cleaning up temporary directory..." if [[ "$DRY_RUN" == "true" ]]; then echo "[DRY RUN] Would remove temp directory: rm -rf \"$EXPORT_DIR\"" else rm -rf "$EXPORT_DIR" fi else echo "❌ Error: Failed to create archive from '${EXPORT_DIR}'." fi fi send_notification "✅ Export of schema '${SCHEMA_NAME}' completed successfully." else echo "❌ Error: Failed to export schema '${SCHEMA_NAME}' (hdbsql exit code: ${EXIT_CODE})." send_notification "❌ Export of schema '${SCHEMA_NAME}' FAILED." if [[ "$COMPRESS" == "true" && "$DRY_RUN" == "false" ]]; then rm -rf "$EXPORT_DIR"; fi fi ;; restore|restore-rename) if [[ "$ACTION" == "restore" ]]; then SOURCE_PATH="$4" NEW_SCHEMA_NAME="" if [[ -z "$USER_KEY" || -z "$SCHEMA_NAME" || -z "$SOURCE_PATH" ]]; then echo "❌ Error: Missing arguments for 'restore' action." usage exit 1 fi else # restore-rename NEW_SCHEMA_NAME="$4" SOURCE_PATH="$5" if [[ -z "$USER_KEY" || -z "$SCHEMA_NAME" || -z "$NEW_SCHEMA_NAME" || -z "$SOURCE_PATH" ]]; then echo "❌ Error: Missing arguments for 'restore-rename' action." usage exit 1 fi fi echo "⬆️ Starting schema import..." 
echo " - User Key: ${USER_KEY}" echo " - Source Schema: ${SCHEMA_NAME}" if [[ -n "$NEW_SCHEMA_NAME" ]]; then echo " - Target Schema: ${NEW_SCHEMA_NAME}" fi echo " - Path: ${SOURCE_PATH}" echo " - Compress: ${COMPRESS}" echo " - Threads: ${THREADS}" IMPORT_DIR="$SOURCE_PATH" if [[ "$COMPRESS" == "true" ]]; then if [[ ! -f "$SOURCE_PATH" && "$DRY_RUN" == "false" ]]; then echo "❌ Error: Source path '${SOURCE_PATH}' is not a valid file for compressed import." exit 1 fi if [[ "$DRY_RUN" == "true" ]]; then IMPORT_DIR="/tmp/import_${SCHEMA_NAME}_DRYRUN_TEMP" else IMPORT_DIR=$(mktemp -d "/tmp/import_${SCHEMA_NAME}_XXXXXXXX") fi echo "ℹ️ Decompressing to temporary directory: ${IMPORT_DIR}" TAR_EXIT_CODE=0 if [[ "$DRY_RUN" == "true" ]]; then echo "[DRY RUN] Would decompress archive: tar -xzf \"$SOURCE_PATH\" -C \"$IMPORT_DIR\" --strip-components=1" else tar -xzf "$SOURCE_PATH" -C "$IMPORT_DIR" --strip-components=1 TAR_EXIT_CODE=$? fi if [[ "$TAR_EXIT_CODE" -ne 0 ]]; then echo "❌ Error: Failed to decompress '${SOURCE_PATH}'." if [[ "$DRY_RUN" == "false" ]]; then rm -rf "$IMPORT_DIR"; fi exit 1 fi fi if [[ ! -d "$IMPORT_DIR" && "$DRY_RUN" == "false" ]]; then echo "❌ Error: Import directory '${IMPORT_DIR}' does not exist." exit 1 fi QUERY_RENAME_PART="" if [[ "$ACTION" == "restore-rename" ]]; then QUERY_RENAME_PART="WITH RENAME SCHEMA \"${SCHEMA_NAME}\" TO \"${NEW_SCHEMA_NAME}\"" fi QUERY="IMPORT \"${SCHEMA_NAME}\".\"*\" AS BINARY FROM '${IMPORT_DIR}' ${QUERY_RENAME_PART} WITH IGNORE EXISTING THREADS ${THREADS};" EXIT_CODE=0 if [[ "$DRY_RUN" == "true" ]]; then echo "[DRY RUN] Would execute hdbsql: \"$HDBSQL_PATH\" -U \"$USER_KEY\" \"$QUERY\"" else "$HDBSQL_PATH" -U "$USER_KEY" "$QUERY" > /dev/null 2>&1 EXIT_CODE=$? fi local target_schema_name="${NEW_SCHEMA_NAME:-$SCHEMA_NAME}" if [[ "$EXIT_CODE" -eq 0 ]]; then echo "✅ Successfully imported schema." send_notification "✅ ${ACTION} of schema '${SCHEMA_NAME}' to '${target_schema_name}' completed successfully." else echo "❌ Error: Failed to import schema (hdbsql exit code: ${EXIT_CODE})." send_notification "❌ ${ACTION} of schema '${SCHEMA_NAME}' to '${target_schema_name}' FAILED." fi if [[ "$COMPRESS" == "true" ]]; then echo "🧹 Cleaning up temporary directory..." if [[ "$DRY_RUN" == "true" ]]; then echo "[DRY RUN] Would remove temp directory: rm -rf \"$IMPORT_DIR\"" else rm -rf "$IMPORT_DIR" fi fi ;; *) echo "❌ Error: Invalid action '${ACTION}'." usage exit 1 ;; esac echo "✅ Process complete."