#!/bin/bash
# Version: 1.4.1
# ==============================================================================
# SAP HANA Schema and Tenant Management Tool (hanatool.sh)
#
# A command-line utility to quickly export/import schemas or back up a tenant.
# ==============================================================================

# --- Default Settings ---
HDBSQL_PATH="/usr/sap/hdbclient/hdbsql"
COMPRESS=false
THREADS=0 # 0 means auto-calculate later
DRY_RUN=false
NTFY_TOKEN=""

# --- Help/Usage Function ---
usage() {
    echo "SAP HANA Schema and Tenant Management Tool"
    echo ""
    echo "Usage (Schema): $0 <USER_KEY> export|import <SCHEMA_NAME> <PATH> [OPTIONS]"
    echo "      (Schema): $0 <USER_KEY> import-rename <SCHEMA_NAME> <NEW_SCHEMA_NAME> <PATH> [OPTIONS]"
    echo "      (Tenant): $0 <USER_KEY> backup <PATH> [OPTIONS]"
    echo ""
    echo "Actions:"
    echo "  export           Export a schema to a specified path."
    echo "  import           Import a schema from a specified path."
    echo "  import-rename    Import a schema from a path to a new schema name."
    echo "  backup           Perform a full backup of the tenant."
    echo ""
    echo "Arguments:"
    echo "  USER_KEY         The user key from hdbuserstore for the DB connection."
    echo "  SCHEMA_NAME      The name of the source schema."
    echo "  NEW_SCHEMA_NAME  (Required for import-rename only) The target schema name."
    echo "  PATH             The file system path for the export/import/backup data."
    echo ""
    echo "Options:"
    echo "  -t, --threads N  Specify the number of threads (not used for 'backup')."
    echo "  -c, --compress   Enable tar.gz compression for exports and backups."
    echo "  -n, --dry-run    Show what commands would be executed without running them."
    echo "  --ntfy <token>   Send a notification via ntfy.sh upon completion/failure."
    echo "  --hdbsql <path>  Specify a custom path for the hdbsql executable."
    echo "  -h, --help       Show this help message."
    echo ""
    echo "Examples:"
    echo "  # Backup the tenant determined by MY_TENANT_KEY and compress the result"
    echo "  $0 MY_TENANT_KEY backup /hana/backups -c --ntfy tk_xxxxxxxxxxxx"
    echo ""
    echo "  # Import MYSCHEMA from a compressed archive"
    echo "  $0 MY_SCHEMA_KEY import MYSCHEMA /hana/backups/MYSCHEMA_20240101.tar.gz -c"
    echo ""
    echo "  # Import MYSCHEMA as MYSCHEMA_TEST using a custom hdbsql path"
    echo "  $0 MY_SCHEMA_KEY import-rename MYSCHEMA MYSCHEMA_TEST /hana/backups/temp_export --hdbsql /sap/custom/hdbsql"
}
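
# A user key can be created beforehand with the HANA client's hdbuserstore tool.
# Illustrative values only; host, port, user, and password depend on your system:
#   hdbuserstore SET MY_TENANT_KEY myhost:30015 BACKUP_USER 'MyS3cretPw'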

# --- Notification Function ---
send_notification() {
    local message="$1"
    if [[ -n "$NTFY_TOKEN" && "$DRY_RUN" == "false" ]]; then
        echo "ℹ️ Sending notification..."
        curl -s -H "Authorization: Bearer $NTFY_TOKEN" -d "$message" https://ntfy.technopunk.space/sap > /dev/null
    elif [[ -n "$NTFY_TOKEN" && "$DRY_RUN" == "true" ]]; then
        echo "[DRY RUN] Would send notification: curl -H \"Authorization: Bearer ...\" -d \"$message\" https://ntfy.technopunk.space/sap"
    fi
}
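
# Example: send_notification "✅ Export finished" sends the message only when
# --ntfy <token> was supplied; without a token the call is a no-op.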

# --- Argument Parsing ---
POSITIONAL_ARGS=()
while [[ $# -gt 0 ]]; do
    case $1 in
        -t|--threads)
            THREADS="$2"
            shift 2
            ;;
        -c|--compress)
            COMPRESS=true
            shift
            ;;
        -n|--dry-run)
            DRY_RUN=true
            shift
            ;;
        --ntfy)
            NTFY_TOKEN="$2"
            shift 2
            ;;
        --hdbsql)
            HDBSQL_PATH="$2"
            shift 2
            ;;
        -h|--help)
            usage
            exit 0
            ;;
        *)
            POSITIONAL_ARGS+=("$1") # save positional arg
            shift
            ;;
    esac
done
set -- "${POSITIONAL_ARGS[@]}" # restore positional parameters
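# After the restore above, the positional layout is:
#   $1 = USER_KEY, $2 = ACTION, $3.. = action-specific arguments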

# Assign common positional arguments
USER_KEY="$1"
ACTION="$2"

# --- Main Logic ---

if [[ "$DRY_RUN" == "true" ]]; then
    echo "⚠️ --- DRY RUN MODE ENABLED --- ⚠️"
    echo "No actual commands will be executed."
    echo "-------------------------------------"
fi

# Check for hdbsql executable
if [[ ! -x "$HDBSQL_PATH" ]]; then
    echo "❌ Error: hdbsql not found or not executable at '${HDBSQL_PATH}'"
    exit 1
fi

# Calculate default threads if not specified and action is not backup
if [[ "$THREADS" -eq 0 && "$ACTION" != "backup" ]]; then
    TOTAL_THREADS=$(nproc --all)
    THREADS=$((TOTAL_THREADS / 2))
    if [[ "$THREADS" -eq 0 ]]; then
        THREADS=1
    fi
    echo "ℹ️ Auto-detected threads to use: ${THREADS}"
fi
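# Worked example: with 16 logical CPUs, `nproc --all` prints 16 and THREADS
# becomes 16 / 2 = 8; on a single-CPU host the halving yields 0, so the
# floor of 1 thread applies instead.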

# Execute action based on user input
case "$ACTION" in
    backup)
        TARGET_PATH="$3"
        if [[ -z "$USER_KEY" || -z "$TARGET_PATH" ]]; then
            echo "❌ Error: Missing arguments for 'backup' action."
            usage
            exit 1
        fi

        echo "⬇️ Starting tenant backup..."
        echo " - User Key: ${USER_KEY}"
        echo " - Path: ${TARGET_PATH}"
        echo " - Compress: ${COMPRESS}"

        timestamp=$(date +%Y%m%d_%H%M%S)
        backup_target_dir=""
        backup_path_prefix=""

        if [[ "$COMPRESS" == "true" ]]; then
            if [[ "$DRY_RUN" == "true" ]]; then
                backup_target_dir="/tmp/tenant_backup_DRYRUN_TEMP"
            else
                backup_target_dir=$(mktemp -d "/tmp/tenant_backup_${timestamp}_XXXXXXXX")
                mkdir -p "$TARGET_PATH" # ensure the final archive destination exists
            fi
            echo "ℹ️ Using temporary backup directory: ${backup_target_dir}"
        else
            backup_target_dir="$TARGET_PATH"
        fi

        if [[ "$DRY_RUN" == "true" ]]; then
            echo "[DRY RUN] Would create directory: mkdir -p \"$backup_target_dir\""
        else
            mkdir -p "$backup_target_dir"
        fi

        backup_path_prefix="${backup_target_dir}/backup_${timestamp}"

        QUERY="BACKUP DATA USING FILE ('${backup_path_prefix}')"

        EXIT_CODE=0
        if [[ "$DRY_RUN" == "true" ]]; then
            echo "[DRY RUN] Would execute hdbsql: \"$HDBSQL_PATH\" -U \"$USER_KEY\" \"$QUERY\""
        else
            "$HDBSQL_PATH" -U "$USER_KEY" "$QUERY" > /dev/null 2>&1
            EXIT_CODE=$?
        fi

        if [[ "$EXIT_CODE" -eq 0 ]]; then
            echo "✅ Successfully initiated tenant backup with prefix '${backup_path_prefix}'."
            if [[ "$COMPRESS" == "true" ]]; then
                ARCHIVE_FILE="${TARGET_PATH}/tenant_backup_${timestamp}.tar.gz"
                echo "🗜️ Compressing backup files to '${ARCHIVE_FILE}'..."

                TAR_EXIT_CODE=0
                if [[ "$DRY_RUN" == "true" ]]; then
                    echo "[DRY RUN] Would execute tar: tar -czf \"$ARCHIVE_FILE\" -C \"$backup_target_dir\" ."
                else
                    tar -czf "$ARCHIVE_FILE" -C "$backup_target_dir" .
                    TAR_EXIT_CODE=$?
                fi

                if [[ "$TAR_EXIT_CODE" -eq 0 ]]; then
                    echo "✅ Successfully created archive."
                    echo "🧹 Cleaning up temporary directory..."
                    if [[ "$DRY_RUN" == "true" ]]; then
                        echo "[DRY RUN] Would remove temp directory: rm -rf \"$backup_target_dir\""
                    else
                        rm -rf "$backup_target_dir"
                    fi
                else
                    echo "❌ Error: Failed to create archive from '${backup_target_dir}'."
                fi
            fi
            send_notification "✅ Tenant backup for user key '${USER_KEY}' completed successfully."
        else
            echo "❌ Error: Failed to initiate tenant backup (hdbsql exit code: ${EXIT_CODE})."
            send_notification "❌ Tenant backup for user key '${USER_KEY}' FAILED."
            if [[ "$COMPRESS" == "true" && "$DRY_RUN" == "false" ]]; then rm -rf "$backup_target_dir"; fi
        fi
        ;;

    export)
        SCHEMA_NAME="$3"
        TARGET_PATH="$4"
        if [[ -z "$USER_KEY" || -z "$SCHEMA_NAME" || -z "$TARGET_PATH" ]]; then
            echo "❌ Error: Missing arguments for 'export' action."
            usage
            exit 1
        fi

        echo "⬇️ Starting schema export..."
        echo " - User Key: ${USER_KEY}"
        echo " - Schema: ${SCHEMA_NAME}"
        echo " - Path: ${TARGET_PATH}"
        echo " - Compress: ${COMPRESS}"
        echo " - Threads: ${THREADS}"

        EXPORT_DIR="$TARGET_PATH"
        if [[ "$COMPRESS" == "true" ]]; then
            if [[ "$DRY_RUN" == "true" ]]; then
                EXPORT_DIR="${TARGET_PATH}/export_${SCHEMA_NAME}_DRYRUN_TEMP"
            else
                mkdir -p "$TARGET_PATH" # mktemp needs an existing parent directory
                EXPORT_DIR=$(mktemp -d "${TARGET_PATH}/export_${SCHEMA_NAME}_XXXXXXXX")
            fi
            echo "ℹ️ Using temporary export directory: ${EXPORT_DIR}"
        fi

        if [[ "$DRY_RUN" == "true" ]]; then
            echo "[DRY RUN] Would create directory: mkdir -p \"$EXPORT_DIR\""
        else
            mkdir -p "$EXPORT_DIR"
        fi

        QUERY="EXPORT \"${SCHEMA_NAME}\".\"*\" AS BINARY INTO '${EXPORT_DIR}' WITH REPLACE THREADS ${THREADS};"

        EXIT_CODE=0
        if [[ "$DRY_RUN" == "true" ]]; then
            echo "[DRY RUN] Would execute hdbsql: \"$HDBSQL_PATH\" -U \"$USER_KEY\" \"$QUERY\""
        else
            "$HDBSQL_PATH" -U "$USER_KEY" "$QUERY" > /dev/null 2>&1
            EXIT_CODE=$?
        fi

        if [[ "$EXIT_CODE" -eq 0 ]]; then
            echo "✅ Successfully exported schema '${SCHEMA_NAME}' to '${EXPORT_DIR}'."
            if [[ "$COMPRESS" == "true" ]]; then
                ARCHIVE_FILE="${TARGET_PATH}/${SCHEMA_NAME}_$(date +%Y%m%d_%H%M%S).tar.gz"
                echo "🗜️ Compressing files to '${ARCHIVE_FILE}'..."

                TAR_EXIT_CODE=0
                if [[ "$DRY_RUN" == "true" ]]; then
                    echo "[DRY RUN] Would execute tar: tar -czf \"$ARCHIVE_FILE\" -C \"$(dirname "$EXPORT_DIR")\" \"$(basename "$EXPORT_DIR")\""
                else
                    tar -czf "$ARCHIVE_FILE" -C "$(dirname "$EXPORT_DIR")" "$(basename "$EXPORT_DIR")"
                    TAR_EXIT_CODE=$?
                fi

                if [[ "$TAR_EXIT_CODE" -eq 0 ]]; then
                    echo "✅ Successfully created archive."
                    echo "🧹 Cleaning up temporary directory..."
                    if [[ "$DRY_RUN" == "true" ]]; then
                        echo "[DRY RUN] Would remove temp directory: rm -rf \"$EXPORT_DIR\""
                    else
                        rm -rf "$EXPORT_DIR"
                    fi
                else
                    echo "❌ Error: Failed to create archive from '${EXPORT_DIR}'."
                fi
            fi
            send_notification "✅ Export of schema '${SCHEMA_NAME}' completed successfully."
        else
            echo "❌ Error: Failed to export schema '${SCHEMA_NAME}' (hdbsql exit code: ${EXIT_CODE})."
            send_notification "❌ Export of schema '${SCHEMA_NAME}' FAILED."
            if [[ "$COMPRESS" == "true" && "$DRY_RUN" == "false" ]]; then rm -rf "$EXPORT_DIR"; fi
        fi
        ;;

    import|import-rename)
        SCHEMA_NAME="$3"
        if [[ "$ACTION" == "import" ]]; then
            SOURCE_PATH="$4"
            NEW_SCHEMA_NAME=""
            if [[ -z "$USER_KEY" || -z "$SCHEMA_NAME" || -z "$SOURCE_PATH" ]]; then
                echo "❌ Error: Missing arguments for 'import' action."
                usage
                exit 1
            fi
        else # import-rename
            NEW_SCHEMA_NAME="$4"
            SOURCE_PATH="$5"
            if [[ -z "$USER_KEY" || -z "$SCHEMA_NAME" || -z "$NEW_SCHEMA_NAME" || -z "$SOURCE_PATH" ]]; then
                echo "❌ Error: Missing arguments for 'import-rename' action."
                usage
                exit 1
            fi
        fi

        echo "⬆️ Starting schema import..."
        echo " - User Key: ${USER_KEY}"
        echo " - Source Schema: ${SCHEMA_NAME}"
        if [[ -n "$NEW_SCHEMA_NAME" ]]; then
            echo " - Target Schema: ${NEW_SCHEMA_NAME}"
        fi
        echo " - Path: ${SOURCE_PATH}"
        echo " - Compress: ${COMPRESS}"
        echo " - Threads: ${THREADS}"

        IMPORT_DIR="$SOURCE_PATH"
        if [[ "$COMPRESS" == "true" ]]; then
            if [[ ! -f "$SOURCE_PATH" && "$DRY_RUN" == "false" ]]; then
                echo "❌ Error: Source path '${SOURCE_PATH}' is not a valid file for compressed import."
                exit 1
            fi

            if [[ "$DRY_RUN" == "true" ]]; then
                IMPORT_DIR="/tmp/import_${SCHEMA_NAME}_DRYRUN_TEMP"
            else
                IMPORT_DIR=$(mktemp -d "/tmp/import_${SCHEMA_NAME}_XXXXXXXX")
            fi

            echo "ℹ️ Decompressing to temporary directory: ${IMPORT_DIR}"

            TAR_EXIT_CODE=0
            if [[ "$DRY_RUN" == "true" ]]; then
                echo "[DRY RUN] Would decompress archive: tar -xzf \"$SOURCE_PATH\" -C \"$IMPORT_DIR\" --strip-components=1"
            else
                tar -xzf "$SOURCE_PATH" -C "$IMPORT_DIR" --strip-components=1
                TAR_EXIT_CODE=$?
            fi

            if [[ "$TAR_EXIT_CODE" -ne 0 ]]; then
                echo "❌ Error: Failed to decompress '${SOURCE_PATH}'."
                if [[ "$DRY_RUN" == "false" ]]; then rm -rf "$IMPORT_DIR"; fi
                exit 1
            fi
        fi

        if [[ ! -d "$IMPORT_DIR" && "$DRY_RUN" == "false" ]]; then
            echo "❌ Error: Import directory '${IMPORT_DIR}' does not exist."
            exit 1
        fi

        # The IMPORT statement takes a single WITH clause, so the optional
        # RENAME SCHEMA option is spliced in before the remaining options.
        QUERY_RENAME_PART=""
        if [[ "$ACTION" == "import-rename" ]]; then
            QUERY_RENAME_PART="RENAME SCHEMA \"${SCHEMA_NAME}\" TO \"${NEW_SCHEMA_NAME}\" "
        fi

        QUERY="IMPORT \"${SCHEMA_NAME}\".\"*\" AS BINARY FROM '${IMPORT_DIR}' WITH ${QUERY_RENAME_PART}IGNORE EXISTING THREADS ${THREADS};"

        EXIT_CODE=0
        if [[ "$DRY_RUN" == "true" ]]; then
            echo "[DRY RUN] Would execute hdbsql: \"$HDBSQL_PATH\" -U \"$USER_KEY\" \"$QUERY\""
        else
            "$HDBSQL_PATH" -U "$USER_KEY" "$QUERY" > /dev/null 2>&1
            EXIT_CODE=$?
        fi

        target_schema_name="${NEW_SCHEMA_NAME:-$SCHEMA_NAME}"
        if [[ "$EXIT_CODE" -eq 0 ]]; then
            echo "✅ Successfully imported schema."
            send_notification "✅ ${ACTION} of schema '${SCHEMA_NAME}' to '${target_schema_name}' completed successfully."
        else
            echo "❌ Error: Failed to import schema (hdbsql exit code: ${EXIT_CODE})."
            send_notification "❌ ${ACTION} of schema '${SCHEMA_NAME}' to '${target_schema_name}' FAILED."
        fi

        if [[ "$COMPRESS" == "true" ]]; then
            echo "🧹 Cleaning up temporary directory..."
            if [[ "$DRY_RUN" == "true" ]]; then
                echo "[DRY RUN] Would remove temp directory: rm -rf \"$IMPORT_DIR\""
            else
                rm -rf "$IMPORT_DIR"
            fi
        fi
        ;;

    *)
        echo "❌ Error: Invalid action '${ACTION}'."
        usage
        exit 1
        ;;
esac

echo "✅ Process complete."