update hanatool, add notification and dry-run

2025-09-22 14:36:08 +02:00
parent 95e86f3e60
commit b81915190b
2 changed files with 107 additions and 34 deletions
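The two new flags combine as in the updated usage text. A quick sketch of invocations (user key, schema, paths, and the ntfy token are placeholders):

    # Preview an export: prints each mkdir/hdbsql/tar/rm it would run, executes nothing
    ./hanatool.sh MYKEY export MYSCHEMA /hana/backups -c --dry-run

    # Real export, pushing a success/failure message to the ntfy topic afterwards
    ./hanatool.sh MYKEY export MYSCHEMA /hana/backups -c --ntfy tk_xxxxxxxxxxxx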

hanatool.sh

@@ -1,5 +1,5 @@
 #!/bin/bash
-# Version: 1.0.4
+# Version: 1.3.0
 # ==============================================================================
 # SAP HANA Schema Management Tool (hanatool.sh)
 #
@@ -11,6 +11,8 @@
 HDBSQL_PATH="/usr/sap/hdbclient/hdbsql"
 COMPRESS=false
 THREADS=0 # 0 means auto-calculate later
+DRY_RUN=false
+NTFY_TOKEN=""
 
 # --- Help/Usage Function ---
 usage() {
@@ -33,37 +35,53 @@ usage() {
     echo "Options:"
     echo "  -t, --threads N   Specify the number of threads. Defaults to half of system cores."
     echo "  -c, --compress    Enable tar.gz compression for exports and decompression for imports."
+    echo "  -n, --dry-run     Show what commands would be executed without running them."
+    echo "  --ntfy <token>    Send a notification via ntfy.sh upon completion/failure."
     echo "  --hdbsql <path>   Specify a custom path for the hdbsql executable."
     echo "  -h, --help        Show this help message."
     echo ""
     echo "Examples:"
-    echo "  # Export MYSCHEMA to /hana/backups/temp_export using compression and 8 threads"
-    echo "  $0 MYKEY export MYSCHEMA /hana/backups/temp_export -c -t 8"
+    echo "  # Export MYSCHEMA and send a notification on completion"
+    echo "  $0 MYKEY export MYSCHEMA /hana/backups -c --ntfy tk_xxxxxxxxxxxx"
     echo ""
-    echo "  # Restore MYSCHEMA from a compressed archive"
-    echo "  $0 MYKEY restore MYSCHEMA /hana/backups/MYSCHEMA_20240101.tar.gz -c"
-    echo ""
-    echo "  # Import MYSCHEMA as MYSCHEMA_TEST using a custom hdbsql path"
+    echo "  # Restore MYSCHEMA as MYSCHEMA_TEST using a custom hdbsql path"
     echo "  $0 MYKEY restore-rename MYSCHEMA MYSCHEMA_TEST /hana/backups/temp_export --hdbsql /sap/custom/hdbsql"
 }
 
+# --- Notification Function ---
+send_notification() {
+    local message="$1"
+    if [[ -n "$NTFY_TOKEN" && "$DRY_RUN" == "false" ]]; then
+        echo " Sending notification..."
+        curl -s -H "Authorization: Bearer $NTFY_TOKEN" -d "$message" https://ntfy.technopunk.space/sap > /dev/null
+    elif [[ -n "$NTFY_TOKEN" && "$DRY_RUN" == "true" ]]; then
+        echo "[DRY RUN] Would send notification: curl -H \"Authorization: Bearer ...\" -d \"$message\" https://ntfy.technopunk.space/sap"
+    fi
+}
+
 # --- Argument Parsing ---
 POSITIONAL_ARGS=()
 while [[ $# -gt 0 ]]; do
     case $1 in
         -t|--threads)
             THREADS="$2"
-            shift # past argument
-            shift # past value
+            shift 2
             ;;
         -c|--compress)
             COMPRESS=true
-            shift # past argument
+            shift
+            ;;
+        -n|--dry-run)
+            DRY_RUN=true
+            shift
+            ;;
+        --ntfy)
+            NTFY_TOKEN="$2"
+            shift 2
             ;;
         --hdbsql)
             HDBSQL_PATH="$2"
-            shift # past argument
-            shift # past value
+            shift 2
             ;;
         -h|--help)
             usage
@@ -71,7 +89,7 @@ while [[ $# -gt 0 ]]; do
             ;;
         *)
             POSITIONAL_ARGS+=("$1") # save positional arg
-            shift # past argument
+            shift
             ;;
     esac
 done
@@ -84,6 +102,12 @@ SCHEMA_NAME="$3"
 
 # --- Main Logic ---
 
+if [[ "$DRY_RUN" == "true" ]]; then
+    echo "⚠️ --- DRY RUN MODE ENABLED --- ⚠️"
+    echo "No actual commands will be executed."
+    echo "-------------------------------------"
+fi
+
 # Check for hdbsql executable
 if [[ ! -x "$HDBSQL_PATH" ]]; then
     echo "❌ Error: hdbsql not found or not executable at '${HDBSQL_PATH}'"
@@ -119,37 +143,61 @@ case "$ACTION" in
         EXPORT_DIR="$TARGET_PATH"
         if [[ "$COMPRESS" == "true" ]]; then
-            # Use a temporary directory for the raw export
-            EXPORT_DIR=$(mktemp -d "${TARGET_PATH}/export_${SCHEMA_NAME}_XXXXXXXX")
+            if [[ "$DRY_RUN" == "true" ]]; then
+                EXPORT_DIR="${TARGET_PATH}/export_${SCHEMA_NAME}_DRYRUN_TEMP"
+            else
+                EXPORT_DIR=$(mktemp -d "${TARGET_PATH}/export_${SCHEMA_NAME}_XXXXXXXX")
+            fi
             echo " Using temporary export directory: ${EXPORT_DIR}"
         fi
 
-        mkdir -p "$EXPORT_DIR"
+        if [[ "$DRY_RUN" == "true" ]]; then
+            echo "[DRY RUN] Would create directory: mkdir -p \"$EXPORT_DIR\""
+        else
+            mkdir -p "$EXPORT_DIR"
+        fi
 
         QUERY="EXPORT \"${SCHEMA_NAME}\".\"*\" AS BINARY INTO '${EXPORT_DIR}' WITH REPLACE THREADS ${THREADS};"
 
-        "$HDBSQL_PATH" -U "$USER_KEY" "$QUERY" > /dev/null 2>&1
-        EXIT_CODE=$?
+        EXIT_CODE=0
+        if [[ "$DRY_RUN" == "true" ]]; then
+            echo "[DRY RUN] Would execute hdbsql: \"$HDBSQL_PATH\" -U \"$USER_KEY\" \"$QUERY\""
+        else
+            "$HDBSQL_PATH" -U "$USER_KEY" "$QUERY" > /dev/null 2>&1
+            EXIT_CODE=$?
+        fi
 
         if [[ "$EXIT_CODE" -eq 0 ]]; then
             echo "✅ Successfully exported schema '${SCHEMA_NAME}' to '${EXPORT_DIR}'."
             if [[ "$COMPRESS" == "true" ]]; then
                 ARCHIVE_FILE="${TARGET_PATH}/${SCHEMA_NAME}_$(date +%Y%m%d_%H%M%S).tar.gz"
                 echo "🗜️ Compressing files to '${ARCHIVE_FILE}'..."
-                tar -czf "$ARCHIVE_FILE" -C "$(dirname "$EXPORT_DIR")" "$(basename "$EXPORT_DIR")"
-                TAR_EXIT_CODE=$?
+                TAR_EXIT_CODE=0
+                if [[ "$DRY_RUN" == "true" ]]; then
+                    echo "[DRY RUN] Would execute tar: tar -czf \"$ARCHIVE_FILE\" -C \"$(dirname "$EXPORT_DIR")\" \"$(basename "$EXPORT_DIR")\""
+                else
+                    tar -czf "$ARCHIVE_FILE" -C "$(dirname "$EXPORT_DIR")" "$(basename "$EXPORT_DIR")"
+                    TAR_EXIT_CODE=$?
+                fi
                 if [[ "$TAR_EXIT_CODE" -eq 0 ]]; then
                     echo "✅ Successfully created archive."
                     echo "🧹 Cleaning up temporary directory..."
-                    rm -rf "$EXPORT_DIR"
+                    if [[ "$DRY_RUN" == "true" ]]; then
+                        echo "[DRY RUN] Would remove temp directory: rm -rf \"$EXPORT_DIR\""
+                    else
+                        rm -rf "$EXPORT_DIR"
+                    fi
                 else
                     echo "❌ Error: Failed to create archive from '${EXPORT_DIR}'."
                 fi
             fi
+            send_notification "✅ Export of schema '${SCHEMA_NAME}' completed successfully."
         else
             echo "❌ Error: Failed to export schema '${SCHEMA_NAME}' (hdbsql exit code: ${EXIT_CODE})."
-            # Clean up temp dir on failure
-            if [[ "$COMPRESS" == "true" ]]; then rm -rf "$EXPORT_DIR"; fi
+            send_notification "❌ Export of schema '${SCHEMA_NAME}' FAILED."
+            if [[ "$COMPRESS" == "true" && "$DRY_RUN" == "false" ]]; then rm -rf "$EXPORT_DIR"; fi
         fi
         ;;
@@ -184,22 +232,35 @@
         IMPORT_DIR="$SOURCE_PATH"
         if [[ "$COMPRESS" == "true" ]]; then
-            if [[ ! -f "$SOURCE_PATH" ]]; then
+            if [[ ! -f "$SOURCE_PATH" && "$DRY_RUN" == "false" ]]; then
                 echo "❌ Error: Source path '${SOURCE_PATH}' is not a valid file for compressed import."
                 exit 1
             fi
 
-            IMPORT_DIR=$(mktemp -d "/tmp/import_${SCHEMA_NAME}_XXXXXXXX")
+            if [[ "$DRY_RUN" == "true" ]]; then
+                IMPORT_DIR="/tmp/import_${SCHEMA_NAME}_DRYRUN_TEMP"
+            else
+                IMPORT_DIR=$(mktemp -d "/tmp/import_${SCHEMA_NAME}_XXXXXXXX")
+            fi
             echo " Decompressing to temporary directory: ${IMPORT_DIR}"
 
-            tar -xzf "$SOURCE_PATH" -C "$IMPORT_DIR" --strip-components=1
-            TAR_EXIT_CODE=$?
+            TAR_EXIT_CODE=0
+            if [[ "$DRY_RUN" == "true" ]]; then
+                echo "[DRY RUN] Would decompress archive: tar -xzf \"$SOURCE_PATH\" -C \"$IMPORT_DIR\" --strip-components=1"
+            else
+                tar -xzf "$SOURCE_PATH" -C "$IMPORT_DIR" --strip-components=1
+                TAR_EXIT_CODE=$?
+            fi
             if [[ "$TAR_EXIT_CODE" -ne 0 ]]; then
                 echo "❌ Error: Failed to decompress '${SOURCE_PATH}'."
-                rm -rf "$IMPORT_DIR"
+                if [[ "$DRY_RUN" == "false" ]]; then rm -rf "$IMPORT_DIR"; fi
                 exit 1
             fi
         fi
 
-        if [[ ! -d "$IMPORT_DIR" ]]; then
+        if [[ ! -d "$IMPORT_DIR" && "$DRY_RUN" == "false" ]]; then
             echo "❌ Error: Import directory '${IMPORT_DIR}' does not exist."
             exit 1
         fi
@@ -211,19 +272,31 @@ case "$ACTION" in
         QUERY="IMPORT \"${SCHEMA_NAME}\".\"*\" AS BINARY FROM '${IMPORT_DIR}' ${QUERY_RENAME_PART} WITH IGNORE EXISTING THREADS ${THREADS};"
 
-        "$HDBSQL_PATH" -U "$USER_KEY" "$QUERY" > /dev/null 2>&1
-        EXIT_CODE=$?
+        EXIT_CODE=0
+        if [[ "$DRY_RUN" == "true" ]]; then
+            echo "[DRY RUN] Would execute hdbsql: \"$HDBSQL_PATH\" -U \"$USER_KEY\" \"$QUERY\""
+        else
+            "$HDBSQL_PATH" -U "$USER_KEY" "$QUERY" > /dev/null 2>&1
+            EXIT_CODE=$?
+        fi
+
+        # Note: "local" dropped here; this runs at script level, outside any
+        # function, where bash rejects the local keyword.
+        target_schema_name="${NEW_SCHEMA_NAME:-$SCHEMA_NAME}"
 
         if [[ "$EXIT_CODE" -eq 0 ]]; then
             echo "✅ Successfully imported schema."
+            send_notification "${ACTION} of schema '${SCHEMA_NAME}' to '${target_schema_name}' completed successfully."
         else
             echo "❌ Error: Failed to import schema (hdbsql exit code: ${EXIT_CODE})."
+            send_notification "${ACTION} of schema '${SCHEMA_NAME}' to '${target_schema_name}' FAILED."
         fi
 
         if [[ "$COMPRESS" == "true" ]]; then
             echo "🧹 Cleaning up temporary directory..."
-            rm -rf "$IMPORT_DIR"
+            if [[ "$DRY_RUN" == "true" ]]; then
+                echo "[DRY RUN] Would remove temp directory: rm -rf \"$IMPORT_DIR\""
+            else
+                rm -rf "$IMPORT_DIR"
+            fi
         fi
         ;;
     *)
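The dry-run support above guards every side effect with the same if/else block. As a design note, a single wrapper could express that pattern; a minimal sketch (the run_or_echo helper is hypothetical, not part of hanatool.sh):

    # Hypothetical helper: print the command under --dry-run, execute it otherwise.
    run_or_echo() {
        if [[ "$DRY_RUN" == "true" ]]; then
            echo "[DRY RUN] Would execute: $*"
        else
            "$@"
        fi
    }

    run_or_echo mkdir -p "$EXPORT_DIR"
    run_or_echo rm -rf "$EXPORT_DIR"

Such a helper would collapse the repeated branches, though the hdbsql and tar calls would still need explicit guards as written, since they also capture exit codes and redirect output per call.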

View File

@@ -11,6 +11,6 @@ SCRIPT_PACKAGES["Aurora Suite"]="1.1.0|https://git.technopunk.space/tomi/Scripts
 SCRIPT_PACKAGES["Backup Suite"]="1.0.5|https://git.technopunk.space/tomi/Scripts/raw/branch/main/backup/backup.sh https://git.technopunk.space/tomi/Scripts/raw/branch/main/backup/backup.conf"
 SCRIPT_PACKAGES["Key Manager"]="1.2.1|https://git.technopunk.space/tomi/Scripts/raw/branch/main/hdb_keymanager.sh"
 SCRIPT_PACKAGES["File Cleaner"]="1.1.0|https://git.technopunk.space/tomi/Scripts/raw/branch/main/clean.sh"
-SCRIPT_PACKAGES["HANA Tool"]="1.0.4|https://git.technopunk.space/tomi/Scripts/raw/branch/main/hanatool.sh"
+SCRIPT_PACKAGES["HANA Tool"]="1.3.0|https://git.technopunk.space/tomi/Scripts/raw/branch/main/hanatool.sh"
 # Example: Add a new script with its version.
 # SCRIPT_PACKAGES["My Other Script"]="1.0.0|https://path/to/my-other-script.sh"
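Note that the version lives in two places that must move together: the "# Version:" header inside hanatool.sh and the pinned SCRIPT_PACKAGES entry here (both bumped from 1.0.4 to 1.3.0 in this commit). A hypothetical consistency check, assuming both files sit in one checkout and the updater config is named updater.sh (its real filename is not shown in this view):

    # Compare hanatool.sh's header version with the version pinned in the updater config.
    script_ver=$(grep -m1 '^# Version:' hanatool.sh | awk '{print $3}')
    pinned_ver=$(grep -o 'HANA Tool"]="[0-9.]*' updater.sh | cut -d'"' -f3)
    [[ "$script_ver" == "$pinned_ver" ]] || echo "Version mismatch: script=$script_ver, updater=$pinned_ver"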