diff --git a/hanatool.sh b/hanatool.sh index 0210485..2b6809e 100644 --- a/hanatool.sh +++ b/hanatool.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Version: 1.0.4 +# Version: 1.3.0 # ============================================================================== # SAP HANA Schema Management Tool (hanatool.sh) # @@ -11,6 +11,8 @@ HDBSQL_PATH="/usr/sap/hdbclient/hdbsql" COMPRESS=false THREADS=0 # 0 means auto-calculate later +DRY_RUN=false +NTFY_TOKEN="" # --- Help/Usage Function --- usage() { @@ -22,7 +24,7 @@ usage() { echo "Actions:" echo " export Export a schema to a specified path." echo " restore Restore a schema from a specified path." - echo " restore-rename Restore a schema from a path to a new schema name." + echo " restore-rename Restore a schema from a path to a new schema name." echo "" echo "Arguments:" echo " USER_KEY The user key from hdbuserstore for DB connection." @@ -33,37 +35,53 @@ usage() { echo "Options:" echo " -t, --threads N Specify the number of threads. Defaults to half of system cores." echo " -c, --compress Enable tar.gz compression for exports and decompression for imports." + echo " -n, --dry-run Show what commands would be executed without running them." + echo " --ntfy TOKEN Send a notification via ntfy upon completion/failure." echo " --hdbsql Specify a custom path for the hdbsql executable." echo " -h, --help Show this help message." 
echo "" echo "Examples:" - echo " # Export MYSCHEMA to /hana/backups/temp_export using compression and 8 threads" - echo " $0 MYKEY export MYSCHEMA /hana/backups/temp_export -c -t 8" + echo " # Export MYSCHEMA and send a notification on completion" + echo " $0 MYKEY export MYSCHEMA /hana/backups -c --ntfy tk_xxxxxxxxxxxx" echo "" - echo " # Restore MYSCHEMA from a compressed archive" - echo " $0 MYKEY restore MYSCHEMA /hana/backups/MYSCHEMA_20240101.tar.gz -c" - echo "" - echo " # Import MYSCHEMA as MYSCHEMA_TEST using a custom hdbsql path" + echo " # Restore MYSCHEMA as MYSCHEMA_TEST using a custom hdbsql path" echo " $0 MYKEY restore-rename MYSCHEMA MYSCHEMA_TEST /hana/backups/temp_export --hdbsql /sap/custom/hdbsql" } +# --- Notification Function --- +send_notification() { + local message="$1" + if [[ -n "$NTFY_TOKEN" && "$DRY_RUN" == "false" ]]; then + echo "ℹ️ Sending notification..." + curl -s -H "Authorization: Bearer $NTFY_TOKEN" -d "$message" https://ntfy.technopunk.space/sap > /dev/null + elif [[ -n "$NTFY_TOKEN" && "$DRY_RUN" == "true" ]]; then + echo "[DRY RUN] Would send notification: curl -H \"Authorization: Bearer ...\" -d \"$message\" https://ntfy.technopunk.space/sap" + fi +} + # --- Argument Parsing --- POSITIONAL_ARGS=() while [[ $# -gt 0 ]]; do case $1 in -t|--threads) THREADS="$2" - shift # past argument - shift # past value + shift 2 ;; -c|--compress) COMPRESS=true - shift # past argument + shift + ;; + -n|--dry-run) + DRY_RUN=true + shift + ;; + --ntfy) + NTFY_TOKEN="$2" + shift 2 ;; --hdbsql) HDBSQL_PATH="$2" - shift # past argument - shift # past value + shift 2 ;; -h|--help) usage @@ -71,7 +89,7 @@ while [[ $# -gt 0 ]]; do ;; *) POSITIONAL_ARGS+=("$1") # save positional arg - shift # past argument + shift ;; esac done @@ -84,6 +102,12 @@ SCHEMA_NAME="$3" # --- Main Logic --- +if [[ "$DRY_RUN" == "true" ]]; then + echo "⚠️ --- DRY RUN MODE ENABLED --- ⚠️" + echo "No actual commands will be executed." 
+ echo "-------------------------------------" +fi + # Check for hdbsql executable if [[ ! -x "$HDBSQL_PATH" ]]; then echo "❌ Error: hdbsql not found or not executable at '${HDBSQL_PATH}'" @@ -119,37 +143,61 @@ case "$ACTION" in EXPORT_DIR="$TARGET_PATH" if [[ "$COMPRESS" == "true" ]]; then - # Use a temporary directory for the raw export - EXPORT_DIR=$(mktemp -d "${TARGET_PATH}/export_${SCHEMA_NAME}_XXXXXXXX") + if [[ "$DRY_RUN" == "true" ]]; then + EXPORT_DIR="${TARGET_PATH}/export_${SCHEMA_NAME}_DRYRUN_TEMP" + else + EXPORT_DIR=$(mktemp -d "${TARGET_PATH}/export_${SCHEMA_NAME}_XXXXXXXX") + fi echo "ℹ️ Using temporary export directory: ${EXPORT_DIR}" fi - mkdir -p "$EXPORT_DIR" + if [[ "$DRY_RUN" == "true" ]]; then + echo "[DRY RUN] Would create directory: mkdir -p \"$EXPORT_DIR\"" + else + mkdir -p "$EXPORT_DIR" + fi QUERY="EXPORT \"${SCHEMA_NAME}\".\"*\" AS BINARY INTO '${EXPORT_DIR}' WITH REPLACE THREADS ${THREADS};" - "$HDBSQL_PATH" -U "$USER_KEY" "$QUERY" > /dev/null 2>&1 - EXIT_CODE=$? + EXIT_CODE=0 + if [[ "$DRY_RUN" == "true" ]]; then + echo "[DRY RUN] Would execute hdbsql: \"$HDBSQL_PATH\" -U \"$USER_KEY\" \"$QUERY\"" + else + "$HDBSQL_PATH" -U "$USER_KEY" "$QUERY" > /dev/null 2>&1 + EXIT_CODE=$? + fi if [[ "$EXIT_CODE" -eq 0 ]]; then echo "✅ Successfully exported schema '${SCHEMA_NAME}' to '${EXPORT_DIR}'." if [[ "$COMPRESS" == "true" ]]; then ARCHIVE_FILE="${TARGET_PATH}/${SCHEMA_NAME}_$(date +%Y%m%d_%H%M%S).tar.gz" echo "🗜️ Compressing files to '${ARCHIVE_FILE}'..." - tar -czf "$ARCHIVE_FILE" -C "$(dirname "$EXPORT_DIR")" "$(basename "$EXPORT_DIR")" - TAR_EXIT_CODE=$? + + TAR_EXIT_CODE=0 + if [[ "$DRY_RUN" == "true" ]]; then + echo "[DRY RUN] Would execute tar: tar -czf \"$ARCHIVE_FILE\" -C \"$(dirname "$EXPORT_DIR")\" \"$(basename "$EXPORT_DIR")\"" + else + tar -czf "$ARCHIVE_FILE" -C "$(dirname "$EXPORT_DIR")" "$(basename "$EXPORT_DIR")" + TAR_EXIT_CODE=$? + fi + if [[ "$TAR_EXIT_CODE" -eq 0 ]]; then echo "✅ Successfully created archive." 
echo "🧹 Cleaning up temporary directory..." - rm -rf "$EXPORT_DIR" + if [[ "$DRY_RUN" == "true" ]]; then + echo "[DRY RUN] Would remove temp directory: rm -rf \"$EXPORT_DIR\"" + else + rm -rf "$EXPORT_DIR" + fi else echo "❌ Error: Failed to create archive from '${EXPORT_DIR}'." fi fi + send_notification "✅ Export of schema '${SCHEMA_NAME}' completed successfully." else echo "❌ Error: Failed to export schema '${SCHEMA_NAME}' (hdbsql exit code: ${EXIT_CODE})." - # Clean up temp dir on failure - if [[ "$COMPRESS" == "true" ]]; then rm -rf "$EXPORT_DIR"; fi + send_notification "❌ Export of schema '${SCHEMA_NAME}' FAILED." + if [[ "$COMPRESS" == "true" && "$DRY_RUN" == "false" ]]; then rm -rf "$EXPORT_DIR"; fi fi ;; @@ -184,22 +232,35 @@ case "$ACTION" in IMPORT_DIR="$SOURCE_PATH" if [[ "$COMPRESS" == "true" ]]; then - if [[ ! -f "$SOURCE_PATH" ]]; then + if [[ ! -f "$SOURCE_PATH" && "$DRY_RUN" == "false" ]]; then echo "❌ Error: Source path '${SOURCE_PATH}' is not a valid file for compressed import." exit 1 fi - IMPORT_DIR=$(mktemp -d "/tmp/import_${SCHEMA_NAME}_XXXXXXXX") + + if [[ "$DRY_RUN" == "true" ]]; then + IMPORT_DIR="/tmp/import_${SCHEMA_NAME}_DRYRUN_TEMP" + else + IMPORT_DIR=$(mktemp -d "/tmp/import_${SCHEMA_NAME}_XXXXXXXX") + fi + echo "ℹ️ Decompressing to temporary directory: ${IMPORT_DIR}" - tar -xzf "$SOURCE_PATH" -C "$IMPORT_DIR" --strip-components=1 - TAR_EXIT_CODE=$? + + TAR_EXIT_CODE=0 + if [[ "$DRY_RUN" == "true" ]]; then + echo "[DRY RUN] Would decompress archive: tar -xzf \"$SOURCE_PATH\" -C \"$IMPORT_DIR\" --strip-components=1" + else + tar -xzf "$SOURCE_PATH" -C "$IMPORT_DIR" --strip-components=1 + TAR_EXIT_CODE=$? + fi + if [[ "$TAR_EXIT_CODE" -ne 0 ]]; then echo "❌ Error: Failed to decompress '${SOURCE_PATH}'." - rm -rf "$IMPORT_DIR" + if [[ "$DRY_RUN" == "false" ]]; then rm -rf "$IMPORT_DIR"; fi exit 1 fi fi - if [[ ! -d "$IMPORT_DIR" ]]; then + if [[ ! 
-d "$IMPORT_DIR" && "$DRY_RUN" == "false" ]]; then echo "❌ Error: Import directory '${IMPORT_DIR}' does not exist." exit 1 fi @@ -211,18 +272,30 @@ case "$ACTION" in QUERY="IMPORT \"${SCHEMA_NAME}\".\"*\" AS BINARY FROM '${IMPORT_DIR}' ${QUERY_RENAME_PART} WITH IGNORE EXISTING THREADS ${THREADS};" - "$HDBSQL_PATH" -U "$USER_KEY" "$QUERY" > /dev/null 2>&1 - EXIT_CODE=$? + EXIT_CODE=0 + if [[ "$DRY_RUN" == "true" ]]; then + echo "[DRY RUN] Would execute hdbsql: \"$HDBSQL_PATH\" -U \"$USER_KEY\" \"$QUERY\"" + else + "$HDBSQL_PATH" -U "$USER_KEY" "$QUERY" > /dev/null 2>&1 + EXIT_CODE=$? + fi + target_schema_name="${NEW_SCHEMA_NAME:-$SCHEMA_NAME}" if [[ "$EXIT_CODE" -eq 0 ]]; then echo "✅ Successfully imported schema." + send_notification "✅ ${ACTION} of schema '${SCHEMA_NAME}' to '${target_schema_name}' completed successfully." else echo "❌ Error: Failed to import schema (hdbsql exit code: ${EXIT_CODE})." + send_notification "❌ ${ACTION} of schema '${SCHEMA_NAME}' to '${target_schema_name}' FAILED." fi if [[ "$COMPRESS" == "true" ]]; then echo "🧹 Cleaning up temporary directory..." 
- rm -rf "$IMPORT_DIR" + if [[ "$DRY_RUN" == "true" ]]; then + echo "[DRY RUN] Would remove temp directory: rm -rf \"$IMPORT_DIR\"" + else + rm -rf "$IMPORT_DIR" + fi fi ;; diff --git a/packages.conf b/packages.conf index e3ed07b..cf639d4 100644 --- a/packages.conf +++ b/packages.conf @@ -11,6 +11,6 @@ SCRIPT_PACKAGES["Aurora Suite"]="1.1.0|https://git.technopunk.space/tomi/Scripts SCRIPT_PACKAGES["Backup Suite"]="1.0.5|https://git.technopunk.space/tomi/Scripts/raw/branch/main/backup/backup.sh https://git.technopunk.space/tomi/Scripts/raw/branch/main/backup/backup.conf" SCRIPT_PACKAGES["Key Manager"]="1.2.1|https://git.technopunk.space/tomi/Scripts/raw/branch/main/hdb_keymanager.sh" SCRIPT_PACKAGES["File Cleaner"]="1.1.0|https://git.technopunk.space/tomi/Scripts/raw/branch/main/clean.sh" -SCRIPT_PACKAGES["HANA Tool"]="1.0.4|https://git.technopunk.space/tomi/Scripts/raw/branch/main/hanatool.sh" +SCRIPT_PACKAGES["HANA Tool"]="1.3.0|https://git.technopunk.space/tomi/Scripts/raw/branch/main/hanatool.sh" # Example: Add a new script with its version. # SCRIPT_PACKAGES["My Other Script"]="1.0.0|https://path/to/my-other-script.sh"