aurora
aurora/aurora.conf (new file, 33 lines)
@@ -0,0 +1,33 @@
# Configuration for the HANA Aurora Refresh Script
# Place this file in the same directory as the aurora.sh script.

# --- Main Settings ---

# The source production schema to be copied.
SCHEMA="SBO_DEMO"

# The user who will be granted privileges on the new Aurora schema.
AURORA_SCHEMA_USER="B1_53424F5F4348494D5045585F4155524F5241_RW"

# The database user for performing backup and administrative tasks.
BACKOP_USER="CRONKEY"


# --- Paths and Files ---

# The root directory where the script and its associated files are located.
SCRIPT_ROOT="/usr/sap/NDB/home/tools"

# The base directory for storing the temporary schema export.
BACKUP_DIR="/hana/shared/backup/schema"

# The full path to the HANA hdbsql executable.
HDBSQL="/usr/sap/NDB/HDB00/exe/hdbsql"


# --- Post-Import Scripts ---

# A space-separated list of SQL script files to run after the import is complete.
# These scripts should be located in the SCRIPT_ROOT directory.
#POST_SQL="NAVO_PARAMS.sql GRANT_OLI_ARF.sql"
POST_SQL=""
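Because the refresh script connects with hdbsql -U, the BACKOP_USER value must exist as an hdbuserstore key on the host before anything else works. A minimal setup sketch, assuming the HANA client's hdbuserstore binary and placeholder connection details (host, port, tenant, user, and password below are assumptions, not values taken from this commit):

    # Create the CRONKEY secure user store key (connection details are placeholders)
    /usr/sap/hdbclient/hdbuserstore SET CRONKEY "localhost:30015@NDB" SYSTEM '<password>'
    # Confirm the key resolves as expected
    /usr/sap/hdbclient/hdbuserstore LIST CRONKEY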
aurora/aurora.sh (new file, 125 lines)
@@ -0,0 +1,125 @@
#!/bin/sh

# Exit immediately if a command exits with a non-zero status.
set -e

# === SETUP ===
# Determine script's directory and source the configuration file.
SCRIPT_DIR=$(dirname "$0")
CONFIG_FILE="${SCRIPT_DIR}/aurora.conf"

if [ ! -f "$CONFIG_FILE" ]; then
    echo "Error: Configuration file not found at ${CONFIG_FILE}"
    exit 1
fi
# shellcheck source=aurora.conf
. "$CONFIG_FILE"

# === DERIVED VARIABLES ===
TIMESTAMP=$(date "+%Y-%m-%d %H:%M:%S")
AURORA="${SCHEMA}_AURORA"
AURORA_TEMP_DIR="${BACKUP_DIR}/${AURORA}"
LOGFILE="${SCRIPT_ROOT}/aurora.log"
temp_compnyname=${SCHEMA#SBO_} # Remove SBO_ prefix
COMPNYNAME=${temp_compnyname%_PROD} # Remove _PROD suffix if it exists

# === FUNCTIONS ===

log() { echo "$(date +"%Y-%m-%d %H:%M:%S") - $1" | tee -a "$LOGFILE"; }
run_sql() {
    log "Executing: $1"
    "$HDBSQL" -U "${BACKOP_USER}" "$1" >/dev/null
}

show_info() {
    echo "Source Schema: ${SCHEMA}"
    echo "Target Schema: ${AURORA}"
    echo "Target Schema User: ${AURORA_SCHEMA_USER}"
    echo "Company Name: ${COMPNYNAME}"
    echo "Export Directory: ${AURORA_TEMP_DIR}"
    echo "Log File: ${LOGFILE}"
}

usage() {
    echo "Usage: $0 [-n | -c | -i]"
    echo " -n (new) : Export, import, and rename. (No privileges or post-scripts)"
    echo " -c (complete) : Drop, export, import, grant privileges, and run post-scripts."
    echo " -i (info) : Show configuration information."
}

export_schema() {
    log "Starting schema export for '${SCHEMA}'."
    mkdir -p "$AURORA_TEMP_DIR"
    run_sql "EXPORT \"${SCHEMA}\".\"*\" AS BINARY INTO '$AURORA_TEMP_DIR' WITH REPLACE;"
    log "Schema export completed."
}

import_and_rename() {
    log "Starting import and rename to '${AURORA}'."
    run_sql "IMPORT \"${SCHEMA}\".\"*\" FROM '$AURORA_TEMP_DIR' WITH RENAME SCHEMA \"${SCHEMA}\" TO \"${AURORA}\";"
    log "Updating company name fields."
    local update_sql="
    UPDATE \"${AURORA}\".CINF SET \"CompnyName\"='AURORA ${COMPNYNAME} ${TIMESTAMP}';
    UPDATE \"${AURORA}\".OADM SET \"CompnyName\"='AURORA ${COMPNYNAME} ${TIMESTAMP}';
    UPDATE \"${AURORA}\".OADM SET \"PrintHeadr\"='AURORA ${COMPNYNAME} ${TIMESTAMP}';"
    "$HDBSQL" -U "${BACKOP_USER}" -c ";" -I - <<EOF
${update_sql}
EOF
    log "Import and rename completed."
}

grant_privileges() {
    log "Granting privileges on '${AURORA}' to '${AURORA_SCHEMA_USER}'."
    run_sql "GRANT ALL PRIVILEGES ON SCHEMA \"${AURORA}\" TO \"${AURORA_SCHEMA_USER}\";"
    log "Privileges granted."
}

drop_aurora_schema() {
    log "Dropping existing '${AURORA}' schema."
    "$HDBSQL" -U "${BACKOP_USER}" "DROP SCHEMA \"${AURORA}\" CASCADE;" >/dev/null 2>&1 || log "Could not drop schema '${AURORA}'. It might not exist."
    log "Old schema dropped."
}

run_post_scripts() {
    log "Running post-import SQL scripts: ${POST_SQL}"
    for sql_file in $POST_SQL; do
        log "Running script: ${sql_file}"
        "$HDBSQL" -U "${BACKOP_USER}" -I "${SCRIPT_ROOT}/${sql_file}"
    done
    log "All post-import scripts completed."
}

# === SCRIPT EXECUTION ===

if [ $# -eq 0 ]; then
    usage
    exit 1
fi

while getopts "nci" option; do
    case "$option" in
        n)
            log "=== Starting 'new' operation ==="
            export_schema
            import_and_rename
            log "=== 'New' operation finished successfully ==="
            ;;
        c)
            log "=== Starting 'complete' operation ==="
            drop_aurora_schema
            export_schema
            import_and_rename
            grant_privileges
            run_post_scripts
            log "=== 'Complete' operation finished successfully ==="
            ;;
        i)
            show_info
            ;;
        *)
            echo "Error: Invalid option."
            usage
            exit 1
            ;;
    esac
done
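With the configuration above in place, the refresh is driven entirely by the three flags. An invocation sketch (the paths follow the SCRIPT_ROOT default from aurora.conf; the cron schedule is an assumption, not part of this commit):

    # Show the resolved configuration
    /usr/sap/NDB/home/tools/aurora.sh -i

    # Full refresh: drop the old copy, export, import/rename, grant privileges, run post-scripts
    /usr/sap/NDB/home/tools/aurora.sh -c

    # Example crontab entry (assumption): weekly refresh on Sunday at 02:00
    # 0 2 * * 0 /usr/sap/NDB/home/tools/aurora.sh -c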
tomi.sh (deleted file, 221 lines)
@@ -1,221 +0,0 @@
#!/bin/bash
#
# Script to backup (export), restore (import), or restore-and-rename a SAP HANA schema.
# It automatically detects the number of available CPU threads to optimize operations.
# Designed for both interactive use and cronjob automation.
#
# ETomi 2025

# --- Configuration ---
hdbsql_path="/usr/sap/hdbclient/hdbsql"

# -- HANA Secure User Store Key --
user_key="CRONKEY"

# --- Validation ---
for cmd in hdbsql hdbuserstore tar gzip nproc pv mkdir chmod date; do
    tool_path="$(dirname "$hdbsql_path")/$cmd"
    if ! command -v $cmd &> /dev/null && [ ! -f "$tool_path" ]; then
        echo "❌ Error: Required command '$cmd' is not found in your PATH or at '$tool_path'."
        exit 1
    fi
done

if [ "$#" -lt 2 ]; then
    echo "❌ Error: Not enough arguments."
    echo ""
    echo "Usage: $0 <action> <arguments...>"
    echo ""
    echo "Actions:"
    echo " setup-key <key_name> - Interactively create a secure user store key"
    echo " backup <schema> <path> [--compress] [--timestamp] - Export a schema, optionally compress and/or add a timestamp"
    echo " restore <schema> <path> - Import a schema from a path"
    echo " restore-rename <schema> <new_schema> <path> - Import and rename a schema"
    exit 1
fi

action=$1
threads=$(nproc --all)

# --- Functions ---
confirm_action() {
    if [ "$TERM" != "dumb" ] && [ -t 0 ]; then
        read -p "❓ Are you sure you want to proceed with the '$1' operation? (y/n): " -n 1 -r
        echo ""
        if [[ ! $REPLY =~ ^[Yy]$ ]]; then
            echo "🛑 Operation cancelled by user."
            exit 1
        fi
    fi
}

execute_sql() {
    local command_to_run=$1
    local action_name=$2

    echo "⚙️ Executing SQL: $command_to_run"
    $hdbsql_path -U $user_key "$command_to_run"

    if [ $? -ne 0 ]; then
        echo "❌ Failure! The '$action_name' command failed. Please check the logs."
        exit 1
    fi
}

# --- Main Logic ---
# The setup-key action is handled first as it doesn't need all global variables.
if [ "$action" == "setup-key" ]; then
    if [ "$#" -ne 2 ]; then
        echo "❌ Error: Invalid arguments for 'setup-key'."
        echo "Usage: $0 setup-key <key_name>"
        exit 1
    fi
    key_name=$2
    hdbuserstore_path="$(dirname "$hdbsql_path")/hdbuserstore"

    echo "--- Interactive Secure User Store Key Setup ---"
    read -p "Enter HANA host [localhost]: " hdb_host
    hdb_host=${hdb_host:-localhost}

    read -p "Enter instance number [00]: " hdb_instance
    hdb_instance=${hdb_instance:-00}
    hdb_port="3${hdb_instance}15"

    read -p "Enter tenant database name [NDB]: " hdb_tenant
    hdb_tenant=${hdb_tenant:-NDB}

    read -p "Enter database user [SYSTEM]: " hdb_user
    hdb_user=${hdb_user:-SYSTEM}

    read -sp "Enter password for '$hdb_user': " hdb_pass
    echo ""
    if [ -z "$hdb_pass" ]; then
        echo "❌ Error: Password cannot be empty."
        exit 1
    fi

    connection_string="${hdb_host}:${hdb_port}@${hdb_tenant}"
    echo "-------------------------------------"
    echo "Key Name: $key_name"
    echo "Connection String: $connection_string"
    echo "Database User: $hdb_user"
    echo "-------------------------------------"
    confirm_action "create key '$key_name'"

    echo "▶️ Creating secure store key..."
    $hdbuserstore_path SET "$key_name" "$connection_string" "$hdb_user" "$hdb_pass"

    if [ $? -eq 0 ]; then
        echo "✅ Success! Key '$key_name' created. You can now set 'user_key=\"$key_name\"' at the top of this script."
    else
        echo "❌ Failure! Could not create key '$key_name'. Please check the details and try again."
        exit 1
    fi
    exit 0
fi

# --- Actions (Backup/Restore) ---
echo "ℹ️ Detected $threads available CPU threads to use for the operation."

case "$action" in
    backup)
        if [ "$#" -lt 3 ] || [ "$#" -gt 5 ]; then
            echo "❌ Error: Invalid arguments for 'backup'."
            echo "Usage: $0 backup <schema_name> <path> [--compress] [--timestamp]"
            exit 1
        fi

        schema=$2
        path=$3
        compress_flag=false
        timestamp_flag=false

        # Loop through optional arguments to handle any order
        for arg in "${@:4}"; do
            case "$arg" in
                --compress)
                    compress_flag=true
                    ;;
                --timestamp)
                    timestamp_flag=true
                    ;;
                *)
                    echo "❌ Error: Invalid option '$arg'. Valid options are '--compress' or '--timestamp'."
                    exit 1
                    ;;
            esac
        done

        # Add timestamp to path if flag is set
        if [ "$timestamp_flag" = true ]; then
            timestamp=$(date +'%Y%m%d_%H%M%S')
            path="${path}_${timestamp}"
        fi

        echo "ℹ️ Creating backup directory: $path"
        mkdir -p "$path" && chmod 777 "$path"
        if [ $? -ne 0 ]; then
            echo "❌ Failure! Could not create or set permissions for directory '$path'."
            exit 1
        fi

        echo "▶️ Starting BACKUP (EXPORT) for schema '$schema' to '$path'..."
        sql_command="EXPORT \"$schema\".\"*\" AS BINARY INTO '$path' WITH REPLACE THREADS $threads;"
        execute_sql "$sql_command" "$action"
        echo "✅ SQL Export completed successfully."

        if [ "$compress_flag" = true ]; then
            echo "▶️ Compressing backup directory: $path"
            archive_file="${path}.tar.gz"
            total_size=$(du -sb "$path" | awk '{print $1}')
            tar cf - -C "$(dirname "$path")" "$(basename "$path")" | pv -s "$total_size" | gzip > "$archive_file"
            if [ $? -eq 0 ]; then
                echo "✅ Compression successful: $archive_file"
                echo "🗑️ Removing original backup directory..."
                rm -rf "$path"
            else
                echo "❌ Failure! Compression failed."
                exit 1
            fi
        fi
        ;;

    restore)
        if [ "$#" -ne 3 ]; then
            echo "❌ Error: Invalid arguments for 'restore'."
            echo "Usage: $0 restore <schema_name> <path>"
            exit 1
        fi
        schema=$2
        path=$3
        confirm_action "restore"
        echo "▶️ Starting RESTORE (IMPORT) for schema '$schema' from '$path'..."
        sql_command="IMPORT \"$schema\".\"*\" AS BINARY FROM '$path' WITH IGNORE EXISTING THREADS $threads;"
        execute_sql "$sql_command" "$action"
        echo "✅ Success! The '$action' operation completed."
        ;;

    restore-rename)
        if [ "$#" -ne 4 ]; then
            echo "❌ Error: Invalid arguments for 'restore-rename'."
            echo "Usage: $0 restore-rename <schema_name> <new_schema_name> <path>"
            exit 1
        fi
        schema=$2
        new_schema=$3
        path=$4
        confirm_action "restore-rename"
        echo "▶️ Starting RESTORE & RENAME for schema '$schema' from '$path' to '$new_schema'..."
        sql_command="IMPORT \"$schema\".\"*\" AS BINARY FROM '$path' WITH IGNORE EXISTING THREADS $threads RENAME SCHEMA \"$schema\" TO \"$new_schema\";"
        execute_sql "$sql_command" "$action"
        echo "✅ Success! The '$action' operation completed."
        ;;

    *)
        echo "❌ Error: Invalid action '$action'."
        echo "Valid actions are 'backup', 'restore', 'restore-rename', or 'setup-key'."
        exit 1
        ;;
esac

exit 0
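The removed tomi.sh restores only from an export directory, so an archive produced by its --compress option had to be unpacked before a restore. A sketch for reference (the archive name and paths are placeholder assumptions):

    # Unpack a compressed export, then import it (paths/names are placeholders)
    tar xzf /hana/shared/backup/SBO_DEMO_20250101_020000.tar.gz -C /hana/shared/backup
    ./tomi.sh restore SBO_DEMO /hana/shared/backup/SBO_DEMO_20250101_020000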