Compare commits: 8b7f7dcf09 ... eeb5b2eb7b

9 Commits

| SHA1 |
|---|
| eeb5b2eb7b |
| a6150467e5 |
| 2424d55426 |
| 408f2396da |
| a16b8aa42b |
| d9760b9072 |
| 229683dfa5 |
| 2d5d2dfa9c |
| 61e44106e5 |
@@ -80,10 +80,7 @@ Here's a breakdown of the scripts included in this repository:
* Can drop existing Aurora schema before refresh.
* Grants privileges to a specified user.
* Runs post-import SQL scripts.
* **Usage**: `./aurora/aurora.sh [new | complete | info]`
    * `new`: Export, import, and rename (no privileges or post-scripts).
    * `complete`: Drop, export, import, grant privileges, and run post-scripts.
    * `info`: Show configuration information.
* **Usage**: The script runs automatically based on the settings in `aurora/aurora.conf`. It is typically scheduled via cron.

### 8. `aurora/aurora.conf` (Configuration for `aurora.sh`)
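The updated usage note says the refresh is scheduled via cron. A minimal scheduling sketch, assuming an early-morning 04:00 slot and an install path under `/usr/sap/NDB/home/tools` (both are placeholders, not taken from the repository):

```sh
# Append a daily 04:00 run of aurora.sh to the current user's crontab.
# Path, schedule, and log location are placeholders - adjust to your installation.
( crontab -l 2>/dev/null
  echo '0 4 * * * /usr/sap/NDB/home/tools/aurora/aurora.sh >> /var/log/aurora_refresh.log 2>&1'
) | crontab -
```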
aurora/aurora.conf

@@ -1,31 +1,39 @@
# Configuration for the HANA Aurora Refresh Script
# Place this file in the same directory as the aurora.sh script.
# Configuration for the Aurora Refresh Script (aurora_refresh.sh)
# Place this file in the same directory as the script.

# --- Main Settings ---

# The source production schema to be copied.
SCHEMA="SBO_DEMO"
# Example: "SBO_COMPANY_PROD"
SOURCE_SCHEMA="SBODEMOHU"

# The user who will be granted privileges on the new Aurora schema.
AURORA_SCHEMA_USER="B1_53424F5F4348494D5045585F4155524F5241_RW"
# The HANA user that will be granted read/write access to the new Aurora schema.
# This is typically a technical user for the application.
# Example: "B1_..._RW"
AURORA_USER="B1_XXXXXXXXX_RW"

# The database user for performing backup and administrative tasks.
BACKOP_USER="CRONKEY"
# The secure user store key for the HANA database user with privileges to
# perform EXPORT, IMPORT, DROP SCHEMA, and GRANT commands (e.g., SYSTEM).
# Using a key (hdbuserstore) is more secure than hardcoding a password.
# Example: "CRONKEY"
DB_ADMIN_KEY="CRONKEY"

# --- Paths and Files ---
# --- Paths ---

# The base directory for storing the temporary schema export.
BACKUP_DIR="/hana/shared/backup/schema"
# The base directory where the temporary schema export folder will be created.
# Ensure the <sid>adm user has write permissions here.
BACKUP_BASE_DIR="/hana/shared/backup/schema"

# The full path to the HANA hdbsql executable.
HDBSQL="/usr/sap/NDB/HDB00/exe/hdbsql"

# The root directory where post-import SQL scripts are located.
SQL_SCRIPTS_ROOT="/usr/sap/NDB/home/tools/sql"

# --- Post-Import Scripts ---
# --- Post-Import Scripts (Optional) ---

# The root directory where the SQL script and its associated files are located.
SQL_ROOT="/usr/sap/NDB/home/tools"

# A space-separated list of SQL script files to run after the import is complete.
# These scripts should be located in the SCRIPT_ROOT directory.
POST_SQL=""
# A space-separated list of SQL script filenames to run after the import is complete.
# The script will look for these files inside the SQL_SCRIPTS_ROOT directory.
# Leave empty ("") if no scripts are needed.
# Example: "update_user_emails.sql cleanup_tables.sql"
POST_IMPORT_SQL=""
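The new `DB_ADMIN_KEY` setting points at a HANA secure user store key rather than a password. A minimal sketch of creating such a key with `hdbuserstore`, assuming instance 00 and the SYSTEM user; host, port, user, and password below are placeholders:

```sh
# Create a secure user store key named CRONKEY as the <sid>adm OS user.
# localhost:30015 and SYSTEM are placeholders for your host:port and DB user.
/usr/sap/NDB/HDB00/exe/hdbuserstore SET CRONKEY localhost:30015 SYSTEM '<password>'

# List the stored keys to verify (passwords are never displayed).
/usr/sap/NDB/HDB00/exe/hdbuserstore LIST
```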
aurora/aurora.sh (208 changed lines)
@@ -1,115 +1,119 @@
#!/bin/sh
# Version: 1.2.6
# Exit immediately if a command exits with a non-zero status.
set -e
# Version: 2.1.0
#
# Purpose: Performs an automated refresh of a SAP HANA schema. It exports a
#          production schema and re-imports it under a new name ("Aurora")
#          to create an up-to-date, non-production environment for testing.
#          Designed to be run via cron, typically in the early morning.
#
# -----------------------------------------------------------------------------

# === SETUP ===
# Determine script's directory and source the configuration file.
# --- Basic Setup ---
# Exit immediately if any command fails or if an unset variable is used.
set -eu

# --- Configuration ---
# Load the configuration file located in the same directory as the script.
SCRIPT_DIR=$(dirname "$0")
CONFIG_FILE="${SCRIPT_DIR}/aurora.conf"

if [ ! -f "$CONFIG_FILE" ]; then
    echo "❌ Error: Configuration file not found at '${CONFIG_FILE}'" >&2
    echo "❌ FATAL: Configuration file not found at '${CONFIG_FILE}'" >&2
    exit 1
fi
# shellcheck source=aurora.conf
. "$CONFIG_FILE"

# === DERIVED VARIABLES ===
TIMESTAMP=$(date "+%Y-%m-%d %H:%M:%S")
AURORA="${SCHEMA}_AURORA"
AURORA_TEMP_DIR="${BACKUP_DIR}/${AURORA}"
COMPNYNAME="${SCHEMA#SBO_}"
[[ "$COMPNYNAME" == *_PROD ]] && COMPNYNAME="${COMPNYNAME%_PROD}" # Remove _PROD suffix if it exists

# === FUNCTIONS ===

run_sql() {
    echo "ℹ️ Executing: $1"
    "$HDBSQL" -U "${BACKOP_USER}" "$1" >/dev/null
}

show_info() {
    echo "Source Schema: ${SCHEMA}"
    echo "Target Schema: ${AURORA}"
    echo "Target Schema User: ${AURORA_SCHEMA_USER}"
    echo "Company Name: ${COMPNYNAME}"
    echo "Export Directory: ${AURORA_TEMP_DIR}"
}

usage() {
    echo "Usage: $0 [--info]"
    echo " --info : Show configuration information."
    echo " (No argument) : Drop, export, import, grant privileges, and run post-scripts."
}

export_schema() {
    echo "⬇️ Starting schema export for '${SCHEMA}'..."
    mkdir -p "$AURORA_TEMP_DIR"
    run_sql "EXPORT \"${SCHEMA}\".\"*\" AS BINARY INTO '$AURORA_TEMP_DIR' WITH REPLACE;"
    echo "✅ Schema export completed."
}

import_and_rename() {
    echo "⬆️ Starting import and rename to '${AURORA}'..."
    run_sql "IMPORT \"${SCHEMA}\".\"*\" FROM '$AURORA_TEMP_DIR' WITH IGNORE EXISTING RENAME SCHEMA \"${SCHEMA}\" TO \"${AURORA}\";"
    echo "ℹ️ Updating company name fields..."
    "$HDBSQL" -U "${BACKOP_USER}" -c ";" -I - <<EOF
UPDATE \"${AURORA}\".CINF SET \"CompnyName\"='AURORA ${COMPNYNAME} ${TIMESTAMP}';
UPDATE \"${AURORA}\".OADM SET \"CompnyName\"='AURORA ${COMPNYNAME} ${TIMESTAMP}';
UPDATE \"${AURORA}\".OADM SET \"PrintHeadr\"='AURORA ${COMPNYNAME} ${TIMESTAMP}';
EOF
    echo "✅ Import and rename completed."
}

grant_privileges() {
    echo "🔑 Granting privileges on '${AURORA}' to '${AURORA_SCHEMA_USER}'..."
    run_sql "GRANT ALL PRIVILEGES ON SCHEMA \"${AURORA}\" TO \"${AURORA_SCHEMA_USER}\";"
    echo "✅ Privileges granted."
}

drop_aurora_schema() {
    echo "🗑️ Dropping existing '${AURORA}' schema..."
    run_sql "DROP SCHEMA \"${AURORA}\" CASCADE;" 2>/dev/null || echo "⚠️ Could not drop schema '${AURORA}'. It might not exist." >&2
    echo "✅ Old schema dropped."
}

run_post_scripts() {
    echo "⚙️ Running post-import SQL scripts: ${POST_SQL}..."
    for sql_file in $POST_SQL; do
        echo " - Running script: ${sql_file}"
        "$HDBSQL" -U "${BACKOP_USER}" -I "${SCRIPT_ROOT}/${sql_file}"
    done
    echo "✅ All post-import scripts completed."
}

cleanup_exported_files() {
    echo "🧹 Cleaning up exported files from '${AURORA_TEMP_DIR}'..."
    rm -rf "$AURORA_TEMP_DIR"
    echo "✅ Exported files cleaned up."
}

# === SCRIPT EXECUTION ===

if [ $# -eq 0 ]; then
    echo "🚀 Starting 'complete' operation (default)..."
    drop_aurora_schema
    export_schema
    import_and_rename
    grant_privileges
    run_post_scripts
    cleanup_exported_files
    echo "🎉 'Complete' operation finished successfully!"
    exit 0
# --- Validate Configuration ---
if [ ! -x "$HDBSQL" ]; then
    echo "❌ FATAL: hdbsql is not found or not executable at '${HDBSQL}'" >&2
    exit 1
fi

case "$1" in
    --info)
        show_info
        ;;
    *)
        echo "❌ Error: Invalid argument '$1'." >&2
        usage
        exit 1
        ;;
esac
# --- Derived Variables (Do Not Edit) ---
TIMESTAMP=$(date "+%Y-%m-%d %H:%M:%S")
AURORA_SCHEMA="${SOURCE_SCHEMA}_AURORA"
EXPORT_DIR="${BACKUP_BASE_DIR}/${AURORA_SCHEMA}_TEMP_EXPORT"
COMPANY_NAME_BASE=$(echo "${SOURCE_SCHEMA}" | sed 's/^SBO_//' | sed 's/_PROD$//')

# --- Main Execution ---
echo
echo "🚀 [$(date "+%T")] Starting Aurora Refresh for '${SOURCE_SCHEMA}'"
echo "--------------------------------------------------------"
echo " Source Schema: ${SOURCE_SCHEMA}"
echo " Target Aurora Schema: ${AURORA_SCHEMA}"
echo " Temp Export Path: ${EXPORT_DIR}"
echo "--------------------------------------------------------"

# 1. Drop the old Aurora schema if it exists.
echo "🗑️ Dropping old schema '${AURORA_SCHEMA}' (if it exists)..."
"$HDBSQL" -U "$DB_ADMIN_KEY" "DROP SCHEMA \"${AURORA_SCHEMA}\" CASCADE" >/dev/null 2>&1 || echo " -> Schema did not exist. Continuing."

# 2. Prepare the temporary export directory.
echo "📁 Preparing temporary export directory..."
rm -rf "$EXPORT_DIR"
mkdir -p "$EXPORT_DIR"

# 3. Export the source schema.
echo "⬇️ Exporting source schema '${SOURCE_SCHEMA}' to binary files..."
"$HDBSQL" -U "$DB_ADMIN_KEY" "EXPORT \"${SOURCE_SCHEMA}\".\"*\" AS BINARY INTO '${EXPORT_DIR}' WITH REPLACE;" >/dev/null
echo " -> Export complete."

# 4. Import the data into the new Aurora schema.
echo "⬆️ Importing data and renaming schema to '${AURORA_SCHEMA}'..."
"$HDBSQL" -U "$DB_ADMIN_KEY" "IMPORT \"${SOURCE_SCHEMA}\".\"*\" FROM '${EXPORT_DIR}' WITH IGNORE EXISTING RENAME SCHEMA \"${SOURCE_SCHEMA}\" TO \"${AURORA_SCHEMA}\";" >/dev/null
echo " -> Import complete."

# 5. Update company name in CINF and OADM tables.
echo "✍️ Updating company name fields in the new schema..."

# First, get the original company name from the source schema.
# The query returns a header and the name in quotes. sed gets the second line, tr removes the quotes, xargs trims whitespace.
echo " -> Fetching original company name from '${SOURCE_SCHEMA}'..."
ORIGINAL_COMPNY_NAME=$("$HDBSQL" -U "$DB_ADMIN_KEY" "SELECT \"CompnyName\" FROM \"${SOURCE_SCHEMA}\".\"CINF\"" | sed -n '2p' | tr -d '"' | xargs)

# Construct the new name in the desired format.
DATE_STAMP=$(date "+%Y-%m-%d")
NEW_COMPNY_NAME="AURORA - ${ORIGINAL_COMPNY_NAME} - ${DATE_STAMP}"
echo " -> New company name set to: '${NEW_COMPNY_NAME}'"

echo " -> Updating CINF table..."
"$HDBSQL" -U "$DB_ADMIN_KEY" "UPDATE \"${AURORA_SCHEMA}\".CINF SET \"CompnyName\" = '${NEW_COMPNY_NAME}';" >/dev/null

echo " -> Updating OADM table..."
"$HDBSQL" -U "$DB_ADMIN_KEY" "UPDATE \"${AURORA_SCHEMA}\".OADM SET \"CompnyName\" = '${NEW_COMPNY_NAME}', \"PrintHeadr\" = '${NEW_COMPNY_NAME}';" >/dev/null
echo " -> Company info updated."

# 6. Grant privileges to the read/write user.
echo "🔑 Granting ALL privileges on '${AURORA_SCHEMA}' to '${AURORA_USER}'..."
"$HDBSQL" -U "$DB_ADMIN_KEY" "GRANT ALL PRIVILEGES ON SCHEMA \"${AURORA_SCHEMA}\" TO \"${AURORA_USER}\";" >/dev/null
echo " -> Privileges granted."

# 7. Run post-import SQL scripts, if any are defined.
if [ -n "$POST_IMPORT_SQL" ]; then
    echo "⚙️ Running post-import SQL scripts..."
    # Use word splitting intentionally here
    # shellcheck disable=SC2086
    for sql_file in $POST_IMPORT_SQL; do
        full_path="${SQL_SCRIPTS_ROOT}/${sql_file}"
        if [ -f "$full_path" ]; then
            echo " -> Executing: ${sql_file}"
            "$HDBSQL" -U "$DB_ADMIN_KEY" -I "$full_path"
        else
            echo " -> ⚠️ WARNING: Script not found: ${full_path}" >&2
        fi
    done
else
    echo "ℹ️ No post-import SQL scripts to run."
fi

# 8. Clean up the temporary export files.
echo "🧹 Cleaning up temporary directory '${EXPORT_DIR}'..."
rm -rf "$EXPORT_DIR"
echo " -> Cleanup complete."

echo "--------------------------------------------------------"
echo "✅ [$(date "+%T")] Aurora Refresh finished successfully!"
echo

exit 0
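For reference, a small worked sketch of how the new derived variables expand, using the sample value `SBO_COMPANY_PROD` from the config comments (the echoed results are shown as comments):

```sh
#!/bin/sh
# Worked example of the derived-variable logic above, using the sample
# value from the config comments (SOURCE_SCHEMA="SBO_COMPANY_PROD").
SOURCE_SCHEMA="SBO_COMPANY_PROD"
BACKUP_BASE_DIR="/hana/shared/backup/schema"

AURORA_SCHEMA="${SOURCE_SCHEMA}_AURORA"
EXPORT_DIR="${BACKUP_BASE_DIR}/${AURORA_SCHEMA}_TEMP_EXPORT"
COMPANY_NAME_BASE=$(echo "${SOURCE_SCHEMA}" | sed 's/^SBO_//' | sed 's/_PROD$//')

echo "$AURORA_SCHEMA"     # SBO_COMPANY_PROD_AURORA
echo "$EXPORT_DIR"        # /hana/shared/backup/schema/SBO_COMPANY_PROD_AURORA_TEMP_EXPORT
echo "$COMPANY_NAME_BASE" # COMPANY
```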
hanatool.sh (26 changed lines)
@@ -1,5 +1,5 @@
#!/bin/bash
# Version: 1.4.6
# Version: 1.5.0
# ==============================================================================
# SAP HANA Schema and Tenant Management Tool (hanatool.sh)
#

@@ -12,6 +12,7 @@ COMPRESS=false
THREADS=0 # 0 means auto-calculate later
DRY_RUN=false
NTFY_TOKEN=""
IMPORT_REPLACE=false

# --- Help/Usage Function ---
usage() {

@@ -38,6 +39,7 @@ usage() {
    echo " -c, --compress Enable tar.gz compression for exports and backups."
    echo " -n, --dry-run Show what commands would be executed without running them."
    echo " --ntfy <token> Send a notification via ntfy.sh upon completion/failure."
    echo " --replace Use the 'REPLACE' option for imports instead of 'IGNORE EXISTING'."
    echo " --hdbsql <path> Specify a custom path for the hdbsql executable."
    echo " -h, --help Show this help message."
    echo ""

@@ -48,8 +50,8 @@ usage() {
    echo " # Import MYSCHEMA from a compressed archive"
    echo " $0 MY_SCHEMA_KEY import MYSCHEMA /hana/backups/MYSCHEMA_20240101.tar.gz -c"
    echo ""
    echo " # Import MYSCHEMA as MYSCHEMA_TEST using a custom hdbsql path"
    echo " $0 MY_SCHEMA_KEY import-rename MYSCHEMA MYSCHEMA_TEST /hana/backups/temp_export --hdbsql /sap/custom/hdbsql"
    echo " # Import MYSCHEMA as MYSCHEMA_TEST, replacing any existing objects"
    echo " $0 MY_SCHEMA_KEY import-rename MYSCHEMA MYSCHEMA_TEST /hana/backups/temp_export --replace"
}

# --- Notification Function ---

@@ -83,6 +85,10 @@ while [[ $# -gt 0 ]]; do
        NTFY_TOKEN="$2"
        shift 2
        ;;
    --replace)
        IMPORT_REPLACE=true
        shift
        ;;
    --hdbsql)
        HDBSQL_PATH="$2"
        shift 2

@@ -350,12 +356,20 @@ case "$ACTION" in
        exit 1
    fi

    QUERY_RENAME_PART=""
    local import_options
    if [[ "$IMPORT_REPLACE" == "true" ]]; then
        import_options="REPLACE"
        echo " - Mode: REPLACE"
    else
        import_options="IGNORE EXISTING"
        echo " - Mode: IGNORE EXISTING (default)"
    fi

    if [[ "$ACTION" == "import-rename" ]]; then
        QUERY_RENAME_PART="RENAME SCHEMA \"${SCHEMA_NAME}\" TO \"${NEW_SCHEMA_NAME}\""
        import_options="${import_options} RENAME SCHEMA \"${SCHEMA_NAME}\" TO \"${NEW_SCHEMA_NAME}\""
    fi

    QUERY="IMPORT \"${SCHEMA_NAME}\".\"*\" AS BINARY FROM '${IMPORT_DIR}' WITH IGNORE EXISTING THREADS ${THREADS} ${QUERY_RENAME_PART};"
    QUERY="IMPORT \"${SCHEMA_NAME}\".\"*\" AS BINARY FROM '${IMPORT_DIR}' WITH ${import_options} THREADS ${THREADS};"

    EXIT_CODE=0
    if [[ "$DRY_RUN" == "true" ]]; then
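To make the effect of the new `--replace` flag concrete, a sketch of the IMPORT statement the reworked logic assembles for an `import-rename` run; the schema names, path, and thread count are illustrative placeholders:

```sh
#!/bin/bash
# Sketch of the IMPORT statement hanatool.sh now assembles.
# All values below are illustrative placeholders.
SCHEMA_NAME="MYSCHEMA"
NEW_SCHEMA_NAME="MYSCHEMA_TEST"
IMPORT_DIR="/hana/backups/temp_export"
THREADS=4
IMPORT_REPLACE=true   # set by the new --replace flag; default is false

if [[ "$IMPORT_REPLACE" == "true" ]]; then
    import_options="REPLACE"
else
    import_options="IGNORE EXISTING"
fi
# import-rename appends the RENAME SCHEMA clause to the options string.
import_options="${import_options} RENAME SCHEMA \"${SCHEMA_NAME}\" TO \"${NEW_SCHEMA_NAME}\""

echo "IMPORT \"${SCHEMA_NAME}\".\"*\" AS BINARY FROM '${IMPORT_DIR}' WITH ${import_options} THREADS ${THREADS};"
# -> IMPORT "MYSCHEMA"."*" AS BINARY FROM '/hana/backups/temp_export' WITH REPLACE RENAME SCHEMA "MYSCHEMA" TO "MYSCHEMA_TEST" THREADS 4;
```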
install.sh (234 changed lines)
@@ -2,38 +2,26 @@

# --- Main Script ---

# This script presents a menu of software packages defined in a remote
# configuration file. The user can select one or more packages, and the
# script will download the corresponding files. It includes a feature to show
# a diff and ask for confirmation before overwriting existing config files.
# It can also check for updates to already-installed scripts.
# This script presents a menu of software packages, or installs them
# non-interactively via command-line arguments. It downloads files from a
# remote configuration, shows a diff for config updates, and checks versions.

# --- Functions ---

# A simple function to log messages with a consistent format.
log() {
    echo "[$1] $2"
}

# Get the version from a local script file.
# It reads the first 5 lines and extracts the version number.
get_local_version() {
    local file_path="$1"
    if [[ -f "${file_path}" ]]; then
        # Grep for the version line, then use awk to get the last field.
        head -n 5 "${file_path}" | grep -m 1 "^# Version:" | awk '{print $NF}'
    else
        echo "0.0.0" # Return a base version if file doesn't exist.
    fi
}

# Compare two version strings (e.g., "1.2.0" vs "1.10.0").
# Returns 0 if v1 is newer, 1 if they are the same or v2 is newer.
# Compare two version strings. Returns 0 if v1 is newer.
is_version_greater() {
    local v1=$1
    local v2=$2
    # Use sort's version sorting capability to find the "highest" version.
    # If the highest version is v1, then v1 > v2.
    if [[ "$(printf '%s\n' "$v1" "$v2" | sort -V | head -n 1)" != "$v1" ]]; then
        return 0 # v1 is greater
    else
@@ -41,181 +29,199 @@ is_version_greater() {
    fi
}
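A small self-contained sketch of how the `sort -V` comparison behaves; the function is reproduced here with the `else` branch (cut off at the hunk boundary above) filled in to return 1, as the original comment describes, and the sample versions are illustrative:

```sh
#!/bin/bash
# Minimal demonstration of the sort -V comparison used by is_version_greater.
is_version_greater() {
    local v1=$1 v2=$2
    # If the lowest version (head -n 1 of the version-sorted pair) is NOT v1,
    # then v1 must be the higher one.
    if [[ "$(printf '%s\n' "$v1" "$v2" | sort -V | head -n 1)" != "$v1" ]]; then
        return 0
    else
        return 1
    fi
}

is_version_greater "1.10.0" "1.2.0" && echo "1.10.0 > 1.2.0"     # prints: 1.10.0 > 1.2.0
is_version_greater "1.2.0" "1.10.0" || echo "1.2.0 is not newer" # prints: 1.2.0 is not newer
```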
# New function to process a single selected package.
# Process a single selected package.
process_package() {
    local choice="$1"
    # Check if the choice is a valid package name.
    if [[ -z "${SCRIPT_PACKAGES[$choice]}" ]]; then
        log "❌" "Invalid package name provided: '${choice}'"
    local choice_key="$1"
    local force_overwrite="$2" # Expects "true" or "false"

    if [[ -z "${SCRIPT_PACKAGES[$choice_key]}" ]]; then
        echo "[❌] Invalid package name provided: '${choice_key}'"
        return
    fi

    echo
    log "⬇️" "Processing package: '${choice}'..."
    echo "[⬇️] Processing package: '${choice_key}'..."

    # Get the config value and split it into version and URLs
    config_value="${SCRIPT_PACKAGES[$choice]}"
    remote_version=$(echo "${config_value}" | cut -d'|' -f1)
    urls_to_download=$(echo "${config_value}" | cut -d'|' -f2-)
    # Parse the new config format
    config_value="${SCRIPT_PACKAGES[$choice_key]}"
    display_name=$(echo "${config_value}" | cut -d'|' -f1)
    remote_version=$(echo "${config_value}" | cut -d'|' -f2)
    description=$(echo "${config_value}" | cut -d'|' -f3)
    urls_to_download=$(echo "${config_value}" | cut -d'|' -f4-)

    read -r -a urls_to_download_array <<< "$urls_to_download"

    for url in "${urls_to_download_array[@]}"; do
        filename=$(basename "${url}")
        # If it's a .conf file AND it already exists, ask to overwrite.
        # Handle config file overwrites
        if [[ "${filename}" == *.conf && -f "${filename}" ]]; then
            log "->" "Found existing config file: '${filename}'."
            tmp_file=$(mktemp)
            if [[ "$force_overwrite" == "true" ]]; then
                echo "[⚠️] Overwriting '${filename}' due to --overwrite-config flag."
                if ! curl -fsSL -o "${filename}" "${url}"; then
                    echo "[❌] Error: Failed to download '${filename}'."
                fi
                continue
            fi

            echo "[->] Found existing config file: '${filename}'."
            tmp_file=$(mktemp)
            if curl -fsSL -o "${tmp_file}" "${url}"; then
                log "🔎" "Comparing versions..."
                echo "[🔎] Comparing versions..."
                echo "-------------------- DIFF START --------------------"
                if command -v colordiff &> /dev/null; then
                    colordiff -u "${filename}" "${tmp_file}"
                else
                    # Attempt to use diff's color option, which is common.
                    diff --color=always -u "${filename}" "${tmp_file}" 2>/dev/null || diff -u "${filename}" "${tmp_file}"
                fi
                echo "--------------------- DIFF END ---------------------"

                read -p "Do you want to overwrite '${filename}'? (y/N) " -n 1 -r REPLY
                echo

                if [[ $REPLY =~ ^[Yy]$ ]]; then
                    mv "${tmp_file}" "${filename}"
                    log "✅" "Updated '${filename}'."
                    echo "[✅] Updated '${filename}'."
                else
                    rm "${tmp_file}"
                    log "🤷" "Kept existing version of '${filename}'."
                    echo "[🤷] Kept existing version of '${filename}'."
                fi
            else
                log "❌" "Error: Failed to download new version of '${filename}' for comparison."
                echo "[❌] Error downloading new version of '${filename}' for comparison."
                rm -f "${tmp_file}"
            fi
        else
            # Original download logic for all other files.
            log "->" "Downloading '${filename}'..."
            echo "[->] Downloading '${filename}'..."
            if curl -fsSL -o "${filename}" "${url}"; then
                log "✅" "Successfully downloaded '${filename}'."
                echo "[✅] Successfully downloaded '${filename}'."
                if [[ "${filename}" == *.sh || "${filename}" == *.bash ]]; then
                    chmod +x "${filename}"
                    log "🤖" "Made '${filename}' executable."
                    echo "[🤖] Made '${filename}' executable."
                fi
            else
                log "❌" "Error: Failed to download '${filename}'."
                echo "[❌] Error: Failed to download '${filename}'."
            fi
        fi
    done
    log "📦" "Package processing complete for '${choice}'."
    echo "[📦] Package processing complete for '${choice_key}'."
}

# --- Main Logic ---

# Generate a unique temporary filename with a timestamp.
conf_file="packages.conf.$(date +%Y%m%d%H%M%S)"

# Set up a trap to delete the temporary file on exit.
trap 'rm -f "${conf_file}"' EXIT

# Download the configuration file.
log "🔄" "Downloading configuration file..."
echo "[🔄] Downloading configuration file..."
if ! curl -fsSL -o "${conf_file}" "https://git.technopunk.space/tomi/Scripts/raw/branch/main/packages.conf"; then
    log "❌" "Error: Failed to download packages.conf. Exiting."
    echo "[❌] Error: Failed to download packages.conf. Exiting."
    exit 1
fi
log "✅" "Configuration file downloaded successfully."
echo "[✅] Configuration file downloaded successfully."

# Source the configuration file to load the SCRIPT_PACKAGES associative array.
source "${conf_file}"

# --- Update Check & User Interface ---
# --- Argument Parsing for Non-Interactive Mode ---
if [ "$#" -gt 0 ]; then
    declare -a packages_to_install
    overwrite_configs=false
    for arg in "$@"; do
        case $arg in
            --overwrite-config)
                overwrite_configs=true
                ;;
            -*)
                echo "[❌] Unknown flag: $arg" >&2
                exit 1
                ;;
            *)
                packages_to_install+=("$arg")
                ;;
        esac
    done

    # Create an array of options from the package names.
    # We will modify this array to show installation and update status.
    declare -a options
    package_keys=("${!SCRIPT_PACKAGES[@]}")

    log "🔎" "Checking for updates..."
    for key in "${package_keys[@]}"; do
        # The config format is now "VERSION|URL1 URL2..."
        config_value="${SCRIPT_PACKAGES[$key]}"
        remote_version=$(echo "${config_value}" | cut -d'|' -f1)

        # Get just the URLs and assume the first URL is the main script to check.
        urls=$(echo "${config_value}" | cut -d'|' -f2-)
        read -r -a url_array <<< "$urls"
        main_script_filename=$(basename "${url_array[0]}")

        # Get the local version of the main script file.
        local_version=$(get_local_version "${main_script_filename}")

        status=""
        if [[ -f "${main_script_filename}" ]]; then
            status=" (Installed: v${local_version})"
            # Compare versions
            if is_version_greater "$remote_version" "$local_version"; then
                status+=" [Update available: v${remote_version}]"
            fi
    if [ ${#packages_to_install[@]} -eq 0 ]; then
        echo "[❌] Flag provided with no package names. Exiting."
        exit 1
    fi
        options+=("${key}${status}")
    done

    options+=("Quit") # Add a Quit option to the menu.
    echo "[🚀] Running in non-interactive mode."
    for pkg_key in "${packages_to_install[@]}"; do
        if [[ -n "${SCRIPT_PACKAGES[$pkg_key]}" ]]; then
            process_package "$pkg_key" "$overwrite_configs"
        else
            echo "[⚠️] Unknown package: '$pkg_key'. Skipping."
        fi
    done
    echo "[🏁] Non-interactive run complete."
    exit 0
fi

# --- User Interaction ---
# --- Interactive Mode ---
declare -a ordered_keys
package_keys_sorted=($(for k in "${!SCRIPT_PACKAGES[@]}"; do echo $k; done | sort))
ordered_keys=("${package_keys_sorted[@]}")

# Manually display the options with numbers.
# --- Display Menu ---
echo
echo "-------------------------------------"
echo " Script Downloader "
echo "-------------------------------------"
for i in "${!options[@]}"; do
    printf "%d) %s\n" "$((i+1))" "${options[$i]}"
done
echo "[🔎] Checking for updates..."
echo

# Prompt the user for one or more choices.
for i in "${!ordered_keys[@]}"; do
    key="${ordered_keys[$i]}"
    config_value="${SCRIPT_PACKAGES[$key]}"
    display_name=$(echo "${config_value}" | cut -d'|' -f1)
    remote_version=$(echo "${config_value}" | cut -d'|' -f2)
    description=$(echo "${config_value}" | cut -d'|' -f3)
    urls=$(echo "${config_value}" | cut -d'|' -f4-)
    read -r -a url_array <<< "$urls"
    main_script_filename=$(basename "${url_array[0]}")
    local_version=$(get_local_version "${main_script_filename}")

    # Print main package line
    echo -e "\033[1m$((i+1))) $key - $display_name (v$remote_version)\033[0m"
    # Print description
    echo " $description"
    # Print status
    if [[ -f "${main_script_filename}" ]]; then
        if is_version_greater "$remote_version" "$local_version"; then
            echo -e " \033[33m[Update available: v${local_version} -> v${remote_version}]\033[0m"
        else
            echo -e " \033[32m[Installed: v${local_version}]\033[0m"
        fi
    fi
    echo
done
quit_num=$((${#ordered_keys[@]} + 1))
echo -e "\033[1m${quit_num}) Quit\033[0m"
echo

# --- Handle User Input ---
read -p "Please enter your choice(s) (e.g., 1 3 4), or press Enter to quit: " -r -a user_choices

# If no choices are made, exit gracefully.
if [ ${#user_choices[@]} -eq 0 ]; then
    log "👋" "No selection made. Exiting."
    echo "[👋] No selection made. Exiting."
    exit 0
fi

# Loop through the user's selections and process each one.
for choice_num in "${user_choices[@]}"; do
    # Validate that the input is a number.
    if ! [[ "$choice_num" =~ ^[0-9]+$ ]]; then
        log "⚠️" "Skipping invalid input: '${choice_num}'. Not a number."
        echo "[⚠️] Skipping invalid input: '${choice_num}'. Not a number."
        continue
    fi

    # Convert selection number to array index (0-based).
    index=$((choice_num - 1))

    # Validate that the index is within the bounds of the options array.
    if [[ -z "${options[$index]}" ]]; then
        log "⚠️" "Skipping invalid choice: '${choice_num}'. Out of range."
        continue
    fi

    # Get the choice text from the array.
    choice_with_status="${options[$index]}"

    # Strip the status message to get the package key.
    choice=$(echo "${choice_with_status}" | sed 's/ (.*//')

    # Handle the "Quit" option.
    if [[ "${choice}" == "Quit" ]]; then
        log "👋" "Quit selected. Exiting now."
    if [ "$choice_num" -eq "$quit_num" ]; then
        echo "[👋] Quit selected. Exiting."
        exit 0
    fi

    # Process the selected package.
    process_package "${choice}"
    index=$((choice_num - 1))
    if [[ -z "${ordered_keys[$index]}" ]]; then
        echo "[⚠️] Skipping invalid choice: '${choice_num}'. Out of range."
        continue
    fi
    choice_key="${ordered_keys[$index]}"
    process_package "$choice_key" "false" # Never force overwrite in interactive mode
done

echo
log "🏁" "All selected packages have been processed."
echo "[🏁] All selected packages have been processed."
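A usage sketch of the new non-interactive mode; the package keys `aurora` and `monitor` come from the updated packages.conf shown further below:

```sh
# Install or update the aurora and monitor packages without any prompts,
# overwriting existing .conf files instead of showing a diff.
./install.sh aurora monitor --overwrite-config

# Without the flag, existing .conf files still get the diff-and-confirm
# prompt; scripts themselves are downloaded unconditionally.
./install.sh aurora monitor
```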
monitor/monitor.conf

@@ -2,13 +2,13 @@

# --- Company Information ---
# Used to identify which company the alert is for.
COMPANY_NAME="Your Company Name"
COMPANY_NAME="Company"

# --- Notification Settings ---
# Your ntfy.sh topic URL
NTFY_TOPIC_URL="https://ntfy.technopunk.space/sap"
# Your ntfy.sh bearer token (if required)
NTFY_TOKEN="your_ntfy_token_here"
NTFY_TOKEN="tk_xxxxx"

# --- HANA Connection Settings ---
# Full path to the sapcontrol executable

@@ -27,6 +27,8 @@ DISK_USAGE_THRESHOLD=80
TRUNCATED_PERCENTAGE_THRESHOLD=50
# Percentage of 'Free' log segments below which an alert is triggered
FREE_PERCENTAGE_THRESHOLD=25
# Maximum age of the last successful full data backup in hours.
BACKUP_THRESHOLD_HOURS=25

# --- Monitored Directories ---
# List of directories to check for disk usage (space-separated)
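monitor.sh (shown next) iterates `DIRECTORIES_TO_MONITOR` as a bash array via `"${DIRECTORIES_TO_MONITOR[@]}"`, so a matching definition would look like the following sketch; the paths are placeholders:

```sh
# Bash array of filesystem paths whose usage is compared against
# DISK_USAGE_THRESHOLD by monitor.sh. The paths are examples only.
DIRECTORIES_TO_MONITOR=("/hana/data" "/hana/log" "/hana/shared")
```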
monitor/monitor.sh

@@ -1,5 +1,5 @@
#!/bin/bash
# Version: 1.0.5
# Version: 1.2.0
# =============================================================================
# SAP HANA Monitoring Script
#

@@ -28,21 +28,71 @@ if [ ! -f "$CONFIG_FILE" ]; then
fi
source "$CONFIG_FILE"

STATE_DIR="${SCRIPT_DIR}/monitor_state"
mkdir -p "${STATE_DIR}"

# Helper functions for state management
get_state() {
    local key="$1"
    if [ -f "${STATE_DIR}/${key}.state" ]; then
        cat "${STATE_DIR}/${key}.state"
    else
        echo ""
    fi
}

set_state() {
    local key="$1"
    local value="$2"
    echo "$value" > "${STATE_DIR}/${key}.state"
}

HOSTNAME=$(hostname)
SQL_QUERY="SELECT b.host, b.service_name, a.state, count(*) FROM PUBLIC.M_LOG_SEGMENTS a JOIN PUBLIC.M_SERVICES b ON (a.host = b.host AND a.port = b.port) GROUP BY b.host, b.service_name, a.state;"

send_notification() {
    local title="$1"
    local message="$2"
    local full_message="[${COMPANY_NAME} | ${HOSTNAME}] ${message}"
    curl -H "Authorization: Bearer ${NTFY_TOKEN}" -H "Title: ${title}" -d "${full_message}" "${NTFY_TOPIC_URL}" > /dev/null 2>&1
send_notification_if_changed() {
    local alert_key="$1"
    local title_prefix="$2" # e.g., "HANA Process"
    local current_message="$3"
    local is_alert_condition="$4" # "true" or "false"
    local current_value="$5" # The value to store as state (e.g., "85%", "GREEN", "ALERT")

    local previous_value=$(get_state "${alert_key}")

    if [ "$current_value" != "$previous_value" ]; then
        local full_title=""
        local full_message=""

        if [ "$is_alert_condition" == "true" ]; then
            full_title="${title_prefix} Alert"
            full_message="🚨 Critical: ${current_message}"
        else
            # Check if it was previously an alert (i.e., previous_value was not "OK")
            if [ -n "$previous_value" ] && [ "$previous_value" != "OK" ]; then
                full_title="${title_prefix} Resolved"
                full_message="✅ Resolved: ${current_message}"
            else
                # No alert, and no previous alert to resolve, so just update state silently
                set_state "${alert_key}" "$current_value"
                echo "ℹ️ State for ${alert_key} updated to ${current_value}. No notification sent."
                return
            fi
        fi

        local final_message="[${COMPANY_NAME} | ${HOSTNAME}] ${full_message}"
        curl -H "Authorization: Bearer ${NTFY_TOKEN}" -H "Title: ${full_title}" -d "${final_message}" "${NTFY_TOPIC_URL}" > /dev/null 2>&1
        set_state "${alert_key}" "$current_value"
        echo "🔔 Notification sent for ${alert_key}: ${full_message}"
    else
        echo "ℹ️ State for ${alert_key} unchanged. No notification sent."
    fi
}

# --- HANA Process Status ---
echo "⚙️ Checking HANA process status..."
if [ ! -x "$SAPCONTROL_PATH" ]; then
    echo "❌ Error: sapcontrol not found or not executable at ${SAPCONTROL_PATH}" >&2
    send_notification "HANA Monitor Error" "❌ Error: sapcontrol not found or not executable at ${SAPCONTROL_PATH}"
    send_notification_if_changed "hana_sapcontrol_path" "HANA Monitor Error" "sapcontrol not found or not executable at ${SAPCONTROL_PATH}" "true" "SAPCONTROL_ERROR"
    exit 1
fi

@@ -51,23 +101,28 @@ non_green_processes=$("${SAPCONTROL_PATH}" -nr "${HANA_INSTANCE_NR}" -function G
if [ -n "$non_green_processes" ]; then
    echo "🚨 Alert: One or more HANA processes are not running!" >&2
    echo "$non_green_processes" >&2
    send_notification "HANA Process Alert" "🚨 Critical: One or more HANA processes are not GREEN. Problem processes: ${non_green_processes}"
    send_notification_if_changed "hana_processes" "HANA Process" "One or more HANA processes are not GREEN. Problem processes: ${non_green_processes}" "true" "PROCESS_ALERT:${non_green_processes}"
    exit 1 # Exit early as other checks might fail
else
    send_notification_if_changed "hana_processes" "HANA Process" "All HANA processes are GREEN." "false" "OK"
    echo "✅ Success! All HANA processes are GREEN."
fi
echo "✅ Success! All HANA processes are GREEN."

# --- Disk Space Monitoring ---
echo "ℹ️ Checking disk usage..."
for dir in "${DIRECTORIES_TO_MONITOR[@]}"; do
    if [ ! -d "$dir" ]; then
        echo "⚠️ Warning: Directory '$dir' not found. Skipping." >&2
        send_notification_if_changed "disk_dir_not_found_${dir//\//_}" "HANA Disk Warning" "Directory '$dir' not found." "true" "DIR_NOT_FOUND"
        continue
    fi
    usage=$(df -h "$dir" | awk 'NR==2 {print $5}' | sed 's/%//')
    echo " - ${dir} is at ${usage}%"
    if (( $(echo "$usage > $DISK_USAGE_THRESHOLD" | bc -l) )); then
        echo "🚨 Alert: ${dir} usage is at ${usage}% which is above the ${DISK_USAGE_THRESHOLD}% threshold." >&2
        send_notification "HANA Disk Alert" "🚨 Critical: Disk usage for ${dir} is at ${usage}%."
        send_notification_if_changed "disk_usage_${dir//\//_}" "HANA Disk" "Disk usage for ${dir} is at ${usage}%." "true" "${usage}%"
    else
        send_notification_if_changed "disk_usage_${dir//\//_}" "HANA Disk" "Disk usage for ${dir} is at ${usage}% (below threshold)." "false" "OK"
    fi
done

@@ -75,14 +130,14 @@ done
echo "⚙️ Executing HANA SQL query..."
if [ ! -x "$HDBSQL_PATH" ]; then
    echo "❌ Error: hdbsql not found or not executable at ${HDBSQL_PATH}" >&2
    send_notification "HANA Monitor Error" "❌ Error: hdbsql not found or not executable at ${HDBSQL_PATH}"
    send_notification_if_changed "hana_hdbsql_path" "HANA Monitor Error" "hdbsql not found or not executable at ${HDBSQL_PATH}" "true" "HDBSQL_ERROR"
    exit 1
fi
readarray -t sql_output < <("$HDBSQL_PATH" -U "$HANA_USER_KEY" -c ";" "$SQL_QUERY" 2>&1)
if [ $? -ne 0 ]; then
    echo "❌ Failure! The hdbsql command failed. Please check logs." >&2
    error_message=$(printf '%s\n' "${sql_output[@]}")
    send_notification "HANA Monitor Error" "❌ Failure! The hdbsql command failed. Details: ${error_message}"
    send_notification_if_changed "hana_hdbsql_command" "HANA Monitor Error" "The hdbsql command failed. Details: ${error_message}" "true" "HDBSQL_COMMAND_FAILED"
    exit 1
fi

@@ -108,19 +163,58 @@ echo "ℹ️ Free Segments: ${free_segments}"

if [ $total_segments -eq 0 ]; then
    echo "⚠️ Warning: No log segments found. Skipping percentage checks." >&2
    send_notification_if_changed "hana_log_segments_total" "HANA Log Segment Warning" "No log segments found. Skipping percentage checks." "true" "NO_LOG_SEGMENTS"
    exit 0
else
    send_notification_if_changed "hana_log_segments_total" "HANA Log Segment" "Log segments found." "false" "OK"
fi

truncated_percentage=$((truncated_segments * 100 / total_segments))
if (( $(echo "$truncated_percentage > $TRUNCATED_PERCENTAGE_THRESHOLD" | bc -l) )); then
    echo "🚨 Alert: ${truncated_percentage}% of log segments are 'Truncated'." >&2
    send_notification "HANA Log Segment Alert" "🚨 Alert: ${truncated_percentage}% of HANA log segments are in 'Truncated' state."
    send_notification_if_changed "hana_log_truncated" "HANA Log Segment" "${truncated_percentage}% of HANA log segments are in 'Truncated' state." "true" "${truncated_percentage}%"
else
    send_notification_if_changed "hana_log_truncated" "HANA Log Segment" "${truncated_percentage}% of HANA log segments are in 'Truncated' state (below threshold)." "false" "OK"
fi

free_percentage=$((free_segments * 100 / total_segments))
if (( $(echo "$free_percentage < $FREE_PERCENTAGE_THRESHOLD" | bc -l) )); then
    echo "🚨 Alert: Only ${free_percentage}% of log segments are 'Free'." >&2
    send_notification "HANA Log Segment Alert" "🚨 Alert: Only ${free_percentage}% of HANA log segments are in 'Free' state."
    send_notification_if_changed "hana_log_free" "HANA Log Segment" "Only ${free_percentage}% of HANA log segments are in 'Free' state." "true" "${free_percentage}%"
else
    send_notification_if_changed "hana_log_free" "HANA Log Segment" "Only ${free_percentage}% of HANA log segments are in 'Free' state (above threshold)." "false" "OK"
fi

echo "ℹ️ Checking last successful data backup status..."

# Query to get the start time of the most recent successful complete data backup
last_backup_date=$("$HDBSQL_PATH" -U "$HANA_USER_KEY" -j -a -x \
    "SELECT TOP 1 SYS_START_TIME FROM M_BACKUP_CATALOG WHERE ENTRY_TYPE_NAME = 'complete data backup' AND STATE_NAME = 'successful' ORDER BY SYS_START_TIME DESC" 2>/dev/null | tr -d "\"" | sed 's/\..*//') # sed removes fractional seconds

if [[ -z "$last_backup_date" ]]; then
    # No successful backup found at all
    local message="No successful complete data backup found for ${COMPANY_NAME} HANA."
    echo "🚨 Critical: ${message}"
    send_notification_if_changed "hana_backup_status" "HANA Backup" "${message}" "true" "NO_BACKUP"
    return
fi

# Convert dates to epoch seconds for comparison
last_backup_epoch=$(date -d "$last_backup_date" +%s)
current_epoch=$(date +%s)
threshold_seconds=$((BACKUP_THRESHOLD_HOURS * 3600))

age_seconds=$((current_epoch - last_backup_epoch))
age_hours=$((age_seconds / 3600))

if (( age_seconds > threshold_seconds )); then
    local message="Last successful HANA backup for ${COMPANY_NAME} is ${age_hours} hours old, which exceeds the threshold of ${BACKUP_THRESHOLD_HOURS} hours. Last backup was on: ${last_backup_date}."
    echo "🚨 Critical: ${message}"
    send_notification_if_changed "hana_backup_status" "HANA Backup" "${message}" "true" "${age_hours}h"
else
    local message="Last successful backup is ${age_hours} hours old (Threshold: ${BACKUP_THRESHOLD_HOURS} hours)."
    echo "✅ Success! ${message}"
    send_notification_if_changed "hana_backup_status" "HANA Backup" "${message}" "false" "OK"
fi

echo "✅ Success! HANA monitoring check complete."
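To make the new backup-age check concrete, a small sketch of the epoch arithmetic with an illustrative catalog timestamp and a fixed "now" so the numbers are reproducible (both dates are made up):

```sh
#!/bin/bash
# Worked example of the backup-age calculation added above.
BACKUP_THRESHOLD_HOURS=25
last_backup_date="2024-01-01 02:00:00"                # illustrative M_BACKUP_CATALOG timestamp

last_backup_epoch=$(date -d "$last_backup_date" +%s)
current_epoch=$(date -d "2024-01-02 08:00:00" +%s)    # pretend "now" for a reproducible result
threshold_seconds=$((BACKUP_THRESHOLD_HOURS * 3600))  # 90000

age_seconds=$((current_epoch - last_backup_epoch))    # 108000 (30 hours)
age_hours=$((age_seconds / 3600))                     # 30

if (( age_seconds > threshold_seconds )); then
    echo "ALERT: last backup is ${age_hours}h old (threshold ${BACKUP_THRESHOLD_HOURS}h)"
fi
```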
packages.conf

@@ -1,17 +1,16 @@
#!/bin/bash
#
# This file contains the configuration for the script downloader.
# The `SCRIPT_PACKAGES` associative array maps a package name to a
# pipe-separated string: "<version>|<space-separated list of URLs>".
# The `SCRIPT_PACKAGES` associative array maps a short package name
# to a pipe-separated string with the following format:
# "<Display Name>|<Version>|<Description>|<Space-separated list of URLs>"

declare -A SCRIPT_PACKAGES

# The version should match the "# Version: x.x.x" line in the main script file.
SCRIPT_PACKAGES["Aurora Suite"]="1.2.6|https://git.technopunk.space/tomi/Scripts/raw/branch/main/aurora/aurora.sh https://git.technopunk.space/tomi/Scripts/raw/branch/main/aurora/aurora.conf"
SCRIPT_PACKAGES["Backup Suite"]="1.0.5|https://git.technopunk.space/tomi/Scripts/raw/branch/main/backup/backup.sh https://git.technopunk.space/tomi/Scripts/raw/branch/main/backup/backup.conf"
SCRIPT_PACKAGES["Monitor Suite"]="1.0.5|https://git.technopunk.space/tomi/Scripts/raw/branch/main/monitor/monitor.sh https://git.technopunk.space/tomi/Scripts/raw/branch/main/monitor/monitor.conf"
SCRIPT_PACKAGES["Key Manager"]="1.2.1|https://git.technopunk.space/tomi/Scripts/raw/branch/main/hdb_keymanager.sh"
SCRIPT_PACKAGES["File Cleaner"]="1.1.0|https://git.technopunk.space/tomi/Scripts/raw/branch/main/clean.sh"
SCRIPT_PACKAGES["HANA Tool"]="1.4.6|https://git.technopunk.space/tomi/Scripts/raw/branch/main/hanatool.sh"
# Example: Add a new script with its version.
# SCRIPT_PACKAGES["My Other Script"]="1.0.0|https://path/to/my-other-script.sh"
# Format: short_name="Display Name|Version|Description|URL1 URL2..."
SCRIPT_PACKAGES["aurora"]="Aurora Suite|2.1.0|A collection of scripts for managing Aurora database instances.|https://git.technopunk.space/tomi/Scripts/raw/branch/main/aurora/aurora.sh https://git.technopunk.space/tomi/Scripts/raw/branch/main/aurora/aurora.conf"
SCRIPT_PACKAGES["backup"]="Backup Suite|1.0.5|A comprehensive script for backing up system files and databases.|https://git.technopunk.space/tomi/Scripts/raw/branch/main/backup/backup.sh https://git.technopunk.space/tomi/Scripts/raw/branch/main/backup/backup.conf"
SCRIPT_PACKAGES["monitor"]="Monitor Suite|1.2.0|Scripts for monitoring system health and performance metrics.|https://git.technopunk.space/tomi/Scripts/raw/branch/main/monitor/monitor.sh https://git.technopunk.space/tomi/Scripts/raw/branch/main/monitor/monitor.conf"
SCRIPT_PACKAGES["keymanager"]="Key Manager|1.2.1|A utility for managing HDB user keys for SAP HANA.|https://git.technopunk.space/tomi/Scripts/raw/branch/main/keymanager.sh"
SCRIPT_PACKAGES["cleaner"]="File Cleaner|1.1.0|A simple script to clean up temporary files and logs.|https://git.technopunk.space/tomi/Scripts/raw/branch/main/cleaner.sh"
SCRIPT_PACKAGES["hanatool"]="HANA Tool|1.5.0|A command-line tool for various SAP HANA administration tasks.|https://git.technopunk.space/tomi/Scripts/raw/branch/main/hanatool.sh"
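A short sketch of how install.sh splits one of the new-format entries with `cut`, using the `hanatool` entry above as input; the comments show the resulting fields:

```sh
#!/bin/bash
# Demonstrates the field layout of the new packages.conf format.
config_value="HANA Tool|1.5.0|A command-line tool for various SAP HANA administration tasks.|https://git.technopunk.space/tomi/Scripts/raw/branch/main/hanatool.sh"

display_name=$(echo "${config_value}" | cut -d'|' -f1)   # HANA Tool
remote_version=$(echo "${config_value}" | cut -d'|' -f2) # 1.5.0
description=$(echo "${config_value}" | cut -d'|' -f3)    # A command-line tool for ...
urls=$(echo "${config_value}" | cut -d'|' -f4-)          # one or more space-separated URLs

echo "${display_name} v${remote_version}: ${description}"
echo "Downloads: ${urls}"
```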