#!/bin/bash
#
# Copyright © Postgres.ai (https://postgres.ai), Nikolay Samokhvalov
#
# Automated health-checks of PostgreSQL clusters
#
# Usage: ./checkup --help
#
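# Example invocation (illustrative values; the option names below follow the
# example printed by './checkup help'):
#   PGPASSWORD=mypasswd ./checkup collect -h ssh_user@dbhost \
#     --username postgres --dbname postgres --project myproject -e 1
#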
# GLOBALS (user-assigned variables)
FULL_REPORT_FNAME="0_Full_report.md"
# GLOBALS (autoload, do not change)
: ${DEBUG:=false} # print debug output
SCRIPT_NAME=$(basename $0)
SCRIPT_DIR=$(dirname $0)
PGHREP_BIN="${SCRIPT_DIR}/pghrep/bin/pghrep"
SAFE_IFS="$IFS"
ALL_ARGS="$@"
OPTIONS_ERROR_EXIT="false"
DEFAULT_LIST_LIMIT=50
DEFAULT_CONNECTION_TIMEOUT=10
DEFAULT_PG_PORT=5432
DEFAULT_SSH_PORT=22
LARGE_DB_ITEMS_COUNT=100000
AVAILABLE_MODES=("collect" "process" "upload" "help" "run")
# Output styles (only BOLD is supported by default GNU screen)
BOLD=`tput md 2>/dev/null` || :
RESET=`tput me 2>/dev/null` || :
#######################################
# Print a message to STDOUT with timestamp
# Globals:
# None
# Arguments:
# None
# Returns:
# (text) STDOUT
#######################################
function msg() {
echo "[$(date +'%Y-%m-%dT%H:%M:%S%z')] $@"
}
#######################################
# Print a debug-level message to STDOUT with timestamp
# Globals:
# DEBUG
# Arguments:
# (text) Message
# Returns:
# None
#######################################
function dbg() {
if [[ $DEBUG == "true" ]] ; then
msg "DEBUG: ${FUNCNAME[1]}: $@"
fi
}
#######################################
# Print an error/warning/notice to STDERR with timestamp and error location
# Please call 'exit' with a code after using
# this function (if needed)
# Globals:
# None
# Arguments:
# (text) Error message
# Returns:
# (text) STDERR
#######################################
function err() {
echo "[$(date +'%Y-%m-%dT%H:%M:%S%z')] ERROR: ${FUNCNAME[1]}: $@" >&2
}
#######################################
# Print an error/warning/notice to STDERR with timestamp only
# Please call 'exit' with a code after using
# this function (if needed)
# Globals:
# None
# Arguments:
# (text) Error message
# Returns:
# (text) STDERR
#######################################
function errmsg() {
echo "[$(date +'%Y-%m-%dT%H:%M:%S%z')] $@" >&2
}
#######################################
# Error trapping function, prints line number
# Globals:
# SCRIPT_NAME, BASH_LINENO[0]
# Arguments:
# (text) Message
# Returns:
# (lines with text) STDOUT
#######################################
error_handler() {
err "^^^ ERROR at [file: '${SCRIPT_NAME}', line: '${BASH_LINENO[0]}']" >&2
echo >&2
}
#######################################
# Cleanup function: close ssh sockets, etc.
# Globals:
# HOST
# Arguments:
# None
# Returns:
# (lines with text) STDOUT/STDERR
#######################################
cleanup_and_exit() {
local exit_code="$?" # we can detect exit code here
if [[ ! -z ${HOST+x} ]]; then
dbg "closing ssh conenction to host '$HOST' (if exists)"
(ssh -O exit ${HOST} 2>/dev/null) || true
fi
dbg "exit code is: '${exit_code}'"
exit "${exit_code}"
}
#######################################
# Read non-comment and non-empty lines from cli.conf
# Globals:
# SCRIPT_DIR
# Arguments:
# None
# Returns:
# (lines with text) STDOUT
#######################################
load_cli_res() {
local setting
if [[ ! -f "${SCRIPT_DIR}/resources/cli.conf" ]]; then
err "Cannot load '${SCRIPT_DIR}/resources/cli.conf'"
exit 2
fi
while read -r setting; do
# skip comments and empty lines
local re='^(#|$|[[:blank:]])'
[[ "$setting" =~ $re ]] && continue
echo "${setting}'"
done < "${SCRIPT_DIR}"/resources/cli.conf
}
#######################################
# Fill structures with possible CLI arguments from file
# Globals:
# CLI_ARGS_POSSIBLE, SECTION[], SHORT_NAME[],
# FULL_NAME[], ARG_TYPE[], MANDATARY[], DESCRIPTION[],
# Arguments:
# None
# Returns:
# None
#######################################
read_possible_args() {
local iter_num=0
local section short_name full_name arg_type mandatary description
if [[ ! -f "${SCRIPT_DIR}/resources/cli.conf" ]]; then
err "Can't load '${SCRIPT_DIR}/resources/cli.conf'"
exit 2
fi
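# Note: each non-comment line of cli.conf is pipe-separated, in the field order
# read below:
#   section|short_name|full_name|internal_name|arg_type|mandatary|arg_mode|description
# (a purely hypothetical entry: "Misc|p|project|PROJECT|word|mandatory|all|Project name")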
while IFS="|" read -r section short_name full_name internal_name arg_type mandatary arg_mode description; do
# cut last garbage symbol
# TODO(vyagofarov): understand this 'cutting' behavior
description=${description%?}
SECTION[$iter_num]="$section"
SHORT_NAME[$iter_num]="$short_name"
FULL_NAME[$iter_num]="$full_name"
INTERNAL_NAME[$iter_num]="$internal_name"
ARG_TYPE[$iter_num]="$arg_type"
MANDATARY[$iter_num]="$mandatary"
ARG_MODE[$iter_num]="$arg_mode"
DESCRIPTION[$iter_num]="$description"
dbg "iteration number: $iter_num"
dbg "1: section '${SECTION[$iter_num]}'"
dbg "2: short_name '${SHORT_NAME[$iter_num]}'"
dbg "3: full_name '${FULL_NAME[$iter_num]}'"
dbg "4: internal_name '${INTERNAL_NAME[$iter_num]}'"
dbg "5: arg_type '${ARG_TYPE[$iter_num]}'"
dbg "6: mandatary '${MANDATARY[$iter_num]}'"
dbg "6: mode '${ARG_MODE[$iter_num]}'"
dbg "7: description '${DESCRIPTION[$iter_num]}'"
iter_num=$(( iter_num + 1 ))
done < <(load_cli_res)
# $CLI_ARGS_POSSIBLE is a global index
# for all CLI input values and their properties,
# starting from zero (convenient for arrays)
CLI_ARGS_POSSIBLE=$(( iter_num - 1 ))
dbg "possible args are read"
}
#######################################
# Load configuration from a file and save parameters in an indexed array
# Globals:
# CLI_ARGS_POSSIBLE, ARG_VALUE[],
# CLI_ARGS_CNT, ARG_IS_GIVEN[]
# Arguments:
# (text) config file path
# Returns:
# None
#######################################
load_config_params () {
dbg "Load params from config file"
config_filename=${1}
if [[ ! -f "$config_filename" ]]; then
err "Config filename ${config_filename} not found."
exit 1
fi
eval $(${PGHREP_BIN} --mode loadcfg --path $config_filename 2>/dev/null)
for i in $(seq 0 ${CLI_ARGS_POSSIBLE}); do
var_name="CONFIG__"${FULL_NAME[$i]}
var_name=${var_name//-/_}
if [[ ! -z ${!var_name+x} ]]; then
value=$(eval echo "\$$var_name")
dbg "$var_name = $value"
if [[ "${ARG_TYPE[$i]}" = "None" ]]; then
ARG_VALUE[$i]="true"
ARG_IS_GIVEN[$i]="true"
else
if [[ -z "${value+x}" ]] || [[ "${value}" =~ $re ]]; then
err "Empty value is not allowed for variable '--${FULL_NAME[$i]}' in config file '${config_filename}'."
exit 1
fi
ARG_VALUE[$i]=$value
ARG_IS_GIVEN[$i]="true"
fi
fi
CLI_ARGS_CNT=$(( CLI_ARGS_CNT + 1 ))
done
return
}
#######################################
# Parse CLI arguments and save as an indexed array
# Globals:
# CLI_ARGS_POSSIBLE, SHORT_NAME[], FULL_NAME[], ARG_VALUE[],
# CLI_ARGS_CNT, ARG_IS_GIVEN[]
# Arguments:
# $@
# Returns:
# None
#######################################
process_cli_args() {
local cli_arg_cur_value i
local re='^-'
local while_loops_cnt=0
local argvalue
dbg "Valid CLI args possible count: ${CLI_ARGS_POSSIBLE}+1"
CLI_ARGS_CNT=0
while [[ ! -z "${1+x}" ]]; do
# print help if first argument matches regular expression
local help_re="(-+(help|usage|\?))|(help|usage|\?)"
if [[ "${1}" =~ $help_re ]]; then
usage "Help" "0"
exit 0
fi
if [[ "${AVAILABLE_MODES[@]}" =~ "${1}" ]]; then
ARG_VALUE[0]="${1}"
ARG_IS_GIVEN[0]="true"
shift 1
continue
fi
# avoid infinite loop if argument is unknown
while_loops_cnt=$(( while_loops_cnt + 1 ))
[[ $while_loops_cnt -gt $(( CLI_ARGS_POSSIBLE * 2 )) ]] && break
# first, error if argument is unknown:
local arg_is_valid="false"
for i in $(seq 0 ${CLI_ARGS_POSSIBLE}); do
if [[ "${1}" = "-${SHORT_NAME[$i]}" ]] || [[ "${1}" = "--${FULL_NAME[$i]}" ]]; then
arg_is_valid="true"
break
fi
done
if [[ "${arg_is_valid}" == "false" ]]; then
err "invalid argument '${1}'"
exit 1
fi
# compare given argument to all possible arguments from cli.conf
for i in $(seq 0 ${CLI_ARGS_POSSIBLE}); do
if [[ ! -z "${1+x}" ]]; then
case $1 in
"-${SHORT_NAME[$i]}" | "--${FULL_NAME[$i]}" )
# argument without value (like '--force', ARG_TYPE is 'None')
if [[ "${1}" = "-c" ]] || [[ "${1}" = "--config" ]]; then
# parse yaml config file and load params
load_config_params ${2}
shift 2
continue
fi
if [[ "${ARG_TYPE[$i]}" = "None" ]]; then
ARG_VALUE[$i]="true"
ARG_IS_GIVEN[$i]="true"
shift 1
# argument with value
else
if [[ -z "${2+x}" ]]; then
err "empty value for variable '--${FULL_NAME[$i]}'"
exit 1
fi
if [[ "${2}" =~ $re ]]; then
err "empty value for variable '--${FULL_NAME[$i]}'"
exit 1
fi
ARG_VALUE[$i]="${2}"
ARG_IS_GIVEN[$i]="true"
dbg "${SHORT_NAME[$i]} 45 ${ARG_VALUE[$i]}"
shift 2
fi
CLI_ARGS_CNT=$(( CLI_ARGS_CNT + 1 ))
esac
fi
done
done
dbg "given arguments count: '$CLI_ARGS_CNT'"
}
#######################################
# Validate single argument type
# Globals:
# None
# Arguments:
# $1, $2, $3
# Returns:
# None
#######################################
validate_arg_type() {
local name="$1"
local type="$2"
local value="$3"
local re
if [ -z "$name" -o -z "$type" -o -z "$value" ]; then
err "name: '$name', type: '$type', value: '$value'"
fi
if [[ "$type" = "number" ]]; then
re='^[0-9]+$'
if ! [[ $value =~ $re ]] ; then
err "'$name' = '$value' => is not a '$type' (${re})"
exit 1
fi
elif [ $type = "word" ]; then
re='^[a-zA-Z0-9_-]+$'
if ! [[ $value =~ $re ]]; then
err "'$name' = '$value' => is not a '$type' (${re})"
exit 1
fi
elif [ $type = "alnum" ]; then
re='^[a-zA-Z0-9\.]+$'
if ! [[ $value =~ $re ]]; then
err "'$name' = '$value' => is not a '$type' (${re})"
exit 1
fi
elif [[ $type = "uri" ]]; then
re='^[a-zA-Z\;\:\\\/]+.*'
if ! [[ $value =~ $re ]]; then
err "'$name' = '$value' => is not a '$type' (${re})"
exit 1
fi
elif [[ $type = "filepath" ]]; then
re='.*'
if ! [[ $value =~ $re ]]; then
err "'$name' = '$value' => is not a '$type' (${re})"
exit 1
fi
elif [[ $type = "text" ]]; then
re='^[a-zA-Z0-9\;\.\s\\\/]+.*'
if ! [[ $value =~ $re ]]; then
err "'$name' = '$value' => is not a '$type' (${re})"
exit 1
fi
elif [[ $type = "None" ]]; then
true
else
err "'$name' = '$value' => unknown argument type, validation error"
exit 1
fi
}
#######################################
# Generate psql command
# Globals:
# PSQL_CONN_OPTIONS, HOST, OPTIONS_ERROR_EXIT
# USERNAME, PGPASSWORD, DBNAME, STIMEOUT
# Arguments:
# None
# Returns:
# None
#######################################
generate_psql_cmd() {
local pg_port=$DEFAULT_PG_PORT
if [[ "$PGPORT" != "None" ]]; then
pg_port=$PGPORT
fi
# custom UNIX domain socket directory for PostgreSQL
local psql_unix_socket_option=""
if [[ "${PGSOCKET}" != "None" ]]; then
psql_unix_socket_option=" --host '${PGSOCKET}' "
fi
# custom psql binary path support
local psql_bin="psql"
if [[ "${PSQLBINARY}" != "None" ]]; then
psql_bin="${PSQLBINARY}"
fi
# generate the PGPASSWORD prefix (for substitution) only if PGPASSWORD is set
if [[ ! -z ${PGPASSWORD+x} ]]; then
local pgpas_subst="PGPASSWORD=\"${PGPASSWORD}\" " # trailing whitespace is intentional
else
local pgpas_subst=""
fi
# use default Postgres username or not
local user_substr=""
if [[ ! -z ${USERNAME+x} ]]; then
user_substr=" -U \"${USERNAME}\" "
fi
# Construct _PSQL macro for usage inside the check scripts
export PSQL_CONN_OPTIONS="--port=${pg_port} --dbname=${DBNAME} ${user_substr} ${psql_unix_socket_option}"
psql_command="${pgpas_subst}${psql_bin} -1 -X -At -q -v ON_ERROR_STOP=1 -P pager=off ${PSQL_CONN_OPTIONS}"
export _PSQL_NO_TIMEOUT="PGAPPNAME=checkup ${psql_command}"
export _PSQL="PGAPPNAME=checkup PGOPTIONS=\"-c statement_timeout=${STIMEOUT}s\" ${psql_command}"
dbg ""
dbg "PSQL_CONN_OPTIONS: $PSQL_CONN_OPTIONS"
dbg ""
}
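# generate_psql_cmd() illustration: assuming the defaults DBNAME=postgres, STIMEOUT=30,
# no custom port/socket/binary, and '--username postgres' given, _PSQL expands to roughly:
#   PGAPPNAME=checkup PGOPTIONS="-c statement_timeout=30s" psql -1 -X -At -q \
#     -v ON_ERROR_STOP=1 -P pager=off --port=5432 --dbname=postgres -U "postgres"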
#######################################
# Validate arguments and save input variables
# Globals:
# CLI_ARGS_POSSIBLE, SECTION[], SHORT_NAME[],
# FULL_NAME[], ARG_TYPE[], MANDATARY[], DESCRIPTION[],
# ARG_VALUE[], ARG_IS_GIVEN[], INTERNAL_*, CLI_ARGS_CNT,
# PSQL_CONN_OPTIONS, HOST, OPTIONS_ERROR_EXIT
# Arguments:
# None
# Returns:
# None
#######################################
validate_args() {
local i
local x=0
local need_fail_exit="false"
if [[ "${CLI_ARGS_CNT}" -lt 1 ]]; then
usage "No arguments are provided, at least one is needed." "1"
fi
for i in $(seq 0 ${CLI_ARGS_POSSIBLE}); do
if [[ ! -z "${ARG_IS_GIVEN[$i]+x}" ]]; then
# generate dynamic variables like "$HOST" or "$PSQLBINARY"
# from './resources/cli.conf' (INTERNAL_NAME)
eval "export ${INTERNAL_NAME[$i]}=\"${ARG_VALUE[$i]}\""
validate_arg_type "${FULL_NAME[$i]}" "${ARG_TYPE[$i]}" "${ARG_VALUE[$i]}"
else
export "${INTERNAL_NAME[$i]}=None"
fi
done
# fill default (not given) psql connection related variables
[[ "${DBNAME}" = "None" ]] && export DBNAME=postgres
[[ "${STIMEOUT}" = "None" ]] && export STIMEOUT=30 # statement timeout
[[ "${USERNAME}" = "None" ]] && export USERNAME=""
[[ "${LISTLIMIT}" = "None" ]] && export LISTLIMIT=${DEFAULT_LIST_LIMIT}
[[ "${CONNTIMEOUT}" = "None" ]] && export CONNTIMEOUT=${DEFAULT_CONNECTION_TIMEOUT} # connection timeout
if [[ "${MODE}" = "None" ]]; then
export MODE="run"
ARG_VALUE[0]="run"
ARG_IS_GIVEN[0]="true"
fi
generate_psql_cmd
if ([[ "$HTML" == "true" ]] || [[ "$PDF" == "true" ]]); then
PANDOC=$(which pandoc || echo -n "0");
if [[ "$PANDOC" == "0" ]]; then
echo "[$(date +'%Y-%m-%dT%H:%M:%S%z')] ERROR: 'pandoc' not found. Cannot generate PDF/HTML." >&2
exit 1
fi
fi
if [[ "$PDF" == "true" ]]; then
WKHTMLTOPDF=$(which wkhtmltopdf || echo -n "0");
if [[ "$WKHTMLTOPDF" == "0" ]]; then
echo "[$(date +'%Y-%m-%dT%H:%M:%S%z')] ERROR: 'wkhtmltopdf' not found. Cannot generate PDF." >&2
exit 1
fi
wkhtmltopdf_ver_resp=$(wkhtmltopdf -V)
wkhtmltopdf_ver_resp_lines=${wkhtmltopdf_ver_resp// /\\n}
wkhtmltopdf_current_ver=$(echo -e $wkhtmltopdf_ver_resp_lines | awk '/([0-9.]+)/')
wkhtmltopdf_required_ver="0.12.4"
if [ "$(printf '%s\n' "$wkhtmltopdf_required_ver" "$wkhtmltopdf_current_ver" | sort -V | head -n1)" != "$wkhtmltopdf_required_ver" ]; then
echo "[$(date +'%Y-%m-%dT%H:%M:%S%z')] ERROR: 'wkhtmltopdf' version is outdated. Update to $wkhtmltopdf_required_ver or newer. See README. Cannot generate PDF." >&2
exit 1
fi
fi
# error if mandatory options are not set (print as a stack)
local buf=""
for i in $(seq 0 ${CLI_ARGS_POSSIBLE}); do
if [[ "${MANDATARY[$i]}" == "mandatory" ]] && [[ -z "${ARG_IS_GIVEN[$i]+x}" ]] ; then
if ([[ "${ARG_MODE[$i]}" == "all" ]] || ( [[ "${ARG_MODE[$i]}" != "all" ]] && [[ "${ARG_MODE[$i]}" =~ "${MODE}" ]])); then
# please do not change the alignment of the 'buf' variable text
buf="$buf
mandatory option '--${FULL_NAME[$i]}' is not set"
need_fail_exit=true
OPTIONS_ERROR_EXIT=true
fi
fi
done
if [[ "$HOST" == "None" ]] && [[ "$SSHHOST" == "None" ]] &&
[[ "$PGHOST" == "None" ]] && ([[ "$MODE" == "collect" ]] || [[ "$MODE" == "run" ]]) ; then
buf="$buf
at least one of options '--hostname', '--ssh-hostname' or '--pg-hostname' must be set"
# mandatory option '--hostname' is not set"
need_fail_exit=true
OPTIONS_ERROR_EXIT=true
fi
local hosts=0
[[ "$SSHHOST" != "None" ]] && hosts=$((hosts + 1))
[[ "$PGHOST" != "None" ]] && hosts=$((hosts + 1))
[[ "$HOST" != "None" ]] && hosts=$((hosts + 1))
if [[ $hosts -gt 1 ]]; then
buf="$buf
only one of options '--hostname', '--ssh-hostname' or '--pg-hostname' may be used"
need_fail_exit=true
OPTIONS_ERROR_EXIT=true
fi
if [[ "$SSHPORT" != "None" ]] && ([[ "$PGHOST" != "None" ]] || [[ "$HOST" != "None" ]]) ; then
buf="$buf
'--ssh-port' may be used only with '--ssh-hostname'"
need_fail_exit=true
OPTIONS_ERROR_EXIT=true
fi
if [[ "$need_fail_exit" = "true" ]]; then
usage "$buf" "1"
fi
}
#######################################
# Generate usage/help
# Globals:
# CLI_ARGS_POSSIBLE, FULL_NAME[], SECTION[]
# SCRIPT_NAME, SHORT_NAME[], DESCRIPTION[]
# Arguments:
# description, exit_code
# Returns:
# (text) stdout/stderr
#######################################
usage() {
local i
local description="$1"
local exit_code="$2"
local exit_code=${exit_code:=0}
local out_descriptor
local re="[a-zA-Z]"
if [[ ! "$description" =~ $re ]]; then
err "First argument of 'usage' must be a text description"
exit 1
fi
# if error: print reason before 'Usage:'
if [[ "$exit_code" -ne "0" ]]; then
out_descriptor="2" # STDERR
echo "ERROR: " >&${out_descriptor}
echo " $description" >&${out_descriptor}
echo >&${out_descriptor}
else
out_descriptor="1" # STDOUT
# help part starts here
echo >&${out_descriptor}
echo "POSTGRES-CHECKUP collects deep diagnostics of a Postgres database's health." >&${out_descriptor}
echo "Project home: https://gitlab.com/postgres-ai-team/postgres-checkup." >&${out_descriptor}
echo >&${out_descriptor}
fi
echo "Usage:" >&${out_descriptor}
echo " ${SCRIPT_NAME} OPTION [OPTION] ..." >&${out_descriptor}
echo " ${SCRIPT_NAME} help" >&${out_descriptor}
if [[ "$exit_code" -ne "0" ]]; then
exit "$exit_code"
fi
echo >&${out_descriptor}
echo "postgres-checkup can separately collect, process and upload data to server. " >&${out_descriptor}
echo "You can set the working mode with --mode option." >&${out_descriptor}
echo "Available values for mode: 'collect', 'process', 'upload', 'run'." >&${out_descriptor}
echo "Mode 'run' executes collecting and processing at once, it is a default mode." >&${out_descriptor}
# Printing CLI options starts here
# calc max size of FULL_NAME[] for text alignment
local max_name_len=0
for i in $(seq 0 ${CLI_ARGS_POSSIBLE}); do
bytlen=${#FULL_NAME[$i]}
if [[ "$bytlen" -ge "$max_name_len" ]]; then
max_name_len=$bytlen
fi
done
local space
local prev_section="Misc"
for i in $(seq 0 ${CLI_ARGS_POSSIBLE}); do
if [[ "${SECTION[$i]}" != "$prev_section" ]] || [[ -z ${SECTION[0]} ]] ; then
echo >&${out_descriptor}
echo "${SECTION[$i]}:" >&${out_descriptor}
fi
[[ "${SHORT_NAME[$i]}" = "None" ]] && SHORT_NAME[$i]=" "
if [[ "${SHORT_NAME[$i]}" = " " ]]; then
echo -n " "${SHORT_NAME[$i]} >&${out_descriptor}
else
echo -n " -"${SHORT_NAME[$i]}"," >&${out_descriptor}
fi
curr_byte_len=${#FULL_NAME[$i]}
# print space padding
for f in $(seq 0 $(( max_name_len - curr_byte_len + 2 )) ); do
echo -n " " >&${out_descriptor}
done
echo -n " --"${FULL_NAME[$i]} >&${out_descriptor}
echo " "${DESCRIPTION[$i]} >&${out_descriptor}
# print options about this usage/help and additional info in the last iteration
curr_byte_len="help"
curr_byte_len=${#curr_byte_len}
if [[ "$i" -eq "$CLI_ARGS_POSSIBLE" ]]; then
echo -n " -?," >&${out_descriptor}
for f in $(seq 0 $(( max_name_len - curr_byte_len + 2 )) ); do
echo -n " " >&${out_descriptor}
done
echo -n " --help" >&${out_descriptor}
echo " this help" >&${out_descriptor}
fi
prev_section=${SECTION[$i]}
done
# Print example
echo >&${out_descriptor}
echo "Example:" >&${out_descriptor}
echo " PGPASSWORD=mypasswd ./${SCRIPT_NAME} collect -h [ssh_user]@host_to_connect_via_ssh \\"
echo " --username ${USER} --dbname postgres \\"
echo " --project dummy ${BOLD}-e %EPOCH_NUMBER%${RESET}" >&${out_descriptor}
echo >&${out_descriptor}
echo "Comments, ideas, bug reports? https://gitlab.com/postgres-ai/postgres-checkup" >&${out_descriptor}
exit $exit_code
}
#######################################
# Generate json report
# Globals:
# CURRENT_CHECK_FNAME, SCRIPT_DIR, PROJECT,
# HOST, JSON_REPORTS_DIR, TIMESTAMP_DIR,
# TIMESTAMPTZ, MD_REPORTS_DIR
# Arguments:
# input, check_id
# Returns:
# (text) stdout/stderr
#######################################
generate_report_json() {
local input_json="$1"
local check_id="$2"
local check_name="$3"
local epoch="null"
[[ -z ${3+x} ]] && err "function needs 3 arguments"
# insert json object data into template
local template_fname="${SCRIPT_DIR}/resources/templates/report.json"
local tmp_input_json_fname=$(mktemp "${SCRIPT_DIR}"/artifacts/${check_id}_tmp_XXXXXX)
# save function's input as a temporary file
echo "$input_json" > "$tmp_input_json_fname"
# final report file name
local json_output_fname="${JSON_REPORTS_DIR}/${check_id}_${check_name}.json"
# use template or existing file
if [[ -f "$json_output_fname" ]]; then
local json_input_fname="${json_output_fname}"
else
local json_input_fname="${template_fname}"
fi
local tmp_output_json_fname=$(mktemp "${JSON_REPORTS_DIR}"/${check_id}_${check_name}_tmp_XXXXXX)
jq -r \
--argfile Results "${tmp_input_json_fname}" \
--arg CheckId "${check_id}" \
--arg CheckName "${check_name}" \
--arg TimestampTz "${TIMESTAMPTZ}" \
--arg Host "${HOST}" \
--arg Project "${PROJECT}" \
--arg Database "${DBNAME}" \
'.checkId = $CheckId | .name = $CheckName | ."timestamptz" = $TimestampTz | ."project" = $Project | ."database" = $Database | .results += { ($Host): { data: $Results } }' \
"${json_input_fname}" \
> "${tmp_output_json_fname}"
mv "${tmp_output_json_fname}" "${json_output_fname}"
rm "$tmp_input_json_fname"
# extend check for current host with actual 'nodes.json' inside a json report
tmp_output_json_fname=$(mktemp "${JSON_REPORTS_DIR}"/${check_id}_${check_name}_tmp_ex_XXXXXX)
jq --argfile nodes_json "${PROJECT_DIR}/nodes.json" \
'.results.'\"${HOST}\"'."nodes.json" = $nodes_json' \
"${json_output_fname}" \
> "$tmp_output_json_fname"
mv "$tmp_output_json_fname" "${json_output_fname}"
# update the json report by attaching 'nodes.json' at the top of the report
tmp_output_json_fname=$(mktemp "${JSON_REPORTS_DIR}"/${check_id}_${check_name}_tmp_ex_XXXXXX)
jq --argfile nodes_json "${PROJECT_DIR}/nodes.json" \
'.last_nodes_json = $nodes_json' \
"${json_output_fname}" \
> "$tmp_output_json_fname"
mv "$tmp_output_json_fname" "${json_output_fname}"
msg "JSON report saved at: '${json_output_fname}'"
}
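# generate_report_json() example: a check script named F004_heap_bloat.sh produces
# '${JSON_REPORTS_DIR}/F004_heap_bloat.json' (created from resources/templates/report.json
# on the first run), with this host's output stored under .results["${HOST}"].data and
# the current nodes.json attached as .last_nodes_json.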
#######################################
# Check whether the host is in recovery mode
#
# Do not use this function before 'host_pre_start_checks()'
#
# Globals:
# HOST
# Arguments:
# None
# Returns:
# 0 if standby (in recovery), 12 if master
#######################################
is_in_recovery() {
local res="$(${CHECK_HOST_CMD} "${_PSQL} -c \"select * from pg_is_in_recovery()\"")"
if [[ "$res" = "f" ]]; then
dbg "host $HOST is 'master'"
return 12
elif [[ "$res" = "t" ]]; then
dbg "host $HOST is 'standby'"
return 0
else
msg "ERROR: Cannot connect to the host: ${HOST}"
exit 1
fi
return 13
}
#######################################
# Check the number of objects in the database:
# return 0 if the database has more than LARGE_DB_ITEMS_COUNT
# (100000) objects, 1 otherwise.
#
# Do not use this function before 'host_pre_start_checks()'
#
# Globals:
# _PSQL, LARGE_DB_ITEMS_COUNT
# Arguments:
# None
# Returns:
# Integer
#######################################
is_large_database() {
local res="$(${CHECK_HOST_CMD} "${_PSQL} -c \"select count(*) from pg_class\"")"
if [[ "$res" -gt $LARGE_DB_ITEMS_COUNT ]]; then
return 0
else
return 1
fi
}
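# is_large_database() is used as a predicate via its exit code, e.g.:
#   if is_large_database; then export IS_LARGE_DB=1; else export IS_LARGE_DB=0; fi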
#######################################
# Check binary dependencies
# Globals:
# KERNEL_NAME, OS_NAME, timeout()
# Arguments:
# None
# Returns:
# (text) stdout/stderr
#######################################
check_bin_deps() {
# detect OS
export KERNEL_NAME=$(uname)
if [[ "${KERNEL_NAME}" =~ "Darwin" ]]; then
export OS_NAME="macOS"
dbg "This is macOS"
elif [[ "${KERNEL_NAME}" =~ "Linux" ]]; then
export OS_NAME="Linux"
dbg "This is Linux"
else
export OS_NAME="Unknown"
dbg "Can't detect OS name"
fi
###### Checking the existence of commands #####
# timeout
if ! $(which gtimeout >/dev/null 2>&1) && ! $(which timeout >/dev/null 2>&1); then
err "Can't find the 'timeout' executable. Please install it:"
if [[ "${KERNEL_NAME}" = "Darwin" ]]; then
err "${OS_NAME}: 'brew install coreutils'"
elif [[ "${KERNEL_NAME}" = "Linux" ]]; then
err "Debian/Ubuntu GNU/${OS_NAME}: 'sudo apt-get install coreutils'"
err "RHEL/CentOS GNU/${OS_NAME}: 'sudo yum install coreutils'"
fi
return 1
else
# redefine command (alias won't work inside the script)
if [[ "${KERNEL_NAME}" = "Darwin" ]]; then
timeout() {
gtimeout "$@"
}
fi
fi
# awk
if ! $(which awk >/dev/null 2>&1); then
err "Can't find the 'awk' executable. Please install it:"
if [[ "${KERNEL_NAME}" = "Darwin" ]]; then
err "${OS_NAME}: 'brew install gawk'"
elif [[ "${KERNEL_NAME}" = "Linux" ]]; then
err "Debian/Ubuntu GNU/${OS_NAME}: 'sudo apt-get install gawk'"
err "RHEL/CentOS GNU/${OS_NAME}: 'sudo yum install gawk'"
fi
return 1
fi
# jq
if ! $(which jq >/dev/null 2>&1); then
err "Can't find the 'jq' executable. Please install it:"
if [[ "${KERNEL_NAME}" = "Darwin" ]]; then
err "${OS_NAME}: 'brew install jq'"
elif [[ "${KERNEL_NAME}" = "Linux" ]]; then
err "Debian/Ubuntu GNU/${OS_NAME}: 'sudo apt-get install jq'"
err "RHEL/CentOS GNU/${OS_NAME}: 'sudo yum install jq'"
fi
return 1
fi
# jq version
re="jq([0-9]+.[0-9]+).+"
local jq_version_full=$(jq --version)
local version="" # short form (e.g. '1.11')
local jq_version_num="" # get only first two numbers devided by '.'
if [[ "${jq_version_full}" =~ $re ]]; then
version="${BASH_REMATCH[1]}"
local major=${version%.*}
local minor=${version#*.}
jq_version_num="${major}${minor}"
if [[ "${jq_version_num}" -lt "15" ]]; then
err "Unsupported jq version '${jq_version_full}'"
err "Please install jq version >= '1.5'"
return 1
fi
fi
}
#######################################
# Glue all .md files together
# (makes final report)
# Globals:
# PROJECT_DIR, FULL_REPORT_FNAME, EPOCH,
# DBNAME, HOST, MD_REPORTS_DIR
# Arguments:
# None
# Returns:
# Integer
#######################################
glue_md_reports() {
# final report path and name
local out_fname="${MD_REPORTS_DIR}/${FULL_REPORT_FNAME}"
local epoch=$(jq -r '.last_check.epoch' ${PROJECT_DIR}/nodes.json)
local database=$(jq -r '.last_check.database' ${PROJECT_DIR}/nodes.json)
# do not re-generate full report if '--file' is given
[[ "${FILE}" != "None" ]] && return 0
# make header
echo "# PostgreSQL Checkup. Project: '${PROJECT}'. Database: '${database}'" > "${out_fname}"
echo "## Epoch number: '${epoch}'" >> "${out_fname}"
echo "NOTICE: while most reports describe the “current database”, some of them may contain cluster-wide information describing all databases in the cluster." >> "${out_fname}"
echo >> "${out_fname}"
echo "Last modified at: " $(date +'%Y-%m-%d %H:%M:%S %z') >> "${out_fname}"
echo >> "${out_fname}"
tableOfContents=""
content=""
summaryTable="\n---\n### Issues found ###\n\nThe empty lines represent reports for which Conclusions and Recommendations are not yet implemented.\n\n|Report|P1|P2|P3|\n|-----|---|---|---|"
echo "" >> "${out_fname}"
echo "<a name=\"postgres-checkup_top\">&nbsp;</a>" >> "${out_fname}"
echo "### Table of contents ###" >> "${out_fname}"
# generate table of contents and glue reports together
for cur_report in "${MD_REPORTS_DIR}"/*.md; do
[[ -e "${cur_report}" ]] || continue
[[ "${cur_report}" != "${MD_REPORTS_DIR}/${FULL_REPORT_FNAME}" ]] || continue
title=$(head -n 1 ${cur_report})
title="${title/\#/}"
title="${title/\#/}"
title="${title#"${title%%[![:space:]]*}"}"
title="${title%"${title##*[![:space:]]}"}"
checkId=$(echo $title | cut -c 1-4)
tableOfContents="$tableOfContents\n[$title](#postgres-checkup_$checkId) "
content="$content\n\n\n---\n<a name=\"postgres-checkup_$checkId\">&nbsp;</a>\n[Table of contents](#postgres-checkup_top)"
report=$(cat "${cur_report}")
content="$content\n$report"
fileCheckId=$checkId
if [[ "$fileCheckId" =~ "K" ]]; then
if [[ "$fileCheckId" != "K003" ]]; then
summaryTable="${summaryTable}\n|[$title](#postgres-checkup_$checkId)||||"
continue
fi
fileCheckId="K000";
fi
jsonFile=$(ls ${JSON_REPORTS_DIR}/${fileCheckId}*.json)
p1=$(jq '.recommendations | [.[]|.Message|startswith("[P1]")] | map(select(. == true)) | length' ${jsonFile} 2>/dev/null \
|| jq 'if .p1 == false then 0 elif .p1 == true then "!" else "" end' ${jsonFile} 2>/dev/null || echo "")
p2=$(jq '.recommendations | [.[]|.Message|startswith("[P2]")] | map(select(. == true)) | length' ${jsonFile} 2>/dev/null \
|| jq 'if .p2 == false then 0 elif .p2 == true then "!" else "" end' ${jsonFile} 2>/dev/null || echo "")
p3=$(jq '.recommendations | [.[]|.Message|startswith("[P3]")] | map(select(. == true)) | length' ${jsonFile} 2>/dev/null \
|| jq 'if .p3 == false then 0 elif .p3 == true then "!" else "" end' ${jsonFile} 2>/dev/null || echo "")
summaryTable="${summaryTable}\n|[$title](#postgres-checkup_$checkId)|${p1//\"/}|${p2//\"/}|${p3//\"/}|"
done
echo -e "$tableOfContents" >> "${out_fname}"
echo -e "$summaryTable\n\n" >> "${out_fname}"
echo -e "$content" >> "${out_fname}"
}
#######################################
# Configure SSH connection
# Globals:
# CHECK_HOST_CMD, SSH_SUPPORT, SSHPORT, PORT
# Arguments:
# (text) host name
# Returns:
# Integer
#######################################
configure_ssh_connection() {
hostname=$1
if [[ "$SSHPORT" == "None" ]]; then
if [[ "$PORT" != "None" ]]; then
SSHPORT=$PORT
else
SSHPORT=$DEFAULT_SSH_PORT
fi
fi
if native_hostname=$(ssh -p ${SSHPORT} -o ConnectTimeout=10 ${hostname} "hostname" 2>/dev/null); then
# ssh to remote host and use local psql (default)
export CHECK_HOST_CMD="ssh ${hostname}"
export SSH_SUPPORT="true"
return 0
else
return 1
fi
}
#######################################
# Configure psql connection
# Globals:
# CHECK_HOST_CMD, SSH_SUPPORT, PGPORT, PORT
# Arguments:
# (text) host name
# Returns:
# Integer
#######################################
configure_psql_connection() {
hostname=$1
if [[ "$PGPORT" == "None" ]]; then
if [[ "$PORT" != "None" ]]; then
PGPORT=$PORT
else
PGPORT=$DEFAULT_PG_PORT
fi
generate_psql_cmd
fi
export CHECK_HOST_CMD="bash -e -u -o pipefail -c"
# use local psql and connect to remote postgres without ssh
local re="127\.0"
if [[ "${hostname}" =~ $re || "${hostname}" = "localhost" ]]; then
# keep _PSQL and _PSQL_NO_TIMEOUT as is, use UNIX domain socket for psql
true
else
# use TCP for psql
export _PSQL="PGCONNECT_TIMEOUT=${CONNTIMEOUT} ${_PSQL} -h ${hostname}"
export _PSQL_NO_TIMEOUT="${_PSQL_NO_TIMEOUT} -h ${hostname}"
fi
export SSH_SUPPORT="false"
}
#######################################
# Check rights, set global variables, etc.
# Globals:
# HOST, CHECK_HOST_CMD
# SSH_SUPPORT
# Arguments:
# None
# Returns:
# Integer
#######################################
host_pre_start_checks() {
if [[ "$SSHHOST" != "None" ]]; then
# ssh mode
if configure_ssh_connection $SSHHOST; then
msg "Connection type (specified): SSH ($SSHHOST:$SSHPORT)"
export HOST=$SSHHOST
else
msg "Connection type (specified): SSH ($SSHHOST:$SSHPORT)"
msg "ERROR: Cannot connect to the host: ${SSHHOST}:${SSHPORT} via SSH"
exit 1
fi
elif [[ "$PGHOST" != "None" ]]; then
# psql mode
if configure_psql_connection $PGHOST; then
msg "Сonnection type (specified): PostgreSQL ($PGHOST:$PGPORT)"
export HOST=$PGHOST
fi
else
# auto detect connection command
if configure_ssh_connection $HOST; then
msg "Connection type (auto-detected): SSH ($HOST:$SSHPORT)"
else
msg "Cannot connect to the host: ${HOST}:${SSHPORT} via SSH"
configure_psql_connection $HOST;
msg "Connection type (auto-detected): PostgreSQL ($HOST:$PGPORT)"
fi
export HOST
fi
dbg "CHECK_HOST_CMD: '${CHECK_HOST_CMD}'"
dbg "HOST: '${HOST}'"
dbg "_PSQL: '${_PSQL}'"
dbg "_PSQL_NO_TIMEOUT: '${_PSQL_NO_TIMEOUT}'"
}
#######################################
# Update/create 'nodes.json' in the project dir
# Fill/update hostname, role, internal alias
# Start/update 'epoch' of the check
# Globals:
# PROJECT_DIR, HOST,
# ALIAS_NAME, ALIAS_INDEX, ROLE, JSON_REPORTS_DIR
# TIMESTAMP_DIR, TIMESTAMPTZ, SHORT_DIR_NAME,
# MD_REPORTS_DIR
# Arguments:
# None
# Returns:
# (text) stdout/stderr
#######################################
update_nodes_json() {
local prev_role=""
local prev_epoch=""
local prev_role=""
local prev_dir=""
local host_is_new="false"
ALIAS_NAME=""
ALIAS_INDEX=""
ROLE=""
# check whether the host's role has changed
if is_in_recovery; then
ROLE="standby"
else
ROLE="master"
fi
# if 'nodes.json' does not exist, generate an alias for the first host
if [[ ! -f "${PROJECT_DIR}/nodes.json" ]]; then
ALIAS_NAME="node1"
ALIAS_INDEX="1"
prev_role="${ROLE}"
JSON_REPORTS_DIR="${PROJECT_DIR}/json_reports/${EPOCH}_${TIMESTAMP_DIR}"
MD_REPORTS_DIR="${PROJECT_DIR}/md_reports/${EPOCH}_${TIMESTAMP_DIR}"
SHORT_DIR_NAME="${EPOCH}_${TIMESTAMP_DIR}"
local input_json_fname="${SCRIPT_DIR}/resources/templates/nodes.json"
# create dirs, etc.
else
# error if two dir names like '${EPOCH}_*' exist
local cur_epochs_count="0"
cur_epochs_count=$(find "${PROJECT_DIR}/json_reports/" -type d -name "${EPOCH}_*" | wc -l)
if [[ "$cur_epochs_count" -ge "2" ]]; then
err "more then one directories witn name '${EPOCH}_*' exist:"
find "${PROJECT_DIR}/json_reports/" -type d -name "${EPOCH}_*" >&2 || true
exit 1
fi
# read current nodes.json
ALIAS_NAME=$(jq -r '.hosts.'\"${HOST}\"'.internal_alias' "${PROJECT_DIR}/nodes.json")
# mark the host as new if no alias is found for HOST in nodes.json
if [[ "${ALIAS_NAME}" = "null" ]]; then
host_is_new="true"
dbg "host '${HOST}' is a new host"
fi
ALIAS_INDEX=$(jq -r '.hosts.'\"${HOST}\"'.index' "${PROJECT_DIR}/nodes.json")
prev_role=$(jq -r '.hosts.'\"${HOST}\"'.role' "${PROJECT_DIR}/nodes.json")
prev_epoch=$(jq -r '.last_check.epoch' "${PROJECT_DIR}/nodes.json")
# create new epoch dirs or use existing ones
if test -d "${PROJECT_DIR}/json_reports/${EPOCH}"_*/../; then
SHORT_DIR_NAME=$(find "${PROJECT_DIR}/json_reports/" -type d -name ${EPOCH}_*)
SHORT_DIR_NAME=$(basename "${SHORT_DIR_NAME}")
JSON_REPORTS_DIR="${PROJECT_DIR}/json_reports/${SHORT_DIR_NAME}"
MD_REPORTS_DIR="${PROJECT_DIR}/md_reports/${SHORT_DIR_NAME}"
else
SHORT_DIR_NAME="${EPOCH}_${TIMESTAMP_DIR}"
JSON_REPORTS_DIR="${PROJECT_DIR}/json_reports/${SHORT_DIR_NAME}"
MD_REPORTS_DIR="${PROJECT_DIR}/md_reports/${SHORT_DIR_NAME}"
fi
local input_json_fname="${PROJECT_DIR}/nodes.json"
fi
export JSON_REPORTS_DIR MD_REPORTS_DIR SHORT_DIR_NAME
mkdir -p "${JSON_REPORTS_DIR}"
local role_changed_at="never"
if [[ "${prev_role}" != "${ROLE}" ]] && [[ "$prev_role" != "null" ]]; then
msg "WARNING: important difference between checks detected:"
msg "WARNING: host's role has been changed from '${prev_role}' to '${ROLE}'"
role_changed_at="${TIMESTAMPTZ}"
fi
# generate new ALIAS_INDEX and ALIAS_NAME for a new host
if [[ "$host_is_new" = "true" ]]; then
# get maximum index
local prev_index=$(jq -r '["\(.hosts | to_entries[] | .value | .index)" | tonumber] | max' "${PROJECT_DIR}/nodes.json")
local new_alias_index=$(( prev_index + 1 ))
local new_alias_name="node${new_alias_index}"
ALIAS_INDEX="${new_alias_index}"
ALIAS_NAME="${new_alias_name}"
fi
export ALIAS_NAME ALIAS_INDEX ROLE
dbg "ALIAS_NAME: '$ALIAS_NAME'"
dbg "ALIAS_INDEX: '$ALIAS_INDEX'"
dbg "ROLE: '$ROLE'"
# finally, fill nodes.json file
local result_fname="${PROJECT_DIR}/nodes.json"
local result_fname_tmp=$(mktemp "${PROJECT_DIR}"/nodes.json.tmp_XXXXXX)
jq --arg Hostname "${HOST}" \
--arg internal_alias "${ALIAS_NAME}" \
--arg index ${ALIAS_INDEX} \
--arg role "${ROLE}" \
--arg TimestampTz "${TIMESTAMPTZ}" \
--arg dir "${SHORT_DIR_NAME}" \
--arg epoch "${EPOCH}" \
--arg database "${DBNAME}" \
--arg role_change_detected_at "${role_changed_at}" \
'.hosts += { ($Hostname): { $internal_alias, $index, $role, $role_change_detected_at } } | .last_check = { epoch: $epoch, timestamptz: $TimestampTz, dir: $dir, database: $database }' \
"${input_json_fname}" \
> "${result_fname_tmp}"
mv "${result_fname_tmp}" "${result_fname}"
TIMESTAMP_DIRNAME=$(jq -r '.last_check.dir' "${result_fname}")
export TIMESTAMP_DIRNAME
}
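# update_nodes_json() result: nodes.json ends up shaped roughly like (illustrative values):
#   { "hosts":      { "db1": { "internal_alias": "node1", "index": "1", "role": "master",
#                              "role_change_detected_at": "never" } },
#     "last_check": { "epoch": "1", "timestamptz": "...", "dir": "1_2020_01_01T00_00_00_+0000",
#                     "database": "postgres" } }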
#######################################
# Perform checks
# All checks must return valid json into stdout
# Globals:
# CURRENT_CHECK_FNAME, SCRIPT_DIR, HOST, PROJECT_DIR,
# HOST, PGHREP_BIN
# and all exported internal globals
# Arguments:
# None
# Returns:
# (text) stdout/stderr
#######################################
run_checks() {
# perform pre-checks here
local pghrep_failed_md_stack=""
local check_failed_json_stack=""
export PROJECT_DIR="${SCRIPT_DIR}/artifacts/${PROJECT}"
test -d "${PROJECT_DIR}" || mkdir -p "${PROJECT_DIR}"
# perform all checks from './resources/checks/' directory
if is_in_recovery; then
ROLE="standby"
else
ROLE="master"
fi
declare -a skip_on_replica=("F002_autovacuum_wraparound" "F003_autovacuum_dead_tuples"
"F004_heap_bloat" "F005_index_bloat" "F008_autovacuum_resource_usage" "H001_invalid_indexes" "H003_non_indexed_fks"
"L001_table_sizes" "L003_integer_in_pk")
if is_large_database; then
dbg "The database has more than ${LARGE_DB_ITEMS_COUNT} objects. To speed up the performance, small objects will not be analyzed."
export IS_LARGE_DB=1
else
dbg "The database has less than ${LARGE_DB_ITEMS_COUNT} objects."
export IS_LARGE_DB=0
fi
msg
msg "########## Perform checks for host '${HOST}':"
local output
for CURRENT_CHECK_FNAME in "${SCRIPT_DIR}"/resources/checks/*_*.sh; do
[[ -e "${CURRENT_CHECK_FNAME}" ]] || continue
# override the check list with a single check file given via CLI
if [[ "${FILE}" != "None" ]]; then
CURRENT_CHECK_FNAME="${FILE}"
if ! [[ -e "${CURRENT_CHECK_FNAME}" ]]; then
err "Can't open file: '${CURRENT_CHECK_FNAME}'"
fi
fi
local CHECK_FNAME=$(basename "$CURRENT_CHECK_FNAME")
local CHECK_NAME="${CHECK_FNAME%.*}"
local skip_report="0"
for item in "${skip_on_replica[@]}"; do
if [[ $ROLE == "standby" && "${item}" == "$CHECK_NAME" ]]; then
skip_report="1"
fi
done
if [[ $skip_report == "1" ]]; then
dbg 'Report' $CHECK_NAME': skip on standby.'
continue
fi
msg
msg "=== Run on '${HOST}': ${CURRENT_CHECK_FNAME} ==="
local check_is_failed="false"
# fix timestamp before check
export TIMESTAMP_DIR=$(date +'%Y_%m_%dT%H_%M_%S_%z') # for use in directory names
export TIMESTAMPTZ=$(date +'%Y-%m-%d %H:%M:%S.0%z')
# a lot of magic happens here
update_nodes_json
# perform a check from file
output=$(set -euo pipefail ; source "$CURRENT_CHECK_FNAME") || check_is_failed="true"
dbg "is check failed?: $check_is_failed"
msg "=== End of running ==="
# extract checkId from check script path
local check_path_name="$CURRENT_CHECK_FNAME"
local check_id_tmp_a=${check_path_name%%_*.sh}
local check_id="${check_id_tmp_a##*/}"
local check_name_a="${check_path_name##*/}"
local check_name_b="${check_name_a#*_}"
local check_name="${check_name_b%.sh}"
if [[ "$check_is_failed" == "true" ]]; then
msg "WARNING: check '${check_id}' failed with error ^^^"
check_failed_json_stack="${check_failed_json_stack} ${check_id}_${check_name}.json"
if [[ "${FILE}" != "None" ]]; then
break
else
continue
fi
fi
# generate/extend report
generate_report_json "$output" "$check_id" "$check_name"
# if a check file is given via CLI, run only that single check
if [[ "${FILE}" != "None" ]]; then
break
fi
done
msg
msg "All checks have been finished for host '$HOST'!"
# print stacks with failed reports
if ! [[ -z "${check_failed_json_stack}" ]]; then
msg
msg "WARNING: Can't generate JSON reports (see errors above):"
local i=""
for i in ${check_failed_json_stack}; do
echo " '${i}'"
done
fi
}
#######################################
# Generate .md (markdown) reports.
# Additionally, if requested, using .md files, generate HTML and PDF reports.
# Globals:
# SCRIPT_DIR, PGHREP_BIN
# Arguments:
# None
# Returns:
# (text) stdout/stderr
#######################################
run_process() {
local pghrep_failed_md_stack=""
PROJECT_DIR="${SCRIPT_DIR}/artifacts/${PROJECT}"
local timestamp_dir=$(cat ${SCRIPT_DIR}/artifacts/${PROJECT}/nodes.json | jq -r '.last_check | .dir')
local epoch=$(cat ${SCRIPT_DIR}/artifacts/${PROJECT}/nodes.json | jq -r '.last_check | .epoch')
JSON_REPORTS_DIR="${PROJECT_DIR}/json_reports/${timestamp_dir}"
MD_REPORTS_DIR="${PROJECT_DIR}/md_reports/${timestamp_dir}"
mkdir -p "${MD_REPORTS_DIR}"
for json_file in `ls ${JSON_REPORTS_DIR}/*.json`; do
check_id=$(cat ${json_file} | jq -r '.checkId')
# if a check file is given via CLI, generate only that single report
if [[ "${FILE}" != "None" ]] && ! [[ "${FILE}" =~ "/${check_id}" ]]; then
continue
fi
local pghrep_output
if ! pghrep_output=$(${PGHREP_BIN} --checkdata="${json_file}" \
--outdir="${MD_REPORTS_DIR}" 2>&1) ; then
msg "WARNING: ${pghrep_output}"
pghrep_failed_md_stack="${pghrep_failed_md_stack} ${check_id}.md"
else
if [[ "${check_id}" == "K000" ]]; then
msg "${check_id}: markdown reports saved at:"
msg "'${MD_REPORTS_DIR}/K001.md'"
msg "'${MD_REPORTS_DIR}/K002.md'"
msg "'${MD_REPORTS_DIR}/K003.md'"
else
msg "${check_id}: markdown report saved at: '${MD_REPORTS_DIR}/${check_id}.md'"
fi
fi
done
if ! [[ -z "${pghrep_failed_md_stack}" ]]; then
msg
msg "WARNING: Can't generate Markdown reports (see errors above):"
local i=""
for i in ${pghrep_failed_md_stack}; do
echo " '${i}'"
done
fi
# generate final report
glue_md_reports
msg
if [[ "${FILE}" = "None" ]]; then
msg "Final .md report is ready at:"
echo " '${MD_REPORTS_DIR}/${FULL_REPORT_FNAME}'"
msg
fi
if [[ "$PDF" == "true" ]] && [[ -f "${MD_REPORTS_DIR}/${FULL_REPORT_FNAME}" ]]; then
./resources/md2pdf.sh --pdf ${MD_REPORTS_DIR}/${FULL_REPORT_FNAME}
fi
if [[ "$HTML" == "true" ]] && [[ -f "${MD_REPORTS_DIR}/${FULL_REPORT_FNAME}" ]]; then
./resources/md2pdf.sh --html ${MD_REPORTS_DIR}/${FULL_REPORT_FNAME}
fi
}
#######################################
# Upload reports to a remote server (Postgres.ai Platform API compatible).
# Globals:
# SCRIPT_DIR, UPLOADAPITOKEN, UPLOADAPIURL
# Arguments:
# None
# Returns:
# (text) stdout/stderr
#######################################
run_upload() {
PROJECT_DIR="${SCRIPT_DIR}/artifacts/${PROJECT}"
if [[ ! -z ${UPLOADAPITOKEN+x} ]] && [[ "${UPLOADAPITOKEN}" != "None" ]]; then
msg "Uploading the report to '${UPLOADAPIURL}' using Postgres.ai Platform API..."
if [[ $DEBUG == "true" ]] ; then
$(${PGHREP_BIN} --mode=upload --project=${PROJECT} --path="${PROJECT_DIR}" --token="${UPLOADAPITOKEN}" --apiurl="${UPLOADAPIURL}" --debug=1)
else
$(${PGHREP_BIN} --mode=upload --project=${PROJECT} --path="${PROJECT_DIR}" --token="${UPLOADAPITOKEN}" --apiurl="${UPLOADAPIURL}" --debug=0)
fi
fi
}
######### COMMANDS WRAPPERS ###########
ssh() {
local ssh_port=$DEFAULT_SSH_PORT
if [[ "$SSHPORT" != "None" ]]; then
ssh_port=$SSHPORT
fi
local ssh_timeout_options="-o ConnectTimeout=${CONNTIMEOUT} -o ServerAliveInterval=6 -o ServerAliveCountMax=5"
local ssh_options="-p ${ssh_port} -o StrictHostKeyChecking=no -o Compression=no -o BatchMode=yes ${ssh_timeout_options}"
local ssh_master_socket='/tmp/ssh_pg_check_%h_%p_%r'
local ssh_master_options="-o ControlMaster=auto -o ControlPersist=yes"
local ssh_persist=true
if [[ "$ssh_persist" = "true" ]]; then
ssh_options="$ssh_options -S $ssh_master_socket $ssh_master_options"
fi
if [[ "$@" == *" sudo "* ]] && [[ ! -z ${SSHSUDOPASSWORD+x} ]] ; then
command echo "$SSHSUDOPASSWORD" | command ssh -q -tt $ssh_options "$@"
else
command ssh $ssh_options "$@"
fi
}
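# Note on the ssh() wrapper above: with ControlMaster/ControlPersist the first call opens a
# shared socket at '/tmp/ssh_pg_check_<host>_<port>_<remote user>', which subsequent calls
# reuse; cleanup_and_exit() closes it with 'ssh -O exit' when the script exits.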
#######################################
main() {
set -ueo pipefail
trap error_handler ERR
trap cleanup_and_exit 1 2 13 15 EXIT
unset LANG LANGUAGE LC_ALL LC_CTYPE LC_COLLATE LC_MESSAGES LC_TIME LC_NUMERIC >/dev/null 2>&1 || true
export LC_ALL=C
local mode=""
declare -a newparams=()
if [[ ! -z ${1+x} ]]; then
mode="$1"
fi
if [[ ! -f "${PGHREP_BIN}" ]]; then
err "Cannot load '${PGHREP_BIN}'. The binary needs to be built."
exit 2
fi
read_possible_args
process_cli_args "$@"
validate_args
case $MODE in
"collect" )
host_pre_start_checks
run_checks
;;
"process" )
check_bin_deps
run_process
;;
"upload" )
check_bin_deps
run_upload
;;
"run" )
host_pre_start_checks
run_checks
check_bin_deps
run_process
;;
esac
msg "ALL DONE!"
exit 0
}
main "$@"
# last line of the file