#!/bin/sh # vi: set ft=sh noet ts=8 sw=8 :: Vi/ViM ############################################################ IDENT(1) # # $Title: Script to generate top-like statistics for BeeGFS I/O $ # $Copyright: 2020 Devin Teske. All rights reserved. $ # $FrauBSD: beegfstop/beegfstop 2020-07-19 19:47:32 -0700 freebsdfrau $ # ############################################################ INFORMATION # # In beegfstop, a ``view'' is the user's choice between -c, -s, or -u. # For example, `-u' asks beegfstop to display the ``user view'' where # statistics-displayed are on a per-user basis. # # The ``mode'' is the user's choice between viewing data input/output (this is # the default) and metadata operations (-m). # # The code is broken down into: # - Mode selection (user choice) # - View selection (user choice) # - Inspection of running system # - beegfs-ctl execution # - awk to process beegfs-ctl output # # Code navigating Search Terms/ST: # BEEGFSCTL Start of beegfs-ctl # CTLDATA beegfs-ctl data generation and processing # JSONDATA JSON data generation # OPS Data operations (read vs write) # POST beegfs-ctl post-processor (awk) # PRE Start of pre-processor (sh) # SORTING Sort routines # VIEWDATA View data generation # VIEWS View processing # ############################################################ DEFAULTS DEFAULT_INTERVAL=2 # seconds # # Sorting choices (case-insensitive): # total (in any mode; default sort column) # read write in out (without `-m') # stat statLI (with `-m' and either `-u' or `-c') # reqs qlen (with `-m' and `-s') # bsy (with `-M bsy' and `-s') # DEFAULT_SORT=total ############################################################ GLOBALS VERSION='$Version: 8.1.1 $' pgm="${0##*/}" # Program basename # # Global exit status # SUCCESS=0 FAILURE=1 # # Command-line options # COLOR=1 # -C DATA_METRIC= # -D metric[,metric] DEBUGGER= # -d INTERVAL=$DEFAULT_INTERVAL # -i sec META_METRIC= # -M metric[,metric] MODE=data # vs `meta' (-m) NO_NAMES= # -n NSAMPLES= # -N num 
OUTPUT_JSON= # -j RAW_VIEW= # -r REDACT=${BEEGFSTOP_REDACT:+1} # -R SHOW_ZERO_VALUES= # -V SHOW_BASIC= # -b SORT=$DEFAULT_SORT # -k col VIEW_CLIENT= # -c VIEW_SERVER= # -s VIEW_USER= # -u (default) WIDE_VIEW= # -w # # Miscellaneous # CONS=1 [ -t 1 ] || CONS= COLOR= # stdout is not a tty METRIC_LIST= MODE_DATA= MODE_META= VIEW= _METRIC_LIST= ############################################################ FUNCTIONS die() { local fmt="$1" if [ "$fmt" ]; then shift 1 # fmt printf "%s: $fmt\n" "$pgm" "$@" >&2 fi exit $FAILURE } usage() { local fmt="$1" local optfmt="\t%-12s %s\n" exec >&2 if [ "$fmt" ]; then shift 1 # fmt printf "%s: $fmt\n" "$pgm" "$@" fi printf "Usage: %s [-c|-s|-u] [OPTIONS]\n" "$pgm" printf "Options:\n" printf "$optfmt" "-b" "Show basic counts instead of rates." printf "$optfmt" "-C" "Always enable color." printf "$optfmt" "-c" "View read/write activity by client." printf "$optfmt" "-d" \ "Enable debugger. Repeat for mode debugging (4 max)." printf "$optfmt" "-h" "Print usage statement and exit." printf "$optfmt" "-i sec" \ "Set interval seconds. Default \`$DEFAULT_INTERVAL'." printf "$optfmt" "-j" "Output JSON formatted data." printf "$optfmt" "-k col" "Sort column (default \`$DEFAULT_SORT')." printf "$optfmt" "-M metric" "Show metadata metric." printf "$optfmt" "" "Optionally \`metricA,metricB' to compare metrics." printf "$optfmt" "-m" "Show metadata statistics." printf "$optfmt" "-N num" "Perform num samples and exit." printf "$optfmt" "-n" "Do not attempt to map uid/gid/ip to names." printf "$optfmt" "-o" "Force non-console output." printf "$optfmt" "-R" "Redact potentially sensitive information." printf "$optfmt" "-r" "Raw view. Do not format output of bpftrace." printf "$optfmt" "-s" "View read/write activity by server." printf "$optfmt" "-u" "View read/write activity by user (default)." printf "$optfmt" "-V" "Show zero-values. Only applies to \`-j'." printf "$optfmt" "-v" "Print version and exit." printf "$optfmt" "-w" "Wide view. 
Maximize width of first column." die } run_beegfs_ctl() { case "$MODE" in data) set -- --nodetype=storage "$@" ;; meta) set -- --nodetype=meta "$@" ;; esac if [ "$VIEW_CLIENT" ]; then [ "$NO_NAMES" ] || set -- --names "$@" set -- --clientstats ${MODE_DATA:+--rwunit=B} "$@" set -- --interval=$INTERVAL --perinterval "$@" elif [ "$VIEW_SERVER" ]; then [ "$NO_NAMES" ] || set -- --names "$@" set -- --serverstats --perserver --interval=1 "$@" elif [ "$VIEW_USER" ]; then # NB: --names has a bug; we need to avoid using it # NB: UID->name mapping/caching done in post-processing set -- --userstats ${MODE_DATA:+--rwunit=B} "$@" set -- --interval=$INTERVAL --perinterval "$@" fi expect -f- strace -e trace=poll beegfs-ctl "$@" <<-'END-EXPECT' | awk ' !/^(poll\(|\))/ { print; fflush(); next} sub(/^poll\(.*fd=0.*/, "---") { print; fflush() } ' # END-AWK spawn {*}$argv while (1) { expect { -re "fd=0" { send_user "\n" } eof { exit 0 } } } END-EXPECT } # # ST: CALLS # send_user() { local type="$1" shift 1 # type printf "%s|%s\n" "$type" "$*" } resize() { local size if [ -e /dev/tty ]; then size=$( { stty size < /dev/tty; } 2> /dev/null ) else size=$( stty size 2> /dev/null ) fi send_user resize "${size:-24 80}" } ############################################################ MAIN # # Process command-line options # while getopts bCcD:dhi:jk:M:mN:noRrsuVvw flag; do case "$flag" in b) SHOW_BASIC=1 ;; C) COLOR=1 ;; c) VIEW=CLIENT VIEW_CLIENT=1 ;; D) [ "$MODE" = data ] || die "-D metric cannot be combined with -m/-M" DATA_METRIC="$OPTARG" MODE=data ;; d) DEBUGGER=$(( $DEBUGGER + 1 )) ;; i) INTERVAL="$OPTARG" ;; j) OUTPUT_JSON=1 ;; k) SORT="$OPTARG" ;; M) META_METRIC="$OPTARG" MODE=meta ;; m) MODE=meta ;; N) [ "$OPTARG" ] || usage "-N option requires an argument" # NOTREACHED NSAMPLES="$OPTARG" ;; n) NO_NAMES=1 ;; o) CONS= COLOR= ;; R) REDACT=1 ;; r) RAW_VIEW=1 ;; s) VIEW=SERVER VIEW_SERVER=1 ;; u) VIEW=USER VIEW_USER=1 ;; V) SHOW_ZERO_VALUES=1 ;; v) VERSION="${VERSION#*: }" echo "${VERSION% 
$}" exit $SUCCESS ;; w) WIDE_VIEW=1 ;; *) usage # NOTREACHED esac done shift $(( $OPTIND - 1 )) # # Process command-line arguments # [ $# -eq 0 ] || usage "Too many arguments" # NOTREACHED # # Prevent non-functional option combinations # if [ "$META_METRIC" ]; then [ "$MODE" = meta ] || die "-m/-M cannot be combined with -D metric" [ ! "$OUTPUT_JSON" ] || die "-M metric cannot be combined with -j" fi if [ "$DATA_METRIC" ]; then [ "$MODE" = data ] || die "-D metric cannot be combined with -m/-M" [ ! "$OUTPUT_JSON" ] || die "-D metric cannot be combined with -j" fi if [ "$SHOW_BASIC" ]; then [ ! "$OUTPUT_JSON" ] || die "-b cannot be combined with -j" [ "$VIEW" != "SERVER" ] || [ "$MODE" != meta ] || die "-b cannot be combined with -s and -m" fi if [ "$SHOW_ZERO_VALUES" ]; then [ "$OUTPUT_JSON" ] || [ $DEBUGGER -ge 4 ] || die "-V only applies to -j or -dddd" fi # # Silently ignore previous view options unless JSON output # [ "$VIEW" ] || VIEW=USER VIEW_USER=1 if [ ! "$OUTPUT_JSON" ]; then case "$VIEW" in # ST: VIEWS CLIENT) VIEW_SERVER= VIEW_USER= ;; SERVER) VIEW_CLIENT= VIEW_USER= ;; USER) VIEW_CLIENT= VIEW_SERVER= ;; esac fi # # Validate `-i sec' option # case "$INTERVAL" in "") usage "missing -i argument" ;; # NOTREACHED 0) die "-i sec must be non-zero" ;; *[!0-9]*) die "-i sec must be a number" ;; esac # # Validate `-M metric' option # if [ "$VIEW_USER" -o "$VIEW_CLIENT" ]; then if [ "$META_METRIC" != "${META_METRIC#*,}" ]; then # Metric contains a comma, validate each separately METRIC_LIST="${META_METRIC%%,*} ${META_METRIC#*,}" for metric in $METRIC_LIST; do case "$metric" in # Alphabetically sorted "") metric= ;; # blank column [Aa][Cc][Kk]) metric=ack ;; [Cc][Ll][Oo][Ss][Ee]) metric=close ;; [Cc][Rr][Ee][Aa][Tt][Ee]) metric=create ;; [Cc][Rr][Ee][Aa][Tt][Ee][Ll][Ii]) metric=createLI ;; [Dd][Ii][Rr][Pp][Aa][Rr][Ee][Nn][Tt]) metric=dirparent ;; [Ee][Nn][Tt][Ii][Nn][Ff]) metric=entInf ;; [Gg][Ee][Tt][Xx][Aa]) metric=getXA ;; [Hh][Aa][Rr][Dd][Ll][Nn][Kk]) 
metric=hardlnk ;; [Mm][Dd][Ss][Ii][Nn][Ff]) metric=mdsInf ;; [Mm][Ii][Rr][Rr][Oo][Rr]) metric=mirror ;; [Mm][Kk][Dd][Ii][Rr]) metric=mkdir ;; [Nn][Oo][Dd][Ee][Ii][Nn][Ff]) metric=nodeInf ;; [Oo][Pp][Ee][Nn]) metric=open ;; [Rr][Dd][Dd][Ii][Rr]) metric=rddir ;; [Rr][Ee][Nn]) metric=ren ;; [Rr][Ee][Vv][Aa][Ll][Ll][Ii]) metric=revalLI ;; [Rr][Mm][Dd][Ii][Rr]) metric=rmdir ;; [Ss][Aa][Tt][Tt][Rr]) metric=sAttr ;; [Ss][Cc][Hh][Dd][Rr][Cc][Tt]) metric=sChDrct ;; [Ss][Tt][Aa][Tt]) metric=stat ;; [Ss][Tt][Aa][Tt][Ff][Ss]) metric=statfs ;; [Ss][Tt][Aa][Tt][Ll][Ii]) metric=statLI ;; [Ss][Uu][Mm]) metric=sum ;; [Tt][Rr][Uu][Nn][Cc]) metric=trunc ;; [Uu][Nn][Ll][Nn][Kk]) metric=unlnk ;; *) exec >&2 printf "%s: -M unknown value \`%s'\n" \ "$pgm" "$metric" printf "%s must be one of (%s):\n" \ "-M metric[,metric]" "case-insensitive" printf "\tack close create createLI dirparent" printf " entInf getXA hardlnk mdsInf mirror\n" printf "\tmkdir nodeInf open rddir ren revalLI" printf " rmdir sAttr sChDrct stat statfs\n" printf "\tstatLI sum trunc unlnk\n" die esac _METRIC_LIST="$_METRIC_LIST,$metric" done METRIC_LIST="${_METRIC_LIST#,}" elif [ "$MODE" = meta ]; then # A single metric or pair of metrics has been requested case "$META_METRIC" in "") META_METRIC=stat ;; [Ss][Tt][Aa][Tt]|[Ss][Tt][Aa][Tt][Ll][Ii]) META_METRIC=stat ;; [Cc][Rr][Ee][Aa][Tt][Ee]) META_METRIC=create ;; [Cc][Rr][Ee][Aa][Tt][Ee][Ll][Ii]) META_METRIC=create ;; [Rr][Mm]|[Rr][Mm][Dd][Ii][Rr]|[Uu][Nn][Ll][Nn][Kk]) META_METRIC=rm ;; [Ff][Dd]|[Oo][Pp][Ee][Nn]|[Cc][Ll][Oo][Ss][Ee]) META_METRIC=fd ;; # Alphabetical beyond this point [Aa][Cc][Kk]) META_METRIC=ack ;; [Dd][Ii][Rr][Pp][Aa][Rr][Ee][Nn][Tt]) META_METRIC=dirparent ;; [Ee][Nn][Tt][Ii][Nn][Ff]) META_METRIC=entInf ;; [Gg][Ee][Tt][Xx][Aa]) META_METRIC=getXA ;; [Hh][Aa][Rr][Dd][Ll][Nn][Kk]) META_METRIC=hardlnk ;; [Mm][Dd][Ss][Ii][Nn][Ff]) META_METRIC=mdsInf ;; [Mm][Ii][Rr][Rr][Oo][Rr]) META_METRIC=mirror ;; [Mm][Kk][Dd][Ii][Rr]) META_METRIC=mkdir ;; 
[Nn][Oo][Dd][Ee][Ii][Nn][Ff]) META_METRIC=nodeInf ;; [Rr][Dd][Dd][Ii][Rr]) META_METRIC=rddir ;; [Rr][Ee][Nn]) META_METRIC=ren ;; [Rr][Ee][Vv][Aa][Ll][Ll][Ii]) META_METRIC=revalLI ;; [Ss][Aa][Tt][Tt][Rr]) META_METRIC=sAttr ;; [Ss][Cc][Hh][Dd][Rr][Cc][Tt]) META_METRIC=sChDrct ;; [Ss][Tt][Aa][Tt][Ff][Ss]) META_METRIC=statfs ;; [Ss][Uu][Mm]) META_METRIC=sum ;; [Tt][Rr][Uu][Nn][Cc]) META_METRIC=trunc ;; *) exec >&2 printf "%s: -M unknown value \`%s'\n" \ "$pgm" "$META_METRIC" printf "%s metric must be one of (%s):\n" \ "-M" "case-insensitive" printf "\tstat or statLI (default pair shown)\n" printf "\tcreate or createLI\n" printf "\tfd or open or close\n" printf "\trm or rmdir or unlink\n" printf "or any of (also case-insensitive):\n" printf "\tack dirparent entInf getXA hardlnk mdsInf" printf " mirror mkdir nodeInf rddir\n" printf "\tren revalLI sAttr sChDrct statfs sum trunc\n" die esac elif [ "$DATA_METRIC" != "${DATA_METRIC#*,}" ]; then # Metric contains a comma, validate each separately METRIC_LIST="${DATA_METRIC%%,*} ${DATA_METRIC#*,}" for metric in $METRIC_LIST; do case "$metric" in # Alphabetically sorted "") metric= ;; # blank column [Aa][Cc][Kk]) metric=ack ;; [Bb]-[Rr][Dd]) metric=B-rd ;; [Bb]-[Ww][Rr]) metric=B-wr ;; [Cc][Ll][Oo][Ss][Ee]) metric=close ;; [Gg][Ee][Tt][Ff][Ss][Ii][Zz][Ee]) metric=getFSize ;; [Hh][Rr][Tt][Bb][Ee][Aa][Tt]) metric=hrtbeat ;; [Nn][Oo][Dd][Ee][Ii][Nn][Ff]) metric=nodeInf ;; [Oo][Pp][Ss]-[Rr][Dd]) metric=ops-rd ;; [Oo][Pp][Ss]-[Ww][Rr]) metric=ops-wr ;; [Ss][Uu][Mm]) metric=sum ;; [Tt][Rr][Uu][Nn][Cc]) metric=trunc ;; [Uu][Nn][Ll][Nn][Kk]) metric=unlnk ;; *) exec >&2 printf "%s: -D unknown value \`%s'\n" \ "$pgm" "$metric" printf "%s must be one of (%s):\n" \ "-D metric[,metric]" "case-insensitive" printf "\tack B-rd B-wr close getFSize" printf " hrtbeat nodeInf ops-rd ops-wr sAttr\n" printf "\tsChDrct statfs storInf sum trunc" printf " unlnk\n" die esac _METRIC_LIST="$_METRIC_LIST,$metric" done METRIC_LIST="${_METRIC_LIST#,}" else 
case "$DATA_METRIC" in "") DATA_METRIC=rw ;; [Rr][Ww]|[Rr][Ee][Aa][Dd]|[Ww][Rr][Ii][Tt][Ee]) DATA_METRIC=rw ;; # Alphabetical beyond this point [Aa][Cc][Kk]) DATA_METRIC=ack ;; [Bb]-[Rr][Dd]) DATA_METRIC=B-rd ;; [Bb]-[Ww][Rr]) DATA_METRIC=B-wr ;; [Cc][Ll][Oo][Ss][Ee]) DATA_METRIC=close ;; [Gg][Ee][Tt][Ff][Ss][Ii][Zz][Ee]) DATA_METRIC=getFSize ;; [Hh][Rr][Tt][Bb][Ee][Aa][Tt]) DATA_METRIC=hrtbeat ;; [Nn][Oo][Dd][Ee][Ii][Nn][Ff]) DATA_METRIC=nodeInf ;; [Oo][Pp][Ss]-[Rr][Dd]) DATA_METRIC=ops-rd ;; [Oo][Pp][Ss]-[Ww][Rr]) DATA_METRIC=ops-wr ;; [Ss][Uu][Mm]) DATA_METRIC=sum ;; [Tt][Rr][Uu][Nn][Cc]) DATA_METRIC=trunc ;; [Uu][Nn][Ll][Nn][Kk]) DATA_METRIC=unlnk ;; *) exec >&2 printf "%s: -D unknown value \`%s'\n" \ "$pgm" "$DATA_METRIC" printf "%s metric must be one of (%s):\n" \ "-D" "case-insensitive" printf "\trw or read or write (default shown)\n" printf "or any of (also case-insensitive):\n" printf "\tack B-rd B-wr close getFSize hrtbeat nodeInf" printf " ops-rd ops-wr sAttr\n" printf "\tsChDrct statfs storInf sum trunc unlnk\n" die esac fi elif [ "$VIEW_SERVER" ]; then if [ "$META_METRIC" != "${META_METRIC#*,}" ]; then # Metric contains a comma, validate each separately METRIC_LIST="${META_METRIC%%,*} ${META_METRIC#*,}" for metric in $METRIC_LIST; do case "$metric" in "") metric= ;; # blank column # Alphabetically sorted [Bb][Ss][Yy]) metric=bsy ;; [Qq][Ll][Ee][Nn]) metric=qlen ;; [Rr][Ee][Qq][Ss]) metric=reqs ;; *) exec >&2 printf "%s: -M unknown value \`%s'\n" \ "$pgm" "$metric" printf "%s metric must be one of (%s):\n" \ "-M" "case-insensitive" printf "\tbsy qlen reqs\n" die esac _METRIC_LIST="$_METRIC_LIST,$metric" done METRIC_LIST="${_METRIC_LIST#,}" elif [ "$MODE" = meta ]; then # A single metric or pair of metrics has been requested case "$META_METRIC" in "") META_METRIC=bsy ;; [Bb][Ss][Yy]) META_METRIC=bsy ;; [Qq][Ll][Ee][Nn]) META_METRIC=qlen ;; *) exec >&2 printf "%s: -M unknown value \`%s'\n" \ "$pgm" "$META_METRIC" printf "%s metric must be one of (%s):\n" 
\ "-M" "case-insensitive" printf "\t\`bsy' or \`qlen'\n" die esac elif [ "$DATA_METRIC" != "${DATA_METRIC#*,}" ]; then # Metric contains a comma, validate each separately METRIC_LIST="${DATA_METRIC%%,*} ${DATA_METRIC#*,}" for metric in $METRIC_LIST; do case "$metric" in # Alphabetically sorted "") metric= ;; # blank column [Bb][Ss][Yy]) metric=bsy ;; [Qq][Ll][Ee][Nn]) metric=qlen ;; [Rr][Ee][Aa][Dd]) metric=read ;; [Rr][Ee][Qq][Ss]) metric=reqs ;; [Ww][Rr][Ii][Tt][Ee]) metric=write ;; *) exec >&2 printf "%s: -D unknown value \`%s'\n" \ "$pgm" "$metric" printf "%s must be one of (%s):\n" \ "-D metric[,metric]" "case-insensitive" printf "\tbsy qlen read reqs write\n" die esac done METRIC_LIST="${_METRIC_LIST#,}" else case "$DATA_METRIC" in "") DATA_METRIC=rw ;; [Rr][Ww]|[Rr][Ee][Aa][Dd]|[Ww][Rr][Ii][Tt][Ee]) DATA_METRIC=rw ;; # Alphabetical beyond this point [Bb][Ss][Yy]) DATA_METRIC=bsy ;; [Qq][Ll][Ee][Nn]) DATA_METRIC=qlen ;; [Rr][Ee][Aa][Dd]) DATA_METIRC=read ;; [Ww][Rr][Ii][Tt][Ee]) DATA_METRIC=write ;; *) exec >&2 printf "%s: -D unknown value \`%s'\n" \ "$pgm" "$DATA_METRIC" printf "%s metric must be one of (%s):\n" \ "-D" "case-insensitive" printf "\trw or read or write (default shown)\n" printf "or any of (also case-insensitive):\n" printf "\tbsy qlen\n" die esac fi fi # # Validate `-k col' option # ST: SORTING # if [ "$MODE" = data ]; then case "$SORT" in "") usage "missing -k argument" ;; # NOTREACHED [Tt][Oo][Tt][Aa][Ll]) SORT=total ;; # TOTAL [Rr][Ee][Aa][Dd]|[Oo][Uu][Tt]) SORT=read ;; # READ(OUT) [Ww][Rr][Ii][Tt][Ee]|[Ii][Nn]) SORT=write ;; # WRITE(IN) *) die "-k col must be one of: total read write in out" esac elif [ "$MODE" = meta ]; then if [ "$VIEW_USER" -o "$VIEW_CLIENT" ]; then if [ "$META_METRIC" = create ]; then case "$SORT" in "") usage "missing -k argument" ;; # NOTREACHED [Tt][Oo][Tt][Aa][Ll]) SORT=total ;; # TOTAL [Cc][Rr][Ee][Aa][Tt][Ee]) SORT=create ;; # CREATE [Cc][Rr][Ee][Aa][Tt][Ee][Ll][Ii]) SORT=createLI ;; # CREATELI *) die "-k col 
must be one of: total create createLI" esac elif [ "$META_METRIC" = rm ]; then case "$SORT" in "") usage "missing -k argument" ;; # NOTREACHED [Tt][Oo][Tt][Aa][Ll]) SORT=total ;; # TOTAL [Rr][Mm][Dd][Ii][Rr]) SORT=rmdir ;; # RMDIR [Uu][Nn][Ll][Nn][Kk]) SORT=unlnk ;; # UNLNK *) die "-k col must be one of: total rmdir unlnk" esac elif [ "$META_METRIC" = fd ]; then case "$SORT" in "") usage "missing -k argument" ;; # NOTREACHED [Tt][Oo][Tt][Aa][Ll]) SORT=total ;; # TOTAL [Oo][Pp][Ee][Nn]) SORT=open ;; # OPEN [Cc][Ll][Oo][Ss][Ee]) SORT=close ;; # CLOSE *) die "-k col must be one of: total open close" esac elif [ "${META_METRIC:-stat}" = stat ]; then case "$SORT" in "") usage "missing -k argument" ;; # NOTREACHED [Tt][Oo][Tt][Aa][Ll]) SORT=total ;; # TOTAL [Ss][Tt][Aa][Tt]) SORT=stat ;; # STAT [Ss][Tt][Aa][Tt][Ll][Ii]) SORT=statLI ;; # STATLI *) die "-k col must be one of: total stat statLI" esac elif [ "$META_METRIC" != "${META_METRIC#*,}" ]; then case "$SORT" in "") usage "missing -k argument" ;; # NOTREACHED "${META_METRIC%%,*}"|"${META_METRIC#*,}") : ok ;; [Tt][Oo][Tt][Aa][Ll]) SORT=total ;; # TOTAL *) die "-k col must be one of: total %s %s" \ "${META_METRIC%%,*}" "${META_METRIC#*,}" esac else case "$SORT" in "") usage "missing -k argument" ;; # NOTREACHED "$META_METRIC") : ok ;; # METRIC [Tt][Oo][Tt][Aa][Ll]) SORT=total ;; # TOTAL *) die "-k col must be one of: total $META_METRIC" esac fi elif [ "$VIEW_SERVER" ]; then if [ "$META_METRIC" = bsy ]; then case "$SORT" in "") usage "missing -k argument" ;; # NOTREACHED [Tt][Oo][Tt][Aa][Ll]) SORT=total ;; # TOTAL [Rr][Ee][Qq][Ss]) SORT=reqs ;; # REQS [Bb][Ss][Yy]) SORT=bsy ;; # BSY *) die "-k col must be one of: total reqs bsy" esac elif [ "$META_METRIC" = qlen ]; then case "$SORT" in "") usage "missing -k argument" ;; # NOTREACHED [Tt][Oo][Tt][Aa][Ll]) SORT=total ;; # TOTAL [Rr][Ee][Qq][Ss]) SORT=reqs ;; # REQS [Qq][Ll][Ee][Nn]) SORT=qlen ;; # QLEN *) die "-k col must be one of: total reqs qlen" esac elif [ 
"$META_METRIC" != "${META_METRIC#*,}" ]; then case "$SORT" in "") usage "missing -k argument" ;; # NOTREACHED "${META_METRIC%%,*}"|"${META_METRIC#*,}") : ok ;; [Tt][Oo][Tt][Aa][Ll]) SORT=total ;; # TOTAL *) die "-k col must be one of: total %s %s" \ "${META_METRIC%%,*}" "${META_METRIC#*,}" esac else # NOTREACHED die "-M metric value unsupported \`%s'" \ "$META_METRIC" fi fi fi # # Process `-m' option # case "$MODE" in data) MODE_DATA=1 MODE_META= ;; meta) MODE_META=1 MODE_DATA= ;; esac # # Validate `-N num' option # case "$NSAMPLES" in *[!0-9]*) die "-N num must be a positive integer" ;; esac # # Get terminal size # size=$( resize ) size="${size#*|}" if [ "$size" ]; then cols="${size#*[$IFS]}" rows="${size%%[$IFS]*}" fi case "$rows$cols" in ""|*[!0-9]*) cols=80 rows=24 ;; esac # # Run script # ST: PRE # { trap resize WINCH # ST: SIGWINCH # # Start background beegfs-ctl # ST: BEEGFSCTL # run_beegfs_ctl & bpid=$! # # Wait on background (beegfs-ctl) child # status_collected= while kill -0 $bpid > /dev/null 2>&1; do wait > /dev/null 2>&1 # Collect exit status [ "$status_collected" ] || status_collected=$? 
done echo EXIT:$status_collected # Send status to post-processor } | awk -v color=${COLOR:-0} \ -v cols=$cols \ -v cons=${CONS:-0} \ -v data_metric="$DATA_METRIC" \ -v data_mode=${MODE_DATA:-0} \ -v debugger=${DEBUGGER:-0} \ -v interval=$INTERVAL \ -v meta_metric="$META_METRIC" \ -v meta_mode=${MODE_META:-0} \ -v mode="$MODE" \ -v no_names=${NO_NAMES:-0} \ -v nsamples=${NSAMPLES:--1} \ -v output_json=${OUTPUT_JSON:-0} \ -v raw_view=${RAW_VIEW:-0} \ -v redact=${REDACT:-0} \ -v rows=$rows \ -v show_basic=${SHOW_BASIC:-0} \ -v show_zero_values=${SHOW_ZERO_VALUES:-0} \ -v sort="$SORT" \ -v stderr=/dev/stderr \ -v uname=$( uname ) \ -v view="$VIEW" \ -v wide_view=${WIDE_VIEW:-0} \ '####################################### BEGIN # ST: POST BEGIN { debug2("Terminal size (rows, cols) = (%d, %d)", rows, cols) debug1("Sorting by %s", sort) debug1("Mode is %s", mode) if (data_mode) debug1("Data metric is %s", data_metric) else debug1("Meta metric is %s", meta_metric) exit_status = 0 # SUCCESS time_delta = 0 # Calculated if (view == "SERVER") { in_summary = 0 samples_left = nsamples subsamples_left = interval } else { in_summary = 1 samples_left = ++nsamples } tm = _systime() inv = "\033[7m" noinv = "\033[27m" bold = "\033[1m" nobold = "\033[22m" red = "\033[31m" green = "\033[32m" yellow = "\033[33m" cyan = "\033[36m" fgreset = "\033[39m" # Obtain current process (awk) pid (cmd = "echo $PPID") | getline apid close(cmd) # Obtain parent process (sh) pid getline stat < (file = sprintf("/proc/%d/stat", apid)) close(file) split(stat, st) spid = st[4] # Obtain parent process (sh) name getline stat < (file = sprintf("/proc/%d/stat", spid)) close(file) split(stat, st) comm = st[2] if (match(comm, /^\(.*\)$/)) comm = substr(comm, 2, length(comm) - 2) # Obtain child (sh) pid (cmd = sprintf("pgrep -P %d %s", spid, comm)) | getline cpid close(cmd) # Obtain beegfs-ctl pid (cmd = sprintf("pgrep -P %d", cpid)) | getline spid close(cmd) (cmd = sprintf("pgrep -P %d expect", spid)) | getline 
epid close(cmd) (cmd = sprintf("pgrep -P %d strace", epid)) | getline spid close(cmd) (cmd = sprintf("pgrep -P %d beegfs-ctl", spid)) | getline bpid close(cmd) if (!raw_view) { clear_data() resize() } if (redact) { m = "^(USER|total|" if ((u = ENVIRON["USER"]) != "") m = m u "|" if ((s = ENVIRON["SUDO_USER"]) != "" && s != u) m = m s "|" cmd = "getent passwd 2> /dev/null" while (cmd | getline > 0) { if (split($0, f, /:/) < 3) continue if (f[3] > 1024 && f[3] !~ /^6553[456]$/) continue m = m f[1] "|" } close(cmd) m = m "\\*)$" unredacted_users = m unredacted_clients = "^(CLIENT|total|\\*)$" unredacted_servers = "^(SERVER|total|\\*)$" } # Declare arrays delete labels delete lorder delete name2uid_cache delete uid2name_cache # # Aggregation constants # agg_str[agg_avg = 1] = "agg_avg" agg_str[agg_sum = 2] = "agg_sum" # # ST: VIEWS # if (view == "SERVER") { labels["name"] = no_names ? 1 : "NF" if (meta_mode) show_basic = 1 # column widths and units } else { labels["name"] = 1 } debug1("Basic mode (-b) %s", show_basic ? "enabled" : "disabled") type_kib = show_basic ? "kib" : "kib-rate" type_iec = show_basic ? "iec" : "iec-rate" type_num = show_basic ? 
"num" : "num-rate" type_reg = "num" o = 0 if (data_mode && view == "SERVER") { # NB: field 1 is node ID (unused) kv_map6("write", "", "write_bytes", 2, "write_rate", type_kib, agg_sum) kv_map6("read", "", "read_bytes", 3, "read_rate", type_kib, agg_sum) kv_map6("reqs", "", "reqs", 4, "", type_reg, agg_avg) kv_map6("qlen", "", "qlen", 5, "", type_reg, agg_avg) kv_map6("bsy", "", "bsy", 6, "", type_reg, agg_avg) lorder[++o] = "read" lorder[++o] = "write" # Alphabetical beyond this point lorder[++o] = "bsy" lorder[++o] = "qlen" lorder[++o] = "reqs" kv_ord(o, lorder) if (match(data_metric, /,/)) { label_map(1, substr(data_metric, 1, RSTART-1)) label_map(2, substr(data_metric, RSTART+1)) } else if (data_metric == "rw") { label_map(1, "read", "READ(OUT)") label_map(2, "write", "WRITE(IN)") } else { label_map(1, data_metric) label_map(2, "reqs") } labels["combined_rate"] = "rw_rate" labels["combined_total"] = "rw_bytes" } else if (data_mode) { # for USER/CLIENT kv_map6("read", "B-rd", "read_bytes", unused1, "read_rate", type_iec, agg_sum) kv_map6("write", "B-wr", "write_bytes", unused2, "write_rate", type_iec, agg_sum) lorder[++o] = "read" lorder[++o] = "write" # Alphabetical beyond this point lorder[++o] = "ack" lorder[++o] = "close" lorder[++o] = "getFSize" lorder[++o] = "hrtbeat" lorder[++o] = "nodeInf" lorder[++o] = "ops-rd" lorder[++o] = "ops-wr" lorder[++o] = "sAttr" lorder[++o] = "sChDrct" lorder[++o] = "statfs" lorder[++o] = "storInf" lorder[++o] = "sum" lorder[++o] = "trunc" lorder[++o] = "unlnk" kv_ord(o, lorder) for (n = 3; n <= o; n++) kv_map(lorder[n]) if (match(data_metric, /,/)) { label_map(1, substr(data_metric, 1, RSTART-1)) label_map(2, substr(data_metric, RSTART+1)) } else if (data_metric == "rw") { label_map(1, "read", "READ(OUT)") label_map(2, "write", "WRITE(IN)") } else { label_map(1, data_metric) } labels["combined_rate"] = "rw_rate" labels["combined_total"] = "rw_bytes" } else if (meta_mode && view == "SERVER") { # NB: field 1 is node ID (unused) 
kv_map4("reqs", 2, type_reg, agg_avg) kv_map4("qlen", 3, type_reg, agg_avg) kv_map4("bsy", 4, type_reg, agg_avg) lorder[++o] = "reqs" lorder[++o] = "qlen" lorder[++o] = "bsy" kv_ord(o, lorder) if (match(meta_metric, /,/)) { label_map(1, substr(meta_metric, 1, RSTART-1)) label_map(2, substr(meta_metric, RSTART+1)) } else { label_map(1, meta_metric) label_map(2, "reqs") } } else { # meta_mode for USER/CLIENT lorder[++o] = "stat" lorder[++o] = "statLI" lorder[++o] = "create" lorder[++o] = "createLI" lorder[++o] = "rmdir" lorder[++o] = "unlnk" lorder[++o] = "open" lorder[++o] = "close" # Alphabetical beyond this point lorder[++o] = "ack" lorder[++o] = "dirparent" lorder[++o] = "entInf" lorder[++o] = "getXA" lorder[++o] = "hardlnk" lorder[++o] = "mdsInf" lorder[++o] = "mirror" lorder[++o] = "mkdir" lorder[++o] = "nodeInf" lorder[++o] = "rddir" lorder[++o] = "ren" lorder[++o] = "revalLI" lorder[++o] = "sAttr" lorder[++o] = "sChDrct" lorder[++o] = "statfs" lorder[++o] = "sum" lorder[++o] = "trunc" kv_ord(o, lorder) for (n = 1; n <= o; n++) kv_map(lorder[n]) if (meta_metric == "stat") { label_map(1, "statLI") label_map(2, "stat") } else if (meta_metric == "create") { label_map(1, "createLI") label_map(2, "create") } else if (meta_metric == "rm") { label_map(1, "unlnk") label_map(2, "rmdir") } else if (meta_metric == "fd") { label_map(1, "close") label_map(2, "open") } else if (match(meta_metric, /,/)) { label_map(1, substr(meta_metric, 1, RSTART-1)) label_map(2, substr(meta_metric, RSTART+1)) } else { label_map(1, meta_metric) } } delete lorder # NB: See json_field_order set by kv_ord() n = o = 0 # NB: See njson_fields set by kv_ord() } ######################################## FUNCTIONS # # OS compatibility # function _asort(src, dest, k, nitems, i, val) { k = nitems = 0 for (i in src) dest[++nitems] = src[i] for (i = 1; i <= nitems; k = i++) { val = dest[i] while ((k > 0) && (dest[k] > val)) { dest[k+1] = dest[k]; k-- } dest[k+1] = val } return nitems } function 
_strftime(fmt, tm, cmd, tfmt) { if (uname == "Linux") return strftime(fmt, tm) if (tm == _strftime_tm && fmt == _strftime_fmt) return _strftime_dt tfmt = uname == "Linux" ? "-d @%u" : "-r %u" (cmd = sprintf("date " tfmt " +\"%s\"", _strftime_tm = tm, _strftime_fmt = fmt)) | getline _strftime_dt close(cmd) return _strftime_dt } function _systime( cmd, tm) { if (uname == "Linux") return systime() (cmd = "date +%s") | getline tm close(cmd) return tm } # # Debugging # function debug1(fmt,a1) { dprint(sprintf(fmt, a1)) } function debug2(fmt,a1,a2) { dprint(sprintf(fmt, a1, a2)) } function debug3(fmt,a1,a2,a3) { dprint(sprintf(fmt, a1, a2, a3)) } function debug4(fmt,a1,a2,a3,a4) { dprint(sprintf(fmt, a1, a2, a3, a4)) } function debug5(fmt,a1,a2,a3,a4,a5) { dprint(sprintf(fmt, a1, a2, a3, a4, a5)) } function dprint(str) { if (debugger == 0) return print str > stderr fflush(stderr) } function d3print2(fmt,a1,a2) { if (debugger >= 3) dprint(sprintf(fmt, a1, a2)) } function d4print2(fmt,a1,a2) { if (debugger >= 4) dprint(sprintf(fmt, a1, a2)) } function d4print3(fmt,a1,a2,a3) { if (debugger >= 4) dprint(sprintf(fmt, a1, a2, a3)) } function d4print4(fmt,a1,a2,a3,a4) { if (debugger >= 4) dprint(sprintf(fmt, a1, a2, a3, a4)) } # # Data display # function get_random(len, c, n, r, rdata, rfile, rlen) { if (len < 1) return "" rlen = 0 rdata = "" rfile = "/dev/urandom" while (length(rdata) < len && getline r < rfile > 0) { for (n = split(r, c, ""); n >= 1; n--) { if (c[n] !~ /[\x41-\x5a]/) continue rdata = rdata c[n] if (++rlen == len) break } } close(rfile) return rdata } function resize( dsz, vsz, vsz_fixed, bar_size_fixed, bar_size_fixed_max, bar_size_fixed_min, bar_min1, bar_min2, vsz_cols1, vsz_cols2, vsz_max, vsz_min, wv) { if (output_json) return # # Calculate columns and column widths # ST: VIEWS # # NB: bar_size = size of bar column (if shown) # NB: dsz = size of data columns # NB: vsz = size of VIEW column ("view size") # # If given -w (wide view) make bar_size 
# fixed-width and
# vsz variable-width.
#
# Without -w, make vsz fixed-width and bar_size variable.
#
	wv = wide_view
	show_bar_column = 1
	show_data_columns = 1
	vsz_min = length(view)	# VIEW column never narrower than its title
	vsz_max = 15
	dsz = show_basic ? 10 : 12	# width of each numeric data column
	bar_size_fixed_max = 21
	bar_size_fixed_min = 11
	#
	# Calculate minimum terminal width required (bar_min1)
	# to display small bar (bar_size_fixed_min) and also
	# minimum terminal width required (bar_min2) to display
	# larger bar (bar_size_fixed_max).
	#
	bar_min1 = 0
	bar_min1 += vsz_min + 1 # VIEW + space
	bar_min1 += dsz + 1 # TOTAL + space
	vsz_cols2 = bar_min1
	bar_min1 += dsz + 1 # data + space
	vsz_cols1 = bar_min2 = bar_min1
	bar_min1 += bar_size_fixed_min + 1 # small bar + space
	bar_min2 += bar_size_fixed_max + 1 # bigger bar + space
	bar_min1 += dsz
	bar_min2 += dsz
	vsz_cols1 += dsz
	#
	# Calculate fixed bar width based on terminal width
	# NB: Only used in wide-view (-w)
	# NB: If terminal is too narrow, disable bar/columns
	#
	if (cols >= bar_min2) {
		bar_size_fixed = bar_size_fixed_max
	} else if (cols >= bar_min1) {
		bar_size_fixed = bar_size_fixed_min
	} else {
		show_bar_column = 0
		bar_size_fixed = 0
	}
	#
	# Calculate fixed-size "VIEW" column width
	# NB: Unused in wide-view (-w)
	#
	vsz_fixed = vsz_min
	if (cols >= bar_min2) {
		vsz_fixed += cols - bar_min2
		if (vsz_fixed > vsz_max) vsz_fixed = vsz_max
	} else if (cols >= bar_min1) {
		vsz_fixed += cols - bar_min1
		if (vsz_fixed > vsz_max) vsz_fixed = vsz_max
	} else if (cols >= vsz_cols1) {
		vsz_fixed += cols - vsz_cols1
	} else if (cols >= vsz_cols2) {
		# Too narrow for two data columns; drop them and fall
		# back to wide-view layout (variable-width VIEW)
		show_data_columns = 0
		wv = 1
	} else {
		show_data_columns = 0
	}
	if (wv) {
		# Fixed-width
		bar_size = bar_size_fixed
		# Variable-width (%-*s)
		vsz = cols
		vsz -= 0 + 1 # %-*s VIEW + space
		vsz -= dsz # TOTAL
		if (show_data_columns) {
			vsz -= 1 + dsz + 1 # space + data + space
			if (bar_size > 0) {
				vsz -= bar_size + 1 # bar + space
			}
			vsz -= dsz
		}
	} else if (show_bar_column) {
		# Fixed-width
		vsz = vsz_fixed
		# Variable-width (%-*s)
		bar_size = cols
		bar_size -= vsz + 1 # %[-]*s VIEW + space
		bar_size -= dsz + 1 # TOTAL + space
		bar_size -= dsz + 1 # data + space
		bar_size -= 0 + 1 # variable-width bar + space
		bar_size -= dsz
	} else {
		# Fixed-width
		vsz = vsz_fixed
	}
	#
	# Calculate format and line width
	# ST: VIEWS
	#
	fmt = ""
	fmtsz = 0
	fmt = fmt " %-" vsz "s" # VIEW
	fmtsz += 1 + vsz
	fmt = fmt " %" dsz "s" # TOTAL
	fmtsz += 1 + dsz
	if (color) {
		fmt = fmt " " red "%" dsz "s"
	} else {
		fmt = fmt " %" dsz "s"
	}
	fmtsz += 1 + dsz
	if (show_bar_column) {
		# Split the bar area into two halves: left bar grows
		# right-to-left, right bar grows left-to-right, with a
		# `|' divider between them
		full_bar = bar_size
		bar_size = int(bar_size / 2)
		if (bar_size * 2 == full_bar) bar_size--
		fmt = fmt " %*s" # left bar
		# right bar
		if (color) {
			fmt = fmt fgreset "|" cyan "%-*s"
		} else {
			fmt = fmt "|%-*s"
		}
		fmtsz += 1 + bar_size + 1 + bar_size
		fmt = fmt " %-" dsz "s"
		fmtsz += 1 + dsz
	} else if (show_data_columns) {
		if (color) {
			fmt = fmt " " cyan "%" dsz "s"
		} else {
			fmt = fmt " %" dsz "s"
		}
		fmtsz += 1 + dsz
	}
	fmt = substr(fmt, 2) # Trim leading space
	fmtsz -= 1
	fmt = fmt (color ? fgreset : "") "\n"
	#
	# Export calculated column sizes for things we truncate
	#
	delete csz
	csz["view"] = vsz
	#
	# Redraw console
	#
	if (cons) {
		clear_buffer()
		buffer_add_data()
		print_buffer()
	}
} # end of enclosing function (presumably resize(); opens above this chunk)

############################################# Key/Value-store

#
# Build a composite key from up to two parts.
# With one part, the key is p1; with two, "p1 p2".
#
function idx(p1, p2)
{
	return (p2 == "" ? p1 : p1 " " p2)
}
#
# Register a metric x whose prop/key/JSON name are all x itself,
# defaulting to numeric type and sum aggregation.
#
function kv_map(x, field)
{
	kv_map6(x, x, x, field, "", type_num, agg_sum)
}
#
# As kv_map() but with explicit data type and aggregation.
#
function kv_map4(x, field, type, agg)
{
	kv_map6(x, x, x, field, "", type, agg)
}
#
# Reset all per-sample accumulators.
#
function clear_data()
{
	delete _keys
	delete key_items
	delete key_value
}
#
# Fold value v into the running average stored at idx(p1, p2).
# key_items[] tracks the item count so the mean can be updated
# incrementally (old_mean * n + v) / (n + 1).
#
function kv_avg(p1, p2, v,  key, bef, n)
{
	if (p1 == "") return
	key = idx(p1, p2)
	bef = key_value[key]
	key_value[key] *= key_items[key]++
	key_value[key] += v
	key_value[key] /= (n = key_items[key])
	if (debugger >= 4 && (v != 0 || show_zero_values)) {
		debug5("++ key_value[%s] avg changed %s => %s (%s%d)",
			key, sprintf("%d/%d", bef * (n - 1), n - 1),
			sprintf("%d/%d", key_value[key] * n, n),
			v >= 0 ? "+" : "", v)
	}
}
#
# Register metric x: the beegfs-ctl field number it comes from,
# the property name used when parsing, the key used in the KV
# store/JSON output, an optional JSON rate name, its data type,
# and its aggregation (sum or average).
#
function kv_map6(x, prop, key, field, rate, type, agg)
{
	json_fields[x] # NB: bare reference creates the array element
	json_field[x] = int(field) # SERVER mode (-s)
	json_prop[x] = prop == "" ? x : prop
	json_key[x] = key == "" ? x : key
	if (data_mode || view != "SERVER") {
		json_rate[x] = rate == "" ? key "_rate" : rate
	}
	data_type[key] = type == "" ? type_num : type
	data_agg[key] = agg == "" ? agg_sum : agg
	d4print4("data_type[%s]=[%s] data_agg[%s]=[%s]",
		key, data_type[key], key, agg_str[data_agg[key]])
}
#
# Fix the output order of registered metrics for JSON/labels.
#
function kv_ord(nelements, list,  n)
{
	json_field_order = ""
	for (n = 1; n <= nelements; n++)
		json_field_order = json_field_order " " list[n]
	json_field_order = substr(json_field_order, 2)
	njson_fields = split(json_field_order, json_field_name)
}
#
# Add value v to the running sum stored at idx(p1, p2).
#
function kv_sum(p1, p2, v,  key, bef)
{
	if (p1 == "") return
	key = idx(p1, p2)
	bef = key_value[key]
	key_value[key] += v
	if (debugger >= 4 && (v != 0 || show_zero_values)) {
		debug5("++ key_value[%s] sum changed %d => %d (%s%d)",
			key, bef, key_value[key],
			v >= 0 ? "+" : "", v)
	}
}
#
# Fetch the accumulated value at idx(p1, p2), or "" if unset.
#
function kv(p1, p2,  i)
{
	return ((i = idx(p1, p2)) in key_value ? key_value[i] : "")
}
#
# Populate labels[] slot n (colnam/field/prop/key/rate) for
# registered metric x, defaulting the column name to uppercase x.
#
function label_map(n, x, colnam)
{
	labels["colnam" n] = colnam == "" ? toupper(x) : colnam
	labels["field" n] = json_field[x] # SERVER mode (-s)
	labels["prop" n] = json_prop[x]
	labels["key" n] = json_key[x]
	if (data_agg[x] != agg_avg) {
		labels["rate" n] = json_rate[x]
	}
}

############################################# Buffer management

#
# Append text to the screen buffer (flushed by print_buffer()).
#
function buffer_add(text)
{
	BUFFER = BUFFER text
}
#
# Emit the accumulated buffer to stdout and flush.
#
function print_buffer()
{
	if (!cons && !output_json) buffer_add("\n")
	printf "%s", BUFFER
	fflush()
}
#
# Reset the buffer; on an interactive console (and not debugging)
# start with an ANSI home+clear sequence, then the header line.
#
function clear_buffer()
{
	BUFFER = ""
	if (output_json) return
	if (debugger == 0 && cons) buffer_add(sprintf("\033[H\033[J"))
	buffer_add_header()
}
#
# Format and buffer a single display row: VIEW cell (arg1), the
# TOTAL column, both data values, and the two half-bars.  Applies
# redaction (-R) and truncates the VIEW cell to its column width.
#
function buffer_add1(arg1, total, value1, bar1, bar2, value2,
	prefix, suffix,  str)
{
	if (redact && view == "USER") {
		if (arg1 !~ unredacted_users)
			arg1 = get_random(length(arg1))
	} else if (redact && view == "CLIENT") {
		if (arg1 !~ unredacted_clients)
			arg1 = get_random(length(arg1))
	} else if (redact && view == "SERVER") {
		if (arg1 !~ unredacted_servers)
			arg1 = get_random(length(arg1))
	}
	if (length(arg1) > csz["view"]) {
		arg1 = substr(arg1, 1, csz["view"])
	}
	if (show_bar_column) {
		str = sprintf(fmt, arg1, total, value2,
			bar_size, bar2, bar_size, bar1, value1)
	} else {
		str = sprintf(fmt, arg1, total, value2, value1)
	}
	if (cols < fmtsz) {
		# Clip to terminal width, preserving trailing newline
		str = substr(str, 1, cols) (str ~ /\n$/ ? "\n" : "")
	}
	buffer_add(prefix str suffix)
}
#
# Buffer the two header lines: an "Interval: ..." line with the
# longest strftime(3) timestamp that fits the width, followed by
# the column-title row.  All parameters are locals.
#
function buffer_add_header(  prefix, suffix, presz, n, fmt, dtfmt,
	dtsz, ifmt, sz, str)
{
	if (output_json) return
	ifmt = "%.3fs"
	presz = 9 + 1 + length(sprintf(ifmt, interval))
	# 9 = "Interval:"
	# 1 = number of spaces
	sz = cols < fmtsz ? cols : fmtsz
	# Try timestamp formats from longest (%c) to none ("")
	for (n = split("|%T|%F %T|%c", fmt, /\|/); n > 0; n--) {
		dtfmt = fmt[n]
		dtsz = dtfmt == "" ? 0 : length(_strftime(dtfmt, tm))
		# NOTE(review): _strftime/_systime/_asort appear to be
		# portability wrappers defined outside this chunk
		if (sz >= presz + 1 + dtsz) break
	}
	if (dtfmt == "") {
		str = sprintf("Interval: " ifmt, interval)
		if (length(str) > sz) {
			str = substr(str, 1, sz)
		}
		buffer_add(str "\n")
	} else {
		buffer_add(sprintf("Interval: %-*s %*s\n",
			sz - 9 - 2 - dtsz, sprintf(ifmt, interval),
			dtsz, _strftime(dtfmt, tm)))
		# 9 = "Interval:"
		# 2 = number of spaces
	}
	prefix = color ? inv green : ""
	suffix = color ? fgreset noinv : ""
	empty_bar = ""
	buffer_add1(view, "TOTAL", labels["colnam1"], empty_bar,
		empty_bar, labels["colnam2"], prefix, suffix)
}
#
# Buffer the body of the display for the active view.
#
function buffer_add_data()
{
	#
	# Process requested view
	#
	process_view(tolower(view))
}

############################################# Data parsing

#
# Resolve a numeric UID to a user name via getent(1), with
# memoized forward and reverse caches.  Values containing `:'
# (already name-like) pass through untouched.
#
function uid2name(uid,  cmd, id, idf, name)
{
	if (uid ~ /:/) return uid
	if (uid in uid2name_cache) return uid2name_cache[uid]
	(cmd = sprintf("getent passwd -- %d", uid)) | getline id
	close(cmd)
	name = split(id, idf, /:/) < 3 ? uid : idf[1]
	if (name == "") name = uid
	name2uid_cache[name] = uid
	return uid2name_cache[uid] = name
}
#
# Reverse lookup populated by uid2name(); "" when unknown.
#
function name2uid(name)
{
	return name in name2uid_cache ? name2uid_cache[name] : ""
}
#
# Parse one line of beegfs-ctl output into the KV store.
# Returns 1 if the line contributed data, 0 if skipped.
#
function parse_line(  key, key1, type1, value1, key2, type2,
	value2, kprop, value, n, f, k, v)
{
	# ST: CTLDATA
	sub(/\r$/, "") # NB: Data comes in CRLF format (sigh)
	#
	# NB: Fix missing whitespace between sum and name
	# NB: An IPv4 addr is not long enough to trigger this bug,
	#     and hostnames cannot end in a number, so insert the
	#     space at the point where they meet.
	#
	if (match($0, /[^[:space:][:digit:]][[:digit:]]+ \[sum\]/)) {
		$0 = substr($0, 1, RSTART) " " substr($0, RSTART + 1)
	}
	#
	# ST: VIEWS OPS
	#
	key = labels["name"] == "NF" ? $NF : $labels["name"]
	if (view != "SERVER") $0 = substr($0, length(key) + 1)
	# Map UID to name unless disabled (-n)
	if (key !~ /:/ && view == "USER" && !no_names) {
		d3print2("Mapped UID %d to \"%s\"", key, uid2name(key))
		key = uid2name(key)
	}
	key1 = labels["key1"]
	key2 = labels["key2"]
	# NOTE(review): the next two assignments index data_type with
	# the string literals "key1"/"key2" rather than the key1/key2
	# variables -- and type1/type2 are never read below, so this
	# looks like dead (possibly vestigial) code; verify intent
	type1 = data_type["key1"]
	type2 = data_type["key2"]
	value1 = value2 = 0
	if (view == "SERVER") {
		# NB: Server mode gives no sum, create one
		if (output_json) {
			# ST: CTLDATA
			for (n = 1; n <= njson_fields; n++) {
				f = json_field_name[n]
				k = json_key[f]
				v = $json_field[f]
				if (data_type[k] == type_kib)
					v *= 1024 # NB: KiB/s -> B/s
				kv_sum(k, "Sum:", v)
				# NOTE(review): data_agg indexed by f
				# here but by key elsewhere; identical
				# only when metric name == key
				if (data_agg[f] == agg_avg)
					kv_avg(k, key, v)
				else # agg_sum
					kv_sum(k, key, v)
			}
		} else {
			value1 = $labels["field1"]
			if (data_type[key1] == type_kib)
				value1 *= 1024 # NB: KiB/s -> B/s
			value2 = $labels["field2"]
			if (data_type[key2] == type_kib)
				value2 *= 1024 # NB: KiB/s -> B/s
			kv_sum(key1, "Sum:", value1)
			kv_sum(key2, "Sum:", value2)
			if (data_agg[key1] == agg_avg)
				kv_avg(key1, key, value1)
			else
				kv_sum(key1, key, value1)
			if (data_agg[key2] == agg_avg)
				kv_avg(key2, key, value2)
			else
				kv_sum(key2, key, value2)
		}
	} else {
		# ST: CTLDATA
		# Peel "value [prop]" pairs off the front of the line
		delete _props
		while (match($0, /^ *[^ ]+ \[[^\]]+\]/)) {
			kprop = substr($2, 2, length($2) - 2)
			value = $1
			_props[kprop] = value
			$0 = substr($0, RLENGTH + 1)
		}
		if (output_json) {
			value = 0
			for (n = 1; n <= njson_fields; n++) {
				f = json_field_name[n]
				value += _props[json_prop[f]]
			}
			if (value < 1) return 0
			for (n = 1; n <= njson_fields; n++) {
				f = json_field_name[n]
				kv_sum(json_key[f], key,
					_props[json_prop[f]])
			}
		} else {
			value1 = int(_props[labels["prop1"]])
			value2 = int(_props[labels["prop2"]])
			if (value1 + value2 < 1) return 0
			kv_sum(key1, key, value1)
			kv_sum(key2, key, value2)
		}
	}
	_keys[key] # NB: bare reference registers the key
	return 1
}

############################################# Data formatting

#
# Render a numeric value with an SI/IEC suffix appropriate to the
# metric's data type.  A composite "key1+key2" key renders "-"
# when the two types disagree.
#
function humanize(key, value,  raw, n, suffix, suffixes, div,
	fmt, unit, key1, key2, type, type2)
{
	raw = value
	if (match(key, /\+/)) {
		type = data_type[key1 = substr(key, 1, RSTART - 1)]
		type2 = data_type[key2 = substr(key, RSTART + 1)]
		if (type != type2 && key1 != "" && key2 != "") {
			d4print4("+ humanize(%s, %s) => %s [%s]",
				key, raw, "-", sprintf("%s; %s != %s",
				"type mismatch", type, type2))
			return "-"
		}
	} else {
		type = data_type[key]
	}
	if (type == type_iec || type == type_kib) {
		div = 1024
		fmt = "%'"'"'.2f%s%s"
		unit = type ~ /-rate/ && !in_summary ? "B/s" : "B"
	} else if (type == type_num) {
		div = 1000
		fmt = "%'"'"'.1f%s%s"
		unit = type ~ /-rate/ && !in_summary ? "/s" : ""
	} else { # type_reg
		div = 1000
		fmt = "%'"'"'.1f%s%s"
		unit = ""
	}
	n = split(",K,M,G,T,E", suffixes, /,/)
	for (suffix = 1; suffix <= n; suffix++) {
		if (int(value) < div) break
		value /= div
	}
	# NOTE(review): `v' is not a local of this function; this
	# looks like it was meant to strip trailing zeros from
	# `value' (cf. json_add_prec), but as written it is a no-op
	# on a stray global -- confirm and fix upstream
	if (v ~ /\./) sub(/\.?0+$/, "", v)
	value = sprintf(fmt, value, suffixes[suffix], unit)
	d4print4("-> humanize(%s, %s) => %s [%s]", key, raw, value, type)
	return value
}
#
# Append "key":value (format supplied) to a JSON object body,
# inserting a comma separator when json is non-empty.
#
function json_add(json, key, format, value)
{
	return json (length(json) < 2 ? "" : ",") \
		sprintf("\"%s\":" format, key, value)
}
function json_add_str(json, key, value)
{
	return json_add(json, key, "\"%s\"", value)
}
function json_add_uint(json, key, value)
{
	return json_add(json, key, "%u", value)
}
#
# Append a float at the given precision, trimming trailing zeros
# (and a bare trailing decimal point).
#
function json_add_prec(json, key, precision, value,  x)
{
	x = sprintf("%.*f", precision, value)
	if (x ~ /\./) sub(/\.?0+$/, "", x)
	return json_add(json, key, "%s", x)
}
function json_add_float(json, key, value)
{
	return json_add_prec(json, key, 12, value)
}

############################################# Views

#
# Top-level per-sample driver: compute totals/rates, emit the
# summary row (console or JSON), sort the per-credential keys
# (ST: SORTING) and emit as many rows as fit on screen.
#
function process_view(curview,  key1, bar1, rate1, total1, key2,
	bar2, rate2, total2, combined_rate, combined_total, cred, i,
	_keys_sorted, n, r, table_rows, v)
{
	time_delta = interval
	if (time_delta < 1) time_delta = 1 # prevent division-by-0
	debug1("Time delta is %d seconds", time_delta)
	if (output_json && in_summary)
		if (nsamples != 1) return
	key1 = labels["key1"]
	key2 = labels["key2"]
	bar1 = bar2 = ""
	total1 = kv(key1, "Sum:")
	if (data_agg[key1] == agg_avg) {
		total1 /= interval
		rate1 = 0 # UNUSED
	} else {
		rate1 = total1 / time_delta
	}
	if (key2 != "") {
		total2 = kv(key2, "Sum:")
		if (!output_json) combined_total += total2
		if (data_agg[key2] == agg_avg) {
			total2 /= interval
			rate2 = 0 # UNUSED
		} else {
			rate2 = total2 / time_delta
		}
	}
	if (data_mode && (output_json || data_metric == "rw")) {
		combined_total = total1 + total2
		combined_rate = combined_total / time_delta
	}
	if (output_json) {
		process_view_json(curview, combined_total, combined_rate)
	} else {
		process_view_cons(curview, combined_total, combined_rate,
			key1, key2, total1, total2, rate1, rate2)
	}
	#
	# Decorate combined read/write values
	# ST: SORTING
	#
	for (cred in _keys) {
		if (sort == "total") {
			v = 0
			v += kv(key1, cred)
			v += kv(key2, cred)
		} else {
			v = kv(sort, cred)
		}
		# Prefix a fixed-width sort value so _asort() orders
		# numerically; stripped again below ("Undecorate")
		_keys[cred] = sprintf("%99.12f %s", v, cred)
	}
	#
	# Print subtotals
	#
	r = 1
	n = _asort(_keys, _keys_sorted)
	table_rows = output_json || !cons ? n : rows - 4
	for (i = n; i >= 1 && r <= table_rows; i--) {
		if (debugger >= 3 && !output_json) {
			debug2("r=[%d] table_rows=[%d]", r, table_rows)
		}
		cred = _keys_sorted[i]
		sub(/^ *[^ ]+ +/, "", cred) # Undecorate
		r += process_cred(curview, cred)
	}
	# NB: assignment in condition is intentional -- remember
	# overflow state in `more' for sample_check()
	if (more = i > 0)
		buffer_add(sprintf("%s(%d more) ... %s",
			color ? inv bold yellow : "", i,
			color ? noinv nobold fgreset : ""))
}
#
# Emit the per-sample summary totals as one JSON object.
#
function process_view_json(curview, combined_total, combined_rate,
	f, k, n, json_out, v)
{
	json_out = ""
	json_out = json_add_uint(json_out, "time", tm)
	json_out = json_add_str(json_out, "ident", "total_" curview)
	if (data_mode) {
		json_out = json_add_uint(json_out,
			labels["combined_total"], combined_total)
		json_out = json_add_float(json_out,
			labels["combined_rate"], combined_rate)
	}
	for (n = 1; n <= njson_fields; n++) {
		f = json_field_name[n]
		v = kv(k = json_key[f], "Sum:")
		if (!show_zero_values) {
			if (v == "") continue
			if (v == 0) continue
			if (v < 0.000000000001) continue
		}
		if (data_agg[k] == agg_avg) {
			d4print3("!! %s Sum: %s %s", f,
				sprintf("%d/%d", v, interval),
				sprintf("(%.2f)", v / interval))
			json_out = json_add_float(json_out,
				json_key[f], v / interval)
		} else {
			d4print2("!! %s Sum: %s", f, v)
			json_out = json_add_uint(json_out,
				json_key[f], v)
			json_out = json_add_float(json_out,
				json_rate[f], v / interval)
		}
	}
	buffer_add("{" json_out "}\n")
}
#
# Emit the console "total" summary row.
#
function process_view_cons(curview, combined_total, combined_rate,
	key1, key2, total1, total2, rate1, rate2,  dv1, dv2, dvc)
{
	if (debugger >= 4 && data_agg[key1] == agg_avg) {
		debug2("!! %s Sum: %s", key1, sprintf("%d/%d (%.2f)",
			total1 * interval, interval, total1))
	} else { # agg_sum
		d4print2("!! %s Sum: %d", key1, total1)
	}
	if (debugger >= 4 && data_agg[key2] == agg_avg) {
		debug2("!! %s Sum: %s", key2, sprintf("%d/%d (%.2f)",
			total2 * interval, interval, total2))
	} else if (key != "") { # agg_sum
		# NOTE(review): `key' is not defined in this function;
		# this branch can never fire (empty global) -- likely
		# `key2 != ""' was intended.  Debug-only path.
		d4print2("!! %s Sum: %d", key2, total2)
	}
	# Display values
	dvc = show_basic ? combined_total : combined_rate
	dv1 = data_type[key1] ~ /-rate/ ? rate1 : total1
	dv2 = data_type[key2] ~ /-rate/ ? rate2 : total2
	buffer_add1("total", humanize(key1 "+" key2, dvc),
		humanize(key1, dv1), bar1, bar2, humanize(key2, dv2))
}
#
# Emit one per-credential row (user/client/server).  Builds the
# two proportion bars (left bar `===>', right bar `<===') scaled
# to each metric's share of its total.  Returns 1 if a row was
# produced, 0 if skipped.
#
function process_cred(curview, cred,  total1, cred_value1, bar1,
	bar1_pct, bar1_size, value1_rate, total2, cred_value2, bar2,
	bar2_pct, bar2_size, value2_rate, cred_total, cred_rate, pch,
	key1, key2)
{
	if (cred == "Sum:") return 0
	pch = "="
	if ((debugger >= 3 && !output_json) || debugger >= 4) {
		debug1("-> process_cred(curview = %s, ...)", curview)
		debug1("+ cred=[%s]", cred)
	}
	key1 = labels["key1"]
	key2 = labels["key2"]
	total1 = kv(key1, "Sum:")
	if (data_agg[key1] == agg_avg) total1 /= interval
	cred_value1 = kv(key1, cred)
	cred_total = cred_value1
	if (key2 != "") {
		total2 = kv(key2, "Sum:")
		if (data_agg[key2] == agg_avg) total2 /= interval
		cred_value2 = kv(key2, cred)
		cred_total += cred_value2
	}
	cred_rate = cred_total / time_delta
	# Bar 1
	value1_rate = cred_value1 / time_delta
	if (total1 > 0)
		bar1_pct = cred_value1 / total1
	else
		bar1_pct = 0
	bar1_size = bar_size * bar1_pct
	bar1 = sprintf("%*s", bar1_size, "")
	gsub(/ /, pch, bar1)
	sub(/.$/, ">", bar1)
	# Bar 2
	if (key2 != "") {
		value2_rate = cred_value2 / time_delta
		if (total2 > 0)
			bar2_pct = cred_value2 / total2
		else
			bar2_pct = 0
		bar2_size = bar_size * bar2_pct
		bar2 = sprintf("%*s", bar2_size, "")
		gsub(/ /, pch, bar2)
		sub(/^./, "<", bar2)
	}
	if (output_json) {
		process_cred_json(curview, cred, cred_total, cred_rate)
	} else {
		process_cred_cons(curview, cred, cred_total, cred_rate,
			key1, bar1, bar1_pct, cred_value1, value1_rate,
			key2, bar2, bar2_pct, cred_value2, value2_rate)
	}
	return 1
}
#
# Emit one per-credential JSON object, honoring -R redaction and
# -V zero suppression.
#
function process_cred_json(curview, cred, cred_total, cred_rate,
	_cred, f, json_out, k, n, t, v)
{
	# ST: JSONDATA
	if (!show_zero_values) {
		v = 0
		for (n = 1; n <= njson_fields; n++) {
			f = json_field_name[n]
			v += kv(json_key[f], cred)
		}
		if (v == 0) return 0
	}
	json_out = ""
	json_out = json_add_uint(json_out, "time", tm)
	json_out = json_add_str(json_out, "ident", curview)
	_cred = cred
	if (redact && curview == "user") {
		if (cred !~ unredacted_users)
			_cred = get_random(length(cred))
	} else if (redact && curview == "client") {
		if (cred !~ unredacted_clients)
			_cred = get_random(length(cred))
	} else if (redact && curview == "server") {
		if (cred !~ unredacted_servers)
			_cred = get_random(length(cred))
	}
	json_out = json_add_str(json_out, curview, _cred)
	if (!redact && curview == "user" && name2uid(cred) != "") {
		json_out = json_add_str(json_out, "uid", name2uid(cred))
	}
	if (mode == "data") {
		json_out = json_add_uint(json_out,
			labels["combined_total"], cred_total)
		json_out = json_add_float(json_out,
			labels["combined_rate"], cred_rate)
	}
	for (n = 1; n <= njson_fields; n++) {
		f = json_field_name[n]
		k = json_key[f]
		v = kv(k, cred)
		if (!show_zero_values) {
			if (v == "") continue
			if (v == 0) continue
			if (v < 0.000000000001) continue
		}
		t = kv(k, "Sum:")
		if (data_agg[k] == agg_avg) t /= interval
		d4print3("!! %s %s: %s", f, cred, v)
		d4print3("!! %s %s %s", f, cred,
			sprintf("is %.12f%% of total", t == 0 ?
			0 : v * 100 / t))
		# NOTE(review): branches on data_type here, while
		# process_view_json() branches on data_agg -- confirm
		# the asymmetry is intentional
		if (data_type[k] ~ /-rate/) {
			json_out = json_add_uint(json_out, k, v)
			json_out = json_add_float(json_out,
				json_rate[f], v / interval)
		} else {
			json_out = json_add_float(json_out, k, v)
		}
	}
	buffer_add("{" json_out "}\n")
}
#
# Emit one per-credential console row.
#
function process_cred_cons(curview, cred, cred_total, cred_rate,
	key1, bar1, bar1_pct, cred_value1, value1_rate, key2, bar2,
	bar2_pct, cred_value2, value2_rate,  dv1, dv2, dvc, n)
{
	if (debugger >= 4 &&
	    (cred_value1 >= 0.000000000001 || show_zero_values)) {
		if (data_agg[key1] == agg_avg) {
			n = key_items[idx(key1, cred)]
			debug4("!! %s %s: %s (%.2f)", key1, cred,
				sprintf("%d/%d", cred_value1 * n, n),
				cred_value1)
		} else {
			debug3("!! %s %s %d", key1, cred, cred_value1)
		}
		debug3("!! %s %s %s", key1, cred,
			sprintf("is %.12f%% of total", bar1_pct * 100))
	}
	if (debugger >= 4 && key2 != "" &&
	    (cred_value2 > 0.0000000000009 || show_zero_values)) {
		if (data_agg[key2] == agg_avg) {
			n = key_items[idx(key2, cred)]
			debug4("!! %s %s: %s (%.2f)", key2, cred,
				sprintf("%d/%d", cred_value2 * n, n),
				cred_value2)
		} else {
			debug3("!! %s %s %d", key2, cred, cred_value2)
		}
		debug3("!! %s %s %s", key2, cred,
			sprintf("is %.12f%% of total", bar2_pct * 100))
	}
	# Display values
	dvc = show_basic ? cred_total : cred_rate
	dv1 = data_type[key1] ~ /-rate/ ? value1_rate : cred_value1
	dv2 = data_type[key2] ~ /-rate/ ? value2_rate : cred_value2
	buffer_add1(cred, humanize(key1 "+" key2, dvc),
		humanize(key1, dv1), bar1, bar2, humanize(key2, dv2))
}

############################################# Flow control

#
# Countdown for -N: after the last requested sample, kill the
# background producer (bpid) and exit.
#
function sample_check()
{
	if (in_summary) in_summary = 0
	if (nsamples > 0 && --samples_left < 1) {
		system(sprintf("kill %d > /dev/null 2>&1", bpid))
		if (more) printf "\n"
		exit
	}
}
######################################## MAIN
# Producer signalled its exit status; remember it for END
sub(/^EXIT:/, "") { exit_status = $0; next }
# Swallow expect(1)-style "spawn" banner unless raw/debugging
NR == 1 && $1 == "spawn" && !(raw_view || debugger) { next }
raw_view || debugger >= 2 {
	if (debugger > 0) {
		print > stderr
		fflush(stderr)
	} else print
	# Exit if no more samples desired
	if (/^---$/ && raw_view) sample_check()
	if (debugger == 0) next
}
#
# ST: CTLDATA
#
/^===/ { # Data start
	tm = _systime()
	if (view == "SERVER") {
		# Server view accumulates `interval' sub-samples
		# before resetting
		if (subsamples_left == interval) {
			clear_data()
			clear_buffer()
		}
	} else {
		clear_data()
		clear_buffer()
	}
	next
}
/^---$/ { # Data end
	if (view == "SERVER") {
		if (subsamples_left-- > 1)
			next
		else
			subsamples_left = interval
	}
	buffer_add_data()
	#
	# Dump information
	#
	print_buffer()
	sample_check() # Exit if no more samples desired
	next
}
view == "SERVER" && /^[[:space:]]*[[:digit:]]/ { # Data
	parse_line()
	next
}
view != "SERVER" && /^[^=[:space:]]/ && $1 !~ /\|/ { # Data
	parse_line()
	next
}
#
# ST: CALLS
#
# In-band control messages of the form "name|args"
{ call = "" }
match($0, /^[_a-z]+(-[_a-z]+)?\|/) {
	call = substr($0, 1, RLENGTH - 1)
	$0 = substr($0, RSTART + RLENGTH)
}
call == "resize" {
	if (output_json) next
	rows = $1
	cols = $2
	resize()
}
################################################## END
END { exit exit_status }
' # END-QUOTE

################################################################################
# END
################################################################################
# Local Variables:
# mode: sh
# tab-width: 8
# sh-basic-offset: 8
# indent-tabs-mode: t
# backward-delete-char-untabify-method: nil
# End: