Use common datetime part when naming output files across all formats

- The datetime is fetched just once, at the start of the run.
- It is then passed, always as the first argument (arg1), to each function that creates an output file.
This commit is contained in:
Jan Dvorak
2025-09-25 23:26:33 +02:00
parent d66b67befe
commit 67aba03a41

View File

@ -1453,6 +1453,7 @@ fileout() {
json_header() { json_header() {
local datetime_filename_part="$1"
local fname_prefix local fname_prefix
local filename_provided=false local filename_provided=false
@ -1481,9 +1482,9 @@ json_header() {
fname_prefix="${FNAME_PREFIX}${NODE}_p${PORT}" fname_prefix="${FNAME_PREFIX}${NODE}_p${PORT}"
fi fi
if [[ -z "$JSONFILE" ]]; then if [[ -z "$JSONFILE" ]]; then
JSONFILE="$fname_prefix-$(date +"%Y%m%d-%H%M".json)" JSONFILE="$fname_prefix-${datetime_filename_part}.json"
elif [[ -d "$JSONFILE" ]]; then elif [[ -d "$JSONFILE" ]]; then
JSONFILE="$JSONFILE/${fname_prefix}-$(date +"%Y%m%d-%H%M".json)" JSONFILE="$JSONFILE/${fname_prefix}-${datetime_filename_part}.json"
fi fi
# Silently reset APPEND var if the file doesn't exist as otherwise it won't be created # Silently reset APPEND var if the file doesn't exist as otherwise it won't be created
if "$APPEND" && [[ ! -s "$JSONFILE" ]]; then if "$APPEND" && [[ ! -s "$JSONFILE" ]]; then
@ -1504,6 +1505,7 @@ json_header() {
csv_header() { csv_header() {
local datetime_filename_part="$1"
local fname_prefix local fname_prefix
local filename_provided=false local filename_provided=false
@ -1529,9 +1531,9 @@ csv_header() {
fname_prefix="${FNAME_PREFIX}${NODE}_p${PORT}" fname_prefix="${FNAME_PREFIX}${NODE}_p${PORT}"
fi fi
if [[ -z "$CSVFILE" ]]; then if [[ -z "$CSVFILE" ]]; then
CSVFILE="${fname_prefix}-$(date +"%Y%m%d-%H%M".csv)" CSVFILE="${fname_prefix}-${datetime_filename_part}.csv"
elif [[ -d "$CSVFILE" ]]; then elif [[ -d "$CSVFILE" ]]; then
CSVFILE="$CSVFILE/${fname_prefix}-$(date +"%Y%m%d-%H%M".csv)" CSVFILE="$CSVFILE/${fname_prefix}-${datetime_filename_part}.csv"
fi fi
# Silently reset APPEND var if the file doesn't exist as otherwise it won't be created # Silently reset APPEND var if the file doesn't exist as otherwise it won't be created
if "$APPEND" && [[ ! -s "$CSVFILE" ]]; then if "$APPEND" && [[ ! -s "$CSVFILE" ]]; then
@ -1558,6 +1560,7 @@ csv_header() {
################# END JSON file functions. START HTML functions #################### ################# END JSON file functions. START HTML functions ####################
html_header() { html_header() {
local datetime_filename_part="$1"
local fname_prefix local fname_prefix
local filename_provided=false local filename_provided=false
@ -1586,9 +1589,9 @@ html_header() {
fname_prefix="${FNAME_PREFIX}${NODE}_p${PORT}" fname_prefix="${FNAME_PREFIX}${NODE}_p${PORT}"
fi fi
if [[ -z "$HTMLFILE" ]]; then if [[ -z "$HTMLFILE" ]]; then
HTMLFILE="$fname_prefix-$(date +"%Y%m%d-%H%M".html)" HTMLFILE="$fname_prefix-${datetime_filename_part}.html"
elif [[ -d "$HTMLFILE" ]]; then elif [[ -d "$HTMLFILE" ]]; then
HTMLFILE="$HTMLFILE/$fname_prefix-$(date +"%Y%m%d-%H%M".html)" HTMLFILE="$HTMLFILE/$fname_prefix-${datetime_filename_part}.html"
fi fi
# Silently reset APPEND var if the file doesn't exist as otherwise it won't be created # Silently reset APPEND var if the file doesn't exist as otherwise it won't be created
if "$APPEND" && [[ ! -s "$HTMLFILE" ]]; then if "$APPEND" && [[ ! -s "$HTMLFILE" ]]; then
@ -1636,8 +1639,9 @@ html_footer() {
################# END HTML file functions #################### ################# END HTML file functions ####################
prepare_logging() { prepare_logging() {
# arg1: for testing mx records name we put a name of logfile in here, otherwise we get strange file names local datetime_filename_part="$1"
local fname_prefix="$1" # arg2: for testing mx records name we put a name of logfile in here, otherwise we get strange file names
local fname_prefix="$2"
local filename_provided=false local filename_provided=false
if [[ -n "$PARENT_LOGFILE" ]]; then if [[ -n "$PARENT_LOGFILE" ]]; then
@ -1654,10 +1658,10 @@ prepare_logging() {
[[ -z "$fname_prefix" ]] && fname_prefix="${FNAME_PREFIX}${NODE}_p${PORT}" [[ -z "$fname_prefix" ]] && fname_prefix="${FNAME_PREFIX}${NODE}_p${PORT}"
if [[ -z "$LOGFILE" ]]; then if [[ -z "$LOGFILE" ]]; then
LOGFILE="$fname_prefix-$(date +"%Y%m%d-%H%M".log)" LOGFILE="$fname_prefix-${datetime_filename_part}.log"
elif [[ -d "$LOGFILE" ]]; then elif [[ -d "$LOGFILE" ]]; then
# actually we were instructed to place all files in a DIR instead of the current working dir # actually we were instructed to place all files in a DIR instead of the current working dir
LOGFILE="$LOGFILE/$fname_prefix-$(date +"%Y%m%d-%H%M".log)" LOGFILE="$LOGFILE/$fname_prefix-${datetime_filename_part}.log"
else else
: # just for clarity: a log file was specified, no need to do anything else : # just for clarity: a log file was specified, no need to do anything else
fi fi
@ -23412,6 +23416,7 @@ draw_line() {
run_mx_all_ips() { run_mx_all_ips() {
local datetime_filename_part="$1"
local mxs mx local mxs mx
local mxport local mxport
local -i ret=0 local -i ret=0
@ -23419,18 +23424,18 @@ run_mx_all_ips() {
STARTTLS_PROTOCOL="smtp" STARTTLS_PROTOCOL="smtp"
# test first higher priority servers # test first higher priority servers
mxs=$(get_mx_record "$1" | sort -n | sed -e 's/^.* //' -e 's/\.$//' | tr '\n' ' ') mxs=$(get_mx_record "$2" | sort -n | sed -e 's/^.* //' -e 's/\.$//' | tr '\n' ' ')
if [[ $CMDLINE_IP == one ]]; then if [[ $CMDLINE_IP == one ]]; then
word="as instructed one" # with highest priority word="as instructed one" # with highest priority
mxs=${mxs%% *} mxs=${mxs%% *}
else else
word="the only" word="the only"
fi fi
mxport=${2:-25} mxport=${3:-25}
if [[ -n "$LOGFILE" ]] || [[ -n "$PARENT_LOGFILE" ]]; then if [[ -n "$LOGFILE" ]] || [[ -n "$PARENT_LOGFILE" ]]; then
prepare_logging prepare_logging "${datetime_filename_part}"
else else
prepare_logging "${FNAME_PREFIX}mx-$1" prepare_logging "${datetime_filename_part}" "${FNAME_PREFIX}mx-$1"
fi fi
if [[ -n "$mxs" ]] && [[ "$mxs" != ' ' ]]; then if [[ -n "$mxs" ]] && [[ "$mxs" != ' ' ]]; then
[[ $(count_words "$mxs") -gt 1 ]] && MULTIPLE_CHECKS=true [[ $(count_words "$mxs") -gt 1 ]] && MULTIPLE_CHECKS=true
@ -25324,6 +25329,7 @@ lets_roll() {
RET=0 # this is a global as we can have a function main(), see #705. Should we toss then all local $ret? RET=0 # this is a global as we can have a function main(), see #705. Should we toss then all local $ret?
ip="" ip=""
stopwatch start stopwatch start
DATETIME_FILENAME_PART="$(date +"%Y%m%d-%H%M")"
lets_roll init lets_roll init
initialize_globals initialize_globals
@ -25334,9 +25340,9 @@ lets_roll() {
# html_header() needs to be called early! Otherwise if html_out() is called before html_header() and the # html_header() needs to be called early! Otherwise if html_out() is called before html_header() and the
# command line contains --htmlfile <htmlfile> or --html, it'll make problems with html output, see #692. # command line contains --htmlfile <htmlfile> or --html, it'll make problems with html output, see #692.
# json_header and csv_header could be called later but for context reasons we'll leave it here # json_header and csv_header could be called later but for context reasons we'll leave it here
html_header html_header "${DATETIME_FILENAME_PART}"
json_header json_header "${DATETIME_FILENAME_PART}"
csv_header csv_header "${DATETIME_FILENAME_PART}"
get_install_dir get_install_dir
# see #705, we need to source TLS_DATA_FILE here instead of in get_install_dir(), see #705 # see #705, we need to source TLS_DATA_FILE here instead of in get_install_dir(), see #705
[[ -r "$TLS_DATA_FILE" ]] && . "$TLS_DATA_FILE" [[ -r "$TLS_DATA_FILE" ]] && . "$TLS_DATA_FILE"
@ -25361,7 +25367,7 @@ lets_roll() {
fileout_banner fileout_banner
if "$do_mass_testing"; then if "$do_mass_testing"; then
prepare_logging prepare_logging "${DATETIME_FILENAME_PART}"
if [[ "$MASS_TESTING_MODE" == parallel ]]; then if [[ "$MASS_TESTING_MODE" == parallel ]]; then
run_mass_testing_parallel run_mass_testing_parallel
else else
@ -25376,12 +25382,12 @@ lets_roll() {
#FIXME: do we need this really here? #FIXME: do we need this really here?
count_do_variables # if we have just 1x "do_*" --> we do a standard run -- otherwise just the one specified count_do_variables # if we have just 1x "do_*" --> we do a standard run -- otherwise just the one specified
[[ $? -eq 1 ]] && set_scanning_defaults [[ $? -eq 1 ]] && set_scanning_defaults
run_mx_all_ips "${URI}" $PORT # we should reduce run_mx_all_ips to what's necessary as below we have similar code run_mx_all_ips "${DATETIME_FILENAME_PART}" "${URI}" $PORT # we should reduce run_mx_all_ips to what's necessary as below we have similar code
exit $? exit $?
fi fi
[[ -z "$NODE" ]] && parse_hn_port "${URI}" # NODE, URL_PATH, PORT, IPADDRs2CHECK and IPADDRs2SHOW is set now [[ -z "$NODE" ]] && parse_hn_port "${URI}" # NODE, URL_PATH, PORT, IPADDRs2CHECK and IPADDRs2SHOW is set now
prepare_logging prepare_logging "${DATETIME_FILENAME_PART}"
if [[ -n "$PROXY" ]] && $DNS_VIA_PROXY; then if [[ -n "$PROXY" ]] && $DNS_VIA_PROXY; then
NODEIP="$NODE" NODEIP="$NODE"