From 67aba03a4199c53cddb60e499c47ed9cc63b6ad5 Mon Sep 17 00:00:00 2001
From: Jan Dvorak
Date: Thu, 25 Sep 2025 23:26:33 +0200
Subject: [PATCH 1/5] Use common datetime part when naming output files across all formats

- the datetime is fetched just once
- it is then passed to the functions that start the output files, always as arg1
---
 testssl.sh | 46 ++++++++++++++++++++++++++--------------------
 1 file changed, 26 insertions(+), 20 deletions(-)

diff --git a/testssl.sh b/testssl.sh
index c6a10ae..73cd4f6 100755
--- a/testssl.sh
+++ b/testssl.sh
@@ -1453,6 +1453,7 @@ fileout() {
 
 
 json_header() {
+     local datetime_filename_part="$1"
      local fname_prefix
      local filename_provided=false
 
@@ -1481,9 +1482,9 @@ json_header() {
           fname_prefix="${FNAME_PREFIX}${NODE}_p${PORT}"
      fi
      if [[ -z "$JSONFILE" ]]; then
-          JSONFILE="$fname_prefix-$(date +"%Y%m%d-%H%M".json)"
+          JSONFILE="$fname_prefix-${datetime_filename_part}.json"
      elif [[ -d "$JSONFILE" ]]; then
-          JSONFILE="$JSONFILE/${fname_prefix}-$(date +"%Y%m%d-%H%M".json)"
+          JSONFILE="$JSONFILE/${fname_prefix}-${datetime_filename_part}.json"
      fi
      # Silently reset APPEND var if the file doesn't exist as otherwise it won't be created
      if "$APPEND" && [[ ! -s "$JSONFILE" ]]; then
@@ -1504,6 +1505,7 @@
 
 
 csv_header() {
+     local datetime_filename_part="$1"
      local fname_prefix
      local filename_provided=false
 
@@ -1529,9 +1531,9 @@
           fname_prefix="${FNAME_PREFIX}${NODE}_p${PORT}"
      fi
      if [[ -z "$CSVFILE" ]]; then
-          CSVFILE="${fname_prefix}-$(date +"%Y%m%d-%H%M".csv)"
+          CSVFILE="${fname_prefix}-${datetime_filename_part}.csv"
      elif [[ -d "$CSVFILE" ]]; then
-          CSVFILE="$CSVFILE/${fname_prefix}-$(date +"%Y%m%d-%H%M".csv)"
+          CSVFILE="$CSVFILE/${fname_prefix}-${datetime_filename_part}.csv"
      fi
      # Silently reset APPEND var if the file doesn't exist as otherwise it won't be created
      if "$APPEND" && [[ ! -s "$CSVFILE" ]]; then
@@ -1558,6 +1560,7 @@ csv_header() {
 ################# END JSON file functions. START HTML functions ####################
 
 html_header() {
+     local datetime_filename_part="$1"
      local fname_prefix
      local filename_provided=false
 
@@ -1586,9 +1589,9 @@
           fname_prefix="${FNAME_PREFIX}${NODE}_p${PORT}"
      fi
      if [[ -z "$HTMLFILE" ]]; then
-          HTMLFILE="$fname_prefix-$(date +"%Y%m%d-%H%M".html)"
+          HTMLFILE="$fname_prefix-${datetime_filename_part}.html"
      elif [[ -d "$HTMLFILE" ]]; then
-          HTMLFILE="$HTMLFILE/$fname_prefix-$(date +"%Y%m%d-%H%M".html)"
+          HTMLFILE="$HTMLFILE/$fname_prefix-${datetime_filename_part}.html"
      fi
      # Silently reset APPEND var if the file doesn't exist as otherwise it won't be created
      if "$APPEND" && [[ ! -s "$HTMLFILE" ]]; then
@@ -1636,8 +1639,9 @@ html_footer() {
 ################# END HTML file functions ####################
 
 prepare_logging() {
-     # arg1: for testing mx records name we put a name of logfile in here, otherwise we get strange file names
-     local fname_prefix="$1"
+     local datetime_filename_part="$1"
+     # arg2: for testing mx records name we put a name of logfile in here, otherwise we get strange file names
+     local fname_prefix="$2"
      local filename_provided=false
 
      if [[ -n "$PARENT_LOGFILE" ]]; then
@@ -1654,10 +1658,10 @@
 
      [[ -z "$fname_prefix" ]] && fname_prefix="${FNAME_PREFIX}${NODE}_p${PORT}"
      if [[ -z "$LOGFILE" ]]; then
-          LOGFILE="$fname_prefix-$(date +"%Y%m%d-%H%M".log)"
+          LOGFILE="$fname_prefix-${datetime_filename_part}.log"
      elif [[ -d "$LOGFILE" ]]; then
           # actually we were instructed to place all files in a DIR instead of the current working dir
-          LOGFILE="$LOGFILE/$fname_prefix-$(date +"%Y%m%d-%H%M".log)"
+          LOGFILE="$LOGFILE/$fname_prefix-${datetime_filename_part}.log"
      else
          :     # just for clarity: a log file was specified, no need to do anything else
      fi
@@ -23412,6 +23416,7 @@ draw_line() {
 
 
 run_mx_all_ips() {
+     local datetime_filename_part="$1"
      local mxs mx
      local mxport
      local -i ret=0
@@ -23419,18 +23424,18 @@
      STARTTLS_PROTOCOL="smtp"
 
      # test first higher priority servers
-     mxs=$(get_mx_record "$1" | sort -n | sed -e 's/^.* //' -e 's/\.$//' | tr '\n' ' ')
+     mxs=$(get_mx_record "$2" | sort -n | sed -e 's/^.* //' -e 's/\.$//' | tr '\n' ' ')
      if [[ $CMDLINE_IP == one ]]; then
           word="as instructed one"     # with highest priority
           mxs=${mxs%% *}
      else
           word="the only"
      fi
-     mxport=${2:-25}
+     mxport=${3:-25}
      if [[ -n "$LOGFILE" ]] || [[ -n "$PARENT_LOGFILE" ]]; then
-          prepare_logging
+          prepare_logging "${datetime_filename_part}"
      else
-          prepare_logging "${FNAME_PREFIX}mx-$1"
+          prepare_logging "${datetime_filename_part}" "${FNAME_PREFIX}mx-$1"
      fi
      if [[ -n "$mxs" ]] && [[ "$mxs" != ' ' ]]; then
           [[ $(count_words "$mxs") -gt 1 ]] && MULTIPLE_CHECKS=true
@@ -25324,6 +25329,7 @@ lets_roll() {
 RET=0     # this is a global as we can have a function main(), see #705. Should we toss then all local $ret?
 ip=""
 stopwatch start
+DATETIME_FILENAME_PART="$(date +"%Y%m%d-%H%M")"
 
 lets_roll init
 initialize_globals
@@ -25334,9 +25340,9 @@
 # html_header() needs to be called early! Otherwise if html_out() is called before html_header() and the
 # command line contains --htmlfile or --html, it'll make problems with html output, see #692.
 # json_header and csv_header could be called later but for context reasons we'll leave it here
-html_header
-json_header
-csv_header
+html_header "${DATETIME_FILENAME_PART}"
+json_header "${DATETIME_FILENAME_PART}"
+csv_header "${DATETIME_FILENAME_PART}"
 get_install_dir
 # see #705, we need to source TLS_DATA_FILE here instead of in get_install_dir(), see #705
 [[ -r "$TLS_DATA_FILE" ]] && . "$TLS_DATA_FILE"
@@ -25361,7 +25367,7 @@
 fileout_banner
 
 if "$do_mass_testing"; then
-     prepare_logging
+     prepare_logging "${DATETIME_FILENAME_PART}"
      if [[ "$MASS_TESTING_MODE" == parallel ]]; then
          run_mass_testing_parallel
      else
@@ -25376,12 +25382,12 @@
      #FIXME: do we need this really here?
      count_do_variables     # if we have just 1x "do_*" --> we do a standard run -- otherwise just the one specified
      [[ $? -eq 1 ]] && set_scanning_defaults
-     run_mx_all_ips "${URI}" $PORT     # we should reduce run_mx_all_ips to what's necessary as below we have similar code
+     run_mx_all_ips "${DATETIME_FILENAME_PART}" "${URI}" $PORT     # we should reduce run_mx_all_ips to what's necessary as below we have similar code
      exit $?
 fi
 
 [[ -z "$NODE" ]] && parse_hn_port "${URI}"     # NODE, URL_PATH, PORT, IPADDRs2CHECK and IPADDRs2SHOW is set now
-prepare_logging
+prepare_logging "${DATETIME_FILENAME_PART}"
 
 if [[ -n "$PROXY" ]] && $DNS_VIA_PROXY; then
      NODEIP="$NODE"

From e0009cf0cb596c5a3ab0f614cfdd0f21b1c17bed Mon Sep 17 00:00:00 2001
From: Jan Dvorak
Date: Fri, 26 Sep 2025 12:18:44 +0200
Subject: [PATCH 2/5] Adapt variable naming (`datetime_started` now)

---
 testssl.sh | 44 ++++++++++++++++++++++----------------------
 1 file changed, 22 insertions(+), 22 deletions(-)

diff --git a/testssl.sh b/testssl.sh
index 73cd4f6..a051c47 100755
--- a/testssl.sh
+++ b/testssl.sh
@@ -1453,7 +1453,7 @@ fileout() {
 
 
 json_header() {
-     local datetime_filename_part="$1"
+     local datetime_started="$1"
      local fname_prefix
      local filename_provided=false
 
@@ -1482,9 +1482,9 @@ json_header() {
           fname_prefix="${FNAME_PREFIX}${NODE}_p${PORT}"
      fi
      if [[ -z "$JSONFILE" ]]; then
-          JSONFILE="$fname_prefix-${datetime_filename_part}.json"
+          JSONFILE="$fname_prefix-${datetime_started}.json"
      elif [[ -d "$JSONFILE" ]]; then
-          JSONFILE="$JSONFILE/${fname_prefix}-${datetime_filename_part}.json"
+          JSONFILE="$JSONFILE/${fname_prefix}-${datetime_started}.json"
      fi
      # Silently reset APPEND var if the file doesn't exist as otherwise it won't be created
      if "$APPEND" && [[ ! -s "$JSONFILE" ]]; then
@@ -1505,7 +1505,7 @@
 
 
 csv_header() {
-     local datetime_filename_part="$1"
+     local datetime_started="$1"
      local fname_prefix
      local filename_provided=false
 
@@ -1531,9 +1531,9 @@
           fname_prefix="${FNAME_PREFIX}${NODE}_p${PORT}"
      fi
      if [[ -z "$CSVFILE" ]]; then
-          CSVFILE="${fname_prefix}-${datetime_filename_part}.csv"
+          CSVFILE="${fname_prefix}-${datetime_started}.csv"
      elif [[ -d "$CSVFILE" ]]; then
-          CSVFILE="$CSVFILE/${fname_prefix}-${datetime_filename_part}.csv"
+          CSVFILE="$CSVFILE/${fname_prefix}-${datetime_started}.csv"
      fi
      # Silently reset APPEND var if the file doesn't exist as otherwise it won't be created
      if "$APPEND" && [[ ! -s "$CSVFILE" ]]; then
@@ -1560,7 +1560,7 @@ csv_header() {
 ################# END JSON file functions. START HTML functions ####################
 
 html_header() {
-     local datetime_filename_part="$1"
+     local datetime_started="$1"
      local fname_prefix
      local filename_provided=false
 
@@ -1589,9 +1589,9 @@
           fname_prefix="${FNAME_PREFIX}${NODE}_p${PORT}"
      fi
      if [[ -z "$HTMLFILE" ]]; then
-          HTMLFILE="$fname_prefix-${datetime_filename_part}.html"
+          HTMLFILE="$fname_prefix-${datetime_started}.html"
      elif [[ -d "$HTMLFILE" ]]; then
-          HTMLFILE="$HTMLFILE/$fname_prefix-${datetime_filename_part}.html"
+          HTMLFILE="$HTMLFILE/$fname_prefix-${datetime_started}.html"
      fi
      # Silently reset APPEND var if the file doesn't exist as otherwise it won't be created
      if "$APPEND" && [[ ! -s "$HTMLFILE" ]]; then
@@ -1639,7 +1639,7 @@ html_footer() {
 ################# END HTML file functions ####################
 
 prepare_logging() {
-     local datetime_filename_part="$1"
+     local datetime_started="$1"
      # arg2: for testing mx records name we put a name of logfile in here, otherwise we get strange file names
      local fname_prefix="$2"
      local filename_provided=false
@@ -1658,10 +1658,10 @@
 
      [[ -z "$fname_prefix" ]] && fname_prefix="${FNAME_PREFIX}${NODE}_p${PORT}"
      if [[ -z "$LOGFILE" ]]; then
-          LOGFILE="$fname_prefix-${datetime_filename_part}.log"
+          LOGFILE="$fname_prefix-${datetime_started}.log"
      elif [[ -d "$LOGFILE" ]]; then
           # actually we were instructed to place all files in a DIR instead of the current working dir
-          LOGFILE="$LOGFILE/$fname_prefix-${datetime_filename_part}.log"
+          LOGFILE="$LOGFILE/$fname_prefix-${datetime_started}.log"
      else
          :     # just for clarity: a log file was specified, no need to do anything else
      fi
@@ -23416,7 +23416,7 @@ draw_line() {
 
 
 run_mx_all_ips() {
-     local datetime_filename_part="$1"
+     local datetime_started="$1"
      local mxs mx
      local mxport
      local -i ret=0
@@ -23433,9 +23433,9 @@
      fi
      mxport=${3:-25}
      if [[ -n "$LOGFILE" ]] || [[ -n "$PARENT_LOGFILE" ]]; then
-          prepare_logging "${datetime_filename_part}"
+          prepare_logging "${datetime_started}"
      else
-          prepare_logging "${datetime_filename_part}" "${FNAME_PREFIX}mx-$1"
+          prepare_logging "${datetime_started}" "${FNAME_PREFIX}mx-$1"
      fi
      if [[ -n "$mxs" ]] && [[ "$mxs" != ' ' ]]; then
           [[ $(count_words "$mxs") -gt 1 ]] && MULTIPLE_CHECKS=true
@@ -25329,7 +25329,7 @@ lets_roll() {
 RET=0     # this is a global as we can have a function main(), see #705. Should we toss then all local $ret?
 ip=""
 stopwatch start
-DATETIME_FILENAME_PART="$(date +"%Y%m%d-%H%M")"
+datetime_started="$(date +"%Y%m%d-%H%M")"
 
 lets_roll init
 initialize_globals
@@ -25340,9 +25340,9 @@
 # html_header() needs to be called early! Otherwise if html_out() is called before html_header() and the
 # command line contains --htmlfile or --html, it'll make problems with html output, see #692.
 # json_header and csv_header could be called later but for context reasons we'll leave it here
-html_header "${DATETIME_FILENAME_PART}"
-json_header "${DATETIME_FILENAME_PART}"
-csv_header "${DATETIME_FILENAME_PART}"
+html_header "${datetime_started}"
+json_header "${datetime_started}"
+csv_header "${datetime_started}"
 get_install_dir
 # see #705, we need to source TLS_DATA_FILE here instead of in get_install_dir(), see #705
 [[ -r "$TLS_DATA_FILE" ]] && . "$TLS_DATA_FILE"
@@ -25367,7 +25367,7 @@
 fileout_banner
 
 if "$do_mass_testing"; then
-     prepare_logging "${DATETIME_FILENAME_PART}"
+     prepare_logging "${datetime_started}"
      if [[ "$MASS_TESTING_MODE" == parallel ]]; then
          run_mass_testing_parallel
      else
@@ -25382,12 +25382,12 @@
      #FIXME: do we need this really here?
      count_do_variables     # if we have just 1x "do_*" --> we do a standard run -- otherwise just the one specified
      [[ $? -eq 1 ]] && set_scanning_defaults
-     run_mx_all_ips "${DATETIME_FILENAME_PART}" "${URI}" $PORT     # we should reduce run_mx_all_ips to what's necessary as below we have similar code
+     run_mx_all_ips "${datetime_started}" "${URI}" $PORT     # we should reduce run_mx_all_ips to what's necessary as below we have similar code
      exit $?
fi [[ -z "$NODE" ]] && parse_hn_port "${URI}" # NODE, URL_PATH, PORT, IPADDRs2CHECK and IPADDRs2SHOW is set now - prepare_logging "${DATETIME_FILENAME_PART}" + prepare_logging "${datetime_started}" if [[ -n "$PROXY" ]] && $DNS_VIA_PROXY; then NODEIP="$NODE" From 1d6ddfb352cbe94caec46c4d76e337e3c9968897 Mon Sep 17 00:00:00 2001 From: Dirk Date: Tue, 30 Sep 2025 13:35:08 +0200 Subject: [PATCH 3/5] rename datetime_started .. to fname_date as it's more consitent with fname_prefix --- testssl.sh | 44 ++++++++++++++++++++++---------------------- 1 file changed, 22 insertions(+), 22 deletions(-) diff --git a/testssl.sh b/testssl.sh index a051c47..e0fc943 100755 --- a/testssl.sh +++ b/testssl.sh @@ -1453,7 +1453,7 @@ fileout() { json_header() { - local datetime_started="$1" + local fname_date="$1" local fname_prefix local filename_provided=false @@ -1482,9 +1482,9 @@ json_header() { fname_prefix="${FNAME_PREFIX}${NODE}_p${PORT}" fi if [[ -z "$JSONFILE" ]]; then - JSONFILE="$fname_prefix-${datetime_started}.json" + JSONFILE="$fname_prefix-${fname_date}.json" elif [[ -d "$JSONFILE" ]]; then - JSONFILE="$JSONFILE/${fname_prefix}-${datetime_started}.json" + JSONFILE="$JSONFILE/${fname_prefix}-${fname_date}.json" fi # Silently reset APPEND var if the file doesn't exist as otherwise it won't be created if "$APPEND" && [[ ! -s "$JSONFILE" ]]; then @@ -1505,7 +1505,7 @@ json_header() { csv_header() { - local datetime_started="$1" + local fname_date="$1" local fname_prefix local filename_provided=false @@ -1531,9 +1531,9 @@ csv_header() { fname_prefix="${FNAME_PREFIX}${NODE}_p${PORT}" fi if [[ -z "$CSVFILE" ]]; then - CSVFILE="${fname_prefix}-${datetime_started}.csv" + CSVFILE="${fname_prefix}-${fname_date}.csv" elif [[ -d "$CSVFILE" ]]; then - CSVFILE="$CSVFILE/${fname_prefix}-${datetime_started}.csv" + CSVFILE="$CSVFILE/${fname_prefix}-${fname_date}.csv" fi # Silently reset APPEND var if the file doesn't exist as otherwise it won't be created if "$APPEND" && [[ ! -s "$CSVFILE" ]]; then @@ -1560,7 +1560,7 @@ csv_header() { ################# END JSON file functions. START HTML functions #################### html_header() { - local datetime_started="$1" + local fname_date="$1" local fname_prefix local filename_provided=false @@ -1589,9 +1589,9 @@ html_header() { fname_prefix="${FNAME_PREFIX}${NODE}_p${PORT}" fi if [[ -z "$HTMLFILE" ]]; then - HTMLFILE="$fname_prefix-${datetime_started}.html" + HTMLFILE="$fname_prefix-${fname_date}.html" elif [[ -d "$HTMLFILE" ]]; then - HTMLFILE="$HTMLFILE/$fname_prefix-${datetime_started}.html" + HTMLFILE="$HTMLFILE/$fname_prefix-${fname_date}.html" fi # Silently reset APPEND var if the file doesn't exist as otherwise it won't be created if "$APPEND" && [[ ! 
-s "$HTMLFILE" ]]; then @@ -1639,7 +1639,7 @@ html_footer() { ################# END HTML file functions #################### prepare_logging() { - local datetime_started="$1" + local fname_date="$1" # arg2: for testing mx records name we put a name of logfile in here, otherwise we get strange file names local fname_prefix="$2" local filename_provided=false @@ -1658,10 +1658,10 @@ prepare_logging() { [[ -z "$fname_prefix" ]] && fname_prefix="${FNAME_PREFIX}${NODE}_p${PORT}" if [[ -z "$LOGFILE" ]]; then - LOGFILE="$fname_prefix-${datetime_started}.log" + LOGFILE="$fname_prefix-${fname_date}.log" elif [[ -d "$LOGFILE" ]]; then # actually we were instructed to place all files in a DIR instead of the current working dir - LOGFILE="$LOGFILE/$fname_prefix-${datetime_started}.log" + LOGFILE="$LOGFILE/$fname_prefix-${fname_date}.log" else : # just for clarity: a log file was specified, no need to do anything else fi @@ -23416,7 +23416,7 @@ draw_line() { run_mx_all_ips() { - local datetime_started="$1" + local fname_date="$1" local mxs mx local mxport local -i ret=0 @@ -23433,9 +23433,9 @@ run_mx_all_ips() { fi mxport=${3:-25} if [[ -n "$LOGFILE" ]] || [[ -n "$PARENT_LOGFILE" ]]; then - prepare_logging "${datetime_started}" + prepare_logging "${fname_date}" else - prepare_logging "${datetime_started}" "${FNAME_PREFIX}mx-$1" + prepare_logging "${fname_date}" "${FNAME_PREFIX}mx-$1" fi if [[ -n "$mxs" ]] && [[ "$mxs" != ' ' ]]; then [[ $(count_words "$mxs") -gt 1 ]] && MULTIPLE_CHECKS=true @@ -25329,7 +25329,7 @@ lets_roll() { RET=0 # this is a global as we can have a function main(), see #705. Should we toss then all local $ret? ip="" stopwatch start - datetime_started="$(date +"%Y%m%d-%H%M")" + local fname_date="$(date +"%Y%m%d-%H%M")" lets_roll init initialize_globals @@ -25340,9 +25340,9 @@ lets_roll() { # html_header() needs to be called early! Otherwise if html_out() is called before html_header() and the # command line contains --htmlfile or --html, it'll make problems with html output, see #692. # json_header and csv_header could be called later but for context reasons we'll leave it here - html_header "${datetime_started}" - json_header "${datetime_started}" - csv_header "${datetime_started}" + html_header "${fname_date}" + json_header "${fname_date}" + csv_header "${fname_date}" get_install_dir # see #705, we need to source TLS_DATA_FILE here instead of in get_install_dir(), see #705 [[ -r "$TLS_DATA_FILE" ]] && . "$TLS_DATA_FILE" @@ -25367,7 +25367,7 @@ lets_roll() { fileout_banner if "$do_mass_testing"; then - prepare_logging "${datetime_started}" + prepare_logging "${fname_date}" if [[ "$MASS_TESTING_MODE" == parallel ]]; then run_mass_testing_parallel else @@ -25382,12 +25382,12 @@ lets_roll() { #FIXME: do we need this really here? count_do_variables # if we have just 1x "do_*" --> we do a standard run -- otherwise just the one specified [[ $? -eq 1 ]] && set_scanning_defaults - run_mx_all_ips "${datetime_started}" "${URI}" $PORT # we should reduce run_mx_all_ips to what's necessary as below we have similar code + run_mx_all_ips "${fname_date}" "${URI}" $PORT # we should reduce run_mx_all_ips to what's necessary as below we have similar code exit $? 
fi [[ -z "$NODE" ]] && parse_hn_port "${URI}" # NODE, URL_PATH, PORT, IPADDRs2CHECK and IPADDRs2SHOW is set now - prepare_logging "${datetime_started}" + prepare_logging "${fname_date}" if [[ -n "$PROXY" ]] && $DNS_VIA_PROXY; then NODEIP="$NODE" From e8ab2c74e6a29c585e781dd120a6c87d826af10d Mon Sep 17 00:00:00 2001 From: Dirk Date: Tue, 30 Sep 2025 13:56:25 +0200 Subject: [PATCH 4/5] straighten global definitions in the very bottom --- testssl.sh | 24 ++++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/testssl.sh b/testssl.sh index e0fc943..b778eb6 100755 --- a/testssl.sh +++ b/testssl.sh @@ -25326,23 +25326,23 @@ lets_roll() { ################# main ################# - RET=0 # this is a global as we can have a function main(), see #705. Should we toss then all local $ret? - ip="" + RET=0 # this is a global as a function main() is problematic, see #705. Should we toss then all local $ret? + IP="" # global used only here stopwatch start - local fname_date="$(date +"%Y%m%d-%H%M")" + FNAME_DATE="$(date +"%Y%m%d-%H%M")" # a global var, and a definition via local doesn't work here. Omiting definition above lets_roll init initialize_globals - check_base_requirements # needs to come after $do_html is defined + check_base_requirements # needs to come after $do_html is defined parse_cmd_line "$@" # CMDLINE_PARSED has been set now. Don't put a function immediately after this which calls fatal(). # Rather put it after csv_header below. # html_header() needs to be called early! Otherwise if html_out() is called before html_header() and the # command line contains --htmlfile or --html, it'll make problems with html output, see #692. # json_header and csv_header could be called later but for context reasons we'll leave it here - html_header "${fname_date}" - json_header "${fname_date}" - csv_header "${fname_date}" + html_header "${FNAME_DATE}" + json_header "${FNAME_DATE}" + csv_header "${FNAME_DATE}" get_install_dir # see #705, we need to source TLS_DATA_FILE here instead of in get_install_dir(), see #705 [[ -r "$TLS_DATA_FILE" ]] && . "$TLS_DATA_FILE" @@ -25367,7 +25367,7 @@ lets_roll() { fileout_banner if "$do_mass_testing"; then - prepare_logging "${fname_date}" + prepare_logging "${FNAME_DATE}" if [[ "$MASS_TESTING_MODE" == parallel ]]; then run_mass_testing_parallel else @@ -25382,12 +25382,12 @@ lets_roll() { #FIXME: do we need this really here? count_do_variables # if we have just 1x "do_*" --> we do a standard run -- otherwise just the one specified [[ $? -eq 1 ]] && set_scanning_defaults - run_mx_all_ips "${fname_date}" "${URI}" $PORT # we should reduce run_mx_all_ips to what's necessary as below we have similar code + run_mx_all_ips "${FNAME_DATE}" "${URI}" $PORT # we should reduce run_mx_all_ips to what's necessary as below we have similar code exit $? 
fi [[ -z "$NODE" ]] && parse_hn_port "${URI}" # NODE, URL_PATH, PORT, IPADDRs2CHECK and IPADDRs2SHOW is set now - prepare_logging "${fname_date}" + prepare_logging "${FNAME_DATE}" if [[ -n "$PROXY" ]] && $DNS_VIA_PROXY; then NODEIP="$NODE" @@ -25406,10 +25406,10 @@ lets_roll() { pr_bold "Testing all IP addresses (port $PORT): " fi outln "$IPADDRs2CHECK" - for ip in $IPADDRs2CHECK; do + for IP in $IPADDRs2CHECK; do draw_line "-" $((TERM_WIDTH * 2 / 3)) outln - NODEIP="$ip" + NODEIP="$IP" lets_roll "${STARTTLS_PROTOCOL}" RET=$((RET + $?)) # RET value per IP address done From 123684f55443d95db06b865e3c73d85ebb8e9ff8 Mon Sep 17 00:00:00 2001 From: Dirk Date: Tue, 30 Sep 2025 13:58:28 +0200 Subject: [PATCH 5/5] make spellchecker and myself happy ;-) --- testssl.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/testssl.sh b/testssl.sh index b778eb6..946e9da 100755 --- a/testssl.sh +++ b/testssl.sh @@ -25329,7 +25329,7 @@ lets_roll() { RET=0 # this is a global as a function main() is problematic, see #705. Should we toss then all local $ret? IP="" # global used only here stopwatch start - FNAME_DATE="$(date +"%Y%m%d-%H%M")" # a global var, and a definition via local doesn't work here. Omiting definition above + FNAME_DATE="$(date +"%Y%m%d-%H%M")" # a global var, and a definition via local doesn't work here. Omitting definition above lets_roll init initialize_globals