Mirror of https://github.com/drwetter/testssl.sh.git (synced 2025-10-09 04:02:53 +02:00)
rename datetime_started to fname_date as it's more consistent with fname_prefix
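
For orientation: the renamed variable only carries the scan's start timestamp, which the *_header() and prepare_logging() functions splice into default output file names next to fname_prefix. Below is a minimal sketch of that naming pattern using the names from the diff; the example host and the standalone script around it are assumptions for illustration, not testssl.sh code.

#!/usr/bin/env bash
# Sketch only: simplified version of the default output-file naming seen in the diff.
fname_date="$(date +"%Y%m%d-%H%M")"             # start timestamp, e.g. 20251009-0402
fname_prefix="example.com_p443"                 # hypothetical stand-in for "${FNAME_PREFIX}${NODE}_p${PORT}"

# When no output file is supplied, the header functions build names like these:
JSONFILE="${fname_prefix}-${fname_date}.json"   # e.g. example.com_p443-20251009-0402.json
LOGFILE="${fname_prefix}-${fname_date}.log"     # e.g. example.com_p443-20251009-0402.log
printf '%s\n%s\n' "$JSONFILE" "$LOGFILE"
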
testssl.sh | 44 ++++++++++++++++++++++----------------------
@@ -1453,7 +1453,7 @@ fileout() {
 
 
 json_header() {
-     local datetime_started="$1"
+     local fname_date="$1"
      local fname_prefix
      local filename_provided=false
 
@@ -1482,9 +1482,9 @@ json_header() {
           fname_prefix="${FNAME_PREFIX}${NODE}_p${PORT}"
      fi
      if [[ -z "$JSONFILE" ]]; then
-          JSONFILE="$fname_prefix-${datetime_started}.json"
+          JSONFILE="$fname_prefix-${fname_date}.json"
      elif [[ -d "$JSONFILE" ]]; then
-          JSONFILE="$JSONFILE/${fname_prefix}-${datetime_started}.json"
+          JSONFILE="$JSONFILE/${fname_prefix}-${fname_date}.json"
      fi
      # Silently reset APPEND var if the file doesn't exist as otherwise it won't be created
      if "$APPEND" && [[ ! -s "$JSONFILE" ]]; then
@@ -1505,7 +1505,7 @@ json_header() {
 
 
 csv_header() {
-     local datetime_started="$1"
+     local fname_date="$1"
      local fname_prefix
      local filename_provided=false
 
@@ -1531,9 +1531,9 @@ csv_header() {
           fname_prefix="${FNAME_PREFIX}${NODE}_p${PORT}"
      fi
      if [[ -z "$CSVFILE" ]]; then
-          CSVFILE="${fname_prefix}-${datetime_started}.csv"
+          CSVFILE="${fname_prefix}-${fname_date}.csv"
      elif [[ -d "$CSVFILE" ]]; then
-          CSVFILE="$CSVFILE/${fname_prefix}-${datetime_started}.csv"
+          CSVFILE="$CSVFILE/${fname_prefix}-${fname_date}.csv"
      fi
      # Silently reset APPEND var if the file doesn't exist as otherwise it won't be created
      if "$APPEND" && [[ ! -s "$CSVFILE" ]]; then
@@ -1560,7 +1560,7 @@ csv_header() {
 ################# END JSON file functions. START HTML functions ####################
 
 html_header() {
-     local datetime_started="$1"
+     local fname_date="$1"
      local fname_prefix
      local filename_provided=false
 
@@ -1589,9 +1589,9 @@ html_header() {
           fname_prefix="${FNAME_PREFIX}${NODE}_p${PORT}"
      fi
      if [[ -z "$HTMLFILE" ]]; then
-          HTMLFILE="$fname_prefix-${datetime_started}.html"
+          HTMLFILE="$fname_prefix-${fname_date}.html"
      elif [[ -d "$HTMLFILE" ]]; then
-          HTMLFILE="$HTMLFILE/$fname_prefix-${datetime_started}.html"
+          HTMLFILE="$HTMLFILE/$fname_prefix-${fname_date}.html"
      fi
      # Silently reset APPEND var if the file doesn't exist as otherwise it won't be created
      if "$APPEND" && [[ ! -s "$HTMLFILE" ]]; then
@@ -1639,7 +1639,7 @@ html_footer() {
 ################# END HTML file functions ####################
 
 prepare_logging() {
-     local datetime_started="$1"
+     local fname_date="$1"
      # arg2: for testing mx records name we put a name of logfile in here, otherwise we get strange file names
      local fname_prefix="$2"
      local filename_provided=false
@@ -1658,10 +1658,10 @@ prepare_logging() {
      [[ -z "$fname_prefix" ]] && fname_prefix="${FNAME_PREFIX}${NODE}_p${PORT}"
 
      if [[ -z "$LOGFILE" ]]; then
-          LOGFILE="$fname_prefix-${datetime_started}.log"
+          LOGFILE="$fname_prefix-${fname_date}.log"
      elif [[ -d "$LOGFILE" ]]; then
           # actually we were instructed to place all files in a DIR instead of the current working dir
-          LOGFILE="$LOGFILE/$fname_prefix-${datetime_started}.log"
+          LOGFILE="$LOGFILE/$fname_prefix-${fname_date}.log"
      else
           : # just for clarity: a log file was specified, no need to do anything else
      fi
@@ -23416,7 +23416,7 @@ draw_line() {
 
 
 run_mx_all_ips() {
-     local datetime_started="$1"
+     local fname_date="$1"
      local mxs mx
      local mxport
      local -i ret=0
@@ -23433,9 +23433,9 @@ run_mx_all_ips() {
      fi
      mxport=${3:-25}
      if [[ -n "$LOGFILE" ]] || [[ -n "$PARENT_LOGFILE" ]]; then
-          prepare_logging "${datetime_started}"
+          prepare_logging "${fname_date}"
      else
-          prepare_logging "${datetime_started}" "${FNAME_PREFIX}mx-$1"
+          prepare_logging "${fname_date}" "${FNAME_PREFIX}mx-$1"
      fi
      if [[ -n "$mxs" ]] && [[ "$mxs" != ' ' ]]; then
           [[ $(count_words "$mxs") -gt 1 ]] && MULTIPLE_CHECKS=true
@@ -25329,7 +25329,7 @@ lets_roll() {
      RET=0     # this is a global as we can have a function main(), see #705. Should we toss then all local $ret?
      ip=""
      stopwatch start
-     datetime_started="$(date +"%Y%m%d-%H%M")"
+     local fname_date="$(date +"%Y%m%d-%H%M")"
 
      lets_roll init
      initialize_globals
@@ -25340,9 +25340,9 @@ lets_roll() {
      # html_header() needs to be called early! Otherwise if html_out() is called before html_header() and the
      # command line contains --htmlfile <htmlfile> or --html, it'll make problems with html output, see #692.
      # json_header and csv_header could be called later but for context reasons we'll leave it here
-     html_header "${datetime_started}"
-     json_header "${datetime_started}"
-     csv_header "${datetime_started}"
+     html_header "${fname_date}"
+     json_header "${fname_date}"
+     csv_header "${fname_date}"
      get_install_dir
      # see #705, we need to source TLS_DATA_FILE here instead of in get_install_dir(), see #705
      [[ -r "$TLS_DATA_FILE" ]] && . "$TLS_DATA_FILE"
@@ -25367,7 +25367,7 @@ lets_roll() {
      fileout_banner
 
      if "$do_mass_testing"; then
-          prepare_logging "${datetime_started}"
+          prepare_logging "${fname_date}"
           if [[ "$MASS_TESTING_MODE" == parallel ]]; then
                run_mass_testing_parallel
           else
@@ -25382,12 +25382,12 @@ lets_roll() {
           #FIXME: do we need this really here?
           count_do_variables        # if we have just 1x "do_*" --> we do a standard run -- otherwise just the one specified
           [[ $? -eq 1 ]] && set_scanning_defaults
-          run_mx_all_ips "${datetime_started}" "${URI}" $PORT    # we should reduce run_mx_all_ips to what's necessary as below we have similar code
+          run_mx_all_ips "${fname_date}" "${URI}" $PORT          # we should reduce run_mx_all_ips to what's necessary as below we have similar code
           exit $?
      fi
 
      [[ -z "$NODE" ]] && parse_hn_port "${URI}"     # NODE, URL_PATH, PORT, IPADDRs2CHECK and IPADDRs2SHOW is set now
-     prepare_logging "${datetime_started}"
+     prepare_logging "${fname_date}"
 
      if [[ -n "$PROXY" ]] && $DNS_VIA_PROXY; then
           NODEIP="$NODE"