Merge branch '2.9dev' into openssl_location

commit 25977d5537

 testssl.sh | 116

--- a/testssl.sh
+++ b/testssl.sh
@@ -174,10 +174,10 @@ DEBUG=${DEBUG:-0} # 1: normal putput the files in /tmp/ ar
 # 6: whole 9 yards
 FAST=${FAST:-false} # preference: show only first cipher, run_allciphers with openssl instead of sockets
 WIDE=${WIDE:-false} # whether to display for some options just ciphers or a table w hexcode/KX,Enc,strength etc.
-LOGFILE=${LOGFILE:-""} # logfile if used
-JSONFILE=${JSONFILE:-""} # jsonfile if used
-CSVFILE=${CSVFILE:-""} # csvfile if used
-HTMLFILE=${HTMLFILE:-""} # HTML if used
+LOGFILE="${LOGFILE:-""}" # logfile if used
+JSONFILE="${JSONFILE:-""}" # jsonfile if used
+CSVFILE="${CSVFILE:-""}" # csvfile if used
+HTMLFILE="${HTMLFILE:-""}" # HTML if used
 FIRST_FINDING=true # Is this the first finding we are outputting to file?
 JSONHEADER=true # include JSON headers and footers in HTML file, if one is being created
 CSVHEADER=true # same for CSV
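The only changes in this hunk are the four output-file defaults, whose ${VAR:-""} expansions are now wrapped in double quotes. Bash does not word-split the right-hand side of a plain assignment, so this is mainly a consistency measure with the quoting cleanup elsewhere in the merge; the :- expansion itself behaves as in this small sketch (values are illustrative, not from the commit):

    unset LOGFILE
    LOGFILE="${LOGFILE:-""}"             # unset or empty  -> ""
    LOGFILE="${LOGFILE:-/tmp/scan.log}"  # still empty     -> falls back to the default
    echo "LOGFILE=$LOGFILE"              # prints: LOGFILE=/tmp/scan.log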
@@ -437,7 +437,7 @@ prln_done_good() { pr_done_good "$1"; outln; }
 tm_done_best() { [[ "$COLOR" -eq 2 ]] && ( "$COLORBLIND" && tm_out "\033[1;34m$1" || tm_out "\033[1;32m$1" ) || tm_out "$1"; tm_off; } # green (blue), This is the best
 tmln_done_best() { tm_done_best "$1"; tmln_out; }
 pr_done_best() { tm_done_best "$1"; [[ "$COLOR" -eq 2 ]] && ( "$COLORBLIND" && html_out "<span style=\"color:#5c5cff;font-weight:bold;\">$(html_reserved "$1")</span>" || html_out "<span style=\"color:lime;font-weight:bold;\">$(html_reserved "$1")</span>" ) || html_out "$(html_reserved "$1")"; }
 prln_done_best() { pr_done_best "$1"; outln; }

 tm_svrty_low() { [[ "$COLOR" -eq 2 ]] && tm_out "\033[1;33m$1" || tm_out "$1"; tm_off; } # yellow brown | academic or minor problem
 tmln_svrty_low() { tm_svrty_low "$1"; tmln_out; }
@@ -473,9 +473,9 @@ pr_bold() { tm_bold "$1"; [[ "$COLOR" -ne 0 ]] && html_out "<span style=\"
 prln_bold() { pr_bold "$1" ; outln; }

 tm_italic() { [[ "$COLOR" -ne 0 ]] && tm_out "\033[3m$1" || tm_out "$1"; tm_off; }
-tmln_italic() { tm_italic "$1" ; outln; }
+tmln_italic() { tm_italic "$1" ; tmln_out; }
 pr_italic() { tm_italic "$1"; [[ "$COLOR" -ne 0 ]] && html_out "<i>$(html_reserved "$1")</i>" || html_out "$(html_reserved "$1")"; }
-prln_italic() { pr_italic "$1"; tmln_out; }
+prln_italic() { pr_italic "$1"; outln; }

 tm_strikethru() { [[ "$COLOR" -ne 0 ]] && tm_out "\033[9m$1" || tm_out "$1"; tm_off; } # ugly!
 tmln_strikethru() { tm_strikethru "$1"; tmln_out; }
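Judging from the helper definitions in this hunk, the tm_* functions write to the terminal only, the pr_* variants additionally emit HTML via html_out, and the *ln forms append a newline (tmln_out terminal-only, outln for both); the two one-word swaps above align tmln_italic and prln_italic with that split. All of these helpers lean on the "test && A || B" shorthand, which only acts like a ternary as long as the middle command cannot fail, as this made-up illustration shows:

    COLOR=2
    [[ "$COLOR" -ne 0 ]] && printf '\033[3m%s\033[0m\n' "italic text" || printf '%s\n' "plain text"
    # Caveat of the && ... || idiom: if the middle command returned non-zero,
    # the || branch would run as well, so it only suits commands that practically never fail.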
@@ -793,9 +793,9 @@ json_header() {
 fname_prefix="${NODE}"_p"${PORT}"
 fi
 if [[ -z "$JSONFILE" ]]; then
-JSONFILE=$fname_prefix-$(date +"%Y%m%d-%H%M".json)
+JSONFILE="$fname_prefix-$(date +"%Y%m%d-%H%M".json)"
 elif [[ -d "$JSONFILE" ]]; then
-JSONFILE=$JSONFILE/$fname_prefix-$(date +"%Y%m%d-%H%M".json)
+JSONFILE="$JSONFILE/$fname_prefix-$(date +"%Y%m%d-%H%M".json)"
 fi
 if "$APPEND"; then
 JSONHEADER=false
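A note on the two filename lines: the ".json" suffix sits inside the argument passed to date, so date prints it verbatim after the timestamp, and the new outer quotes keep the result a single word even if $fname_prefix or the directory part of $JSONFILE contains spaces. Roughly (prefix value is illustrative):

    fname_prefix="example.com_p443"
    JSONFILE="$fname_prefix-$(date +"%Y%m%d-%H%M".json)"
    echo "$JSONFILE"     # e.g. example.com_p443-20170520-1337.json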
@@ -833,9 +833,9 @@ csv_header() {
 fi

 if [[ -z "$CSVFILE" ]]; then
-CSVFILE=$fname_prefix-$(date +"%Y%m%d-%H%M".csv)
+CSVFILE="$fname_prefix-$(date +"%Y%m%d-%H%M".csv)"
 elif [[ -d "$CSVFILE" ]]; then
-CSVFILE=$CSVFILE/$fname_prefix-$(date +"%Y%m%d-%H%M".csv)
+CSVFILE="$CSVFILE/$fname_prefix-$(date +"%Y%m%d-%H%M".csv)"
 fi
 if "$APPEND"; then
 CSVHEADER=false
@@ -876,9 +876,9 @@ html_header() {
 fi

 if [[ -z "$HTMLFILE" ]]; then
-HTMLFILE=$fname_prefix-$(date +"%Y%m%d-%H%M".html)
+HTMLFILE="$fname_prefix-$(date +"%Y%m%d-%H%M".html)"
 elif [[ -d "$HTMLFILE" ]]; then
-HTMLFILE=$HTMLFILE/$fname_prefix-$(date +"%Y%m%d-%H%M".html)
+HTMLFILE="$HTMLFILE/$fname_prefix-$(date +"%Y%m%d-%H%M".html)"
 fi
 if "$APPEND"; then
 HTMLHEADER=false
@@ -1792,7 +1792,7 @@ run_hpkp() {
 if [[ -n "${backup_spki_str[0]}" ]]; then
 pr_done_good "${backup_spki[0]}"
 #out " Root CA: "
-tm_italic " ${backup_spki_str[0]}"
+prln_italic " ${backup_spki_str[0]}"
 else
 outln "${backup_spki[0]}"
 fi
@@ -1802,7 +1802,7 @@ run_hpkp() {
 # it's a Root CA outside the chain
 pr_done_good "$spaces_indented ${backup_spki[i]}"
 #out " Root CA: "
-tm_italic " ${backup_spki_str[i]}"
+prln_italic " ${backup_spki_str[i]}"
 else
 outln "$spaces_indented ${backup_spki[i]}"
 fi
@@ -2072,7 +2072,7 @@ run_cookie_flags() { # ARG1: Path
 run_more_flags() {
 local good_flags2test="X-Frame-Options X-XSS-Protection X-Content-Type-Options Content-Security-Policy X-Content-Security-Policy X-WebKit-CSP Content-Security-Policy-Report-Only"
 local other_flags2test="Access-Control-Allow-Origin Upgrade X-Served-By X-UA-Compatible Referrer-Policy"
-local f2t
+local f2t line
 local first=true
 local spaces=" "

@@ -2091,7 +2091,8 @@ run_more_flags() {
 first=false
 fi
 pr_done_good "$f2t"
-outln "$(out_row_aligned_max_width "$HEADERVALUE" "$spaces" $TERM_WIDTH)"
+line="$(out_row_aligned_max_width "$f2t$HEADERVALUE" "$spaces" $TERM_WIDTH)"
+outln " ${line#* }"
 fileout "$f2t" "OK" "$f2t: $HEADERVALUE"
 fi
 done
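The rewritten output path hands "$f2t$HEADERVALUE" to out_row_aligned_max_width, apparently so the wrapping width accounts for the header name that pr_done_good has already printed, and ${line#* } then drops the shortest prefix up to and including the first space before the value is printed. The parameter expansion on its own works like this (value is illustrative):

    line="X-Frame-Options SAMEORIGIN always"
    echo "${line#* }"      # shortest prefix up to the first space removed -> "SAMEORIGIN always"
    echo "${line##* }"     # longest-match variant, for comparison -> "always"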
@@ -10438,7 +10439,7 @@ find_openssl_binary() {
 # no ERRFILE initialized yet, thus we use /dev/null for stderr directly
 $OPENSSL version -a 2>/dev/null >/dev/null
 if [[ $? -ne 0 ]] || [[ ! -x "$OPENSSL" ]]; then
-fatal "\ncannot exec or find any openssl binary" -5
+fatal "cannot exec or find any openssl binary" -5
 fi

 # http://www.openssl.org/news/openssl-notes.html
@@ -10885,6 +10886,7 @@ cleanup () {
 }

 fatal() {
+outln
 prln_magenta "Fatal error: $1" >&2
 exit $2
 # 1: cmd line error
@@ -11016,22 +11018,22 @@ prepare_logging() {
 [[ -z "$fname_prefix" ]] && fname_prefix="${NODE}"_p"${PORT}"

 if [[ -z "$LOGFILE" ]]; then
-LOGFILE=$fname_prefix-$(date +"%Y%m%d-%H%M".log)
+LOGFILE="$fname_prefix-$(date +"%Y%m%d-%H%M".log)"
 elif [[ -d "$LOGFILE" ]]; then
 # actually we were instructed to place all files in a DIR instead of the current working dir
-LOGFILE=$LOGFILE/$fname_prefix-$(date +"%Y%m%d-%H%M".log)
+LOGFILE="$LOGFILE/$fname_prefix-$(date +"%Y%m%d-%H%M".log)"
 else
 : # just for clarity: a log file was specified, no need to do anything else
 fi

 if ! "$APPEND"; then
-[[ -e $LOGFILE ]] && outln && fatal "\"$LOGFILE\" exists. Either use \"--append\" or (re)move it" 1
+[[ -e "$LOGFILE" ]] && fatal "\"$LOGFILE\" exists. Either use \"--append\" or (re)move it" 1
 fi
-tmln_out "## Scan started as: \"$PROG_NAME $CMDLINE\"" >>${LOGFILE}
-tmln_out "## at $HNAME:$OPENSSL_LOCATION" >>${LOGFILE}
-tmln_out "## version testssl: $VERSION ${GIT_REL_SHORT:-$CVS_REL_SHORT} from $REL_DATE" >>${LOGFILE}
-tmln_out "## version openssl: \"$OSSL_VER\" from \"$OSSL_BUILD_DATE\")\n" >>${LOGFILE}
-exec > >(tee -a ${LOGFILE})
+tmln_out "## Scan started as: \"$PROG_NAME $CMDLINE\"" >>"$LOGFILE"
+tmln_out "## at $HNAME:$OPENSSL_LOCATION" >>"$LOGFILE"
+tmln_out "## version testssl: $VERSION ${GIT_REL_SHORT:-$CVS_REL_SHORT} from $REL_DATE" >>"$LOGFILE"
+tmln_out "## version openssl: \"$OSSL_VER\" from \"$OSSL_BUILD_DATE\")\n" >>"$LOGFILE"
+exec > >(tee -a "$LOGFILE")
 }

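Quoting the redirection targets here is more than style: unlike an assignment, the word after >> is expanded and must resolve to exactly one word, so a log path containing a space makes bash report an "ambiguous redirect" when ${LOGFILE} is unquoted. A minimal reproduction (path is illustrative):

    mkdir -p "/tmp/scan results"
    LOGFILE="/tmp/scan results/host_p443.log"
    echo "## Scan started" >>${LOGFILE}     # bash: ambiguous redirect
    echo "## Scan started" >>"$LOGFILE"     # appends to the intended file
    exec > >(tee -a "$LOGFILE")             # same reasoning for the tee target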
@@ -11355,13 +11357,16 @@ check_proxy() {
 fatal "Your $OPENSSL is too old to support the \"-proxy\" option" -5
 fi
 if [[ "$PROXY" == "auto" ]]; then
-# get $ENV
-PROXY=${https_proxy#*\/\/}
-[[ -z "$PROXY" ]] && PROXY=${http_proxy#*\/\/}
+# get $ENV (https_proxy is the one we care about)
+PROXY="${https_proxy#*\/\/}"
+[[ -z "$PROXY" ]] && PROXY="${http_proxy#*\/\/}"
 [[ -z "$PROXY" ]] && fatal "you specified \"--proxy=auto\" but \"\$http(s)_proxy\" is empty" 2
 fi
-PROXYNODE=${PROXY%:*}
-PROXYPORT=${PROXY#*:}
+# strip off http/https part if supplied:
+PROXY="${PROXY/http\:\/\//}"
+PROXY="${PROXY/https\:\/\//}"
+PROXYNODE="${PROXY%:*}"
+PROXYPORT="${PROXY#*:}"
 is_number "$PROXYPORT" || fatal "Proxy port cannot be determined from \"$PROXY\"" 2

 #if is_ipv4addr "$PROXYNODE" || is_ipv6addr "$PROXYNODE" ; then
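The rewritten block first strips an optional scheme and then splits host and port with the % and # parameter expansions; with an illustrative value the steps look like this:

    PROXY="https://proxy.example.com:3128"
    PROXY="${PROXY/http\:\/\//}"      # drop a leading "http://" if present
    PROXY="${PROXY/https\:\/\//}"     # ... or "https://"
    PROXYNODE="${PROXY%:*}"           # cut the shortest ":..." suffix -> proxy.example.com
    PROXYPORT="${PROXY#*:}"           # cut the shortest "...:" prefix -> 3128
    echo "$PROXYNODE $PROXYPORT"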
@@ -11370,7 +11375,7 @@ check_proxy() {
 if is_ipv4addr "$PROXYNODE"; then
 PROXYIP="$PROXYNODE"
 else
-PROXYIP=$(get_a_record "$PROXYNODE" 2>/dev/null | grep -v alias | sed 's/^.*address //')
+PROXYIP="$(get_a_record "$PROXYNODE" 2>/dev/null | grep -v alias | sed 's/^.*address //')"
 [[ -z "$PROXYIP" ]] && fatal "Proxy IP cannot be determined from \"$PROXYNODE\"" "2"
 fi
 PROXY="-proxy $PROXYIP:$PROXYPORT"
@@ -11546,7 +11551,6 @@ display_rdns_etc() {
 local ip further_ip_addrs=""
 local nodeip="$(tr -d '[]' <<< $NODEIP)" # for displaying IPv6 addresses we don't need []

-
 if [[ -n "$PROXY" ]]; then
 out " Via Proxy: $CORRECT_SPACES"
 outln "$PROXYIP:$PROXYPORT "
@@ -11648,7 +11652,7 @@ run_mass_testing() {

 pr_reverse "====== Running in file batch mode with file=\"$FNAME\" ======"; outln "\n"
 while read cmdline; do
-cmdline=$(filter_input "$cmdline")
+cmdline="$(filter_input "$cmdline")"
 [[ -z "$cmdline" ]] && continue
 [[ "$cmdline" == "EOF" ]] && break
 cmdline="$0 $global_cmdline --warnings=batch $cmdline"
@@ -11673,7 +11677,7 @@ modify_global_cmd_line() {
 while [[ $# -gt 0 ]]; do
 case "$1" in
 --jsonfile|--jsonfile=*)
-filename=$(parse_opt_equal_sign "$1" "$2")
+filename="$(parse_opt_equal_sign "$1" "$2")"
 ret=$?
 # If <jsonfile> is a file, then have provide a different
 # file name to each child process. If <jsonfile> is a
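As with the plain assignments earlier, filename=$(...) is not word-split by bash, so the added quotes are chiefly about a uniform quoting style; where the quotes genuinely matter is when such a value is later expanded as a command argument, for example:

    filename="my report.json"        # illustrative value containing a space
    printf '%s\n' $filename          # unquoted: two words -> "my" and "report.json"
    printf '%s\n' "$filename"        # quoted: one word    -> "my report.json"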
@@ -11685,7 +11689,7 @@ modify_global_cmd_line() {
 [[ $ret -eq 0 ]] && global_cmdline+="$2 "
 fi
 [[ $ret -eq 0 ]] && shift
 ;;
 --jsonfile-pretty|--jsonfile-pretty=*)
 filename=$(parse_opt_equal_sign "$1" "$2")
 ret=$?
@@ -11697,9 +11701,9 @@ modify_global_cmd_line() {
 [[ $ret -eq 0 ]] && global_cmdline+="$2 "
 fi
 [[ $ret -eq 0 ]] && shift
 ;;
 --csvfile|--csvfile=*)
-filename=$(parse_opt_equal_sign "$1" "$2")
+filename="$(parse_opt_equal_sign "$1" "$2")"
 ret=$?
 # Same as for --jsonfile
 if "$CSVHEADER"; then
@@ -11711,7 +11715,7 @@ modify_global_cmd_line() {
 [[ $ret -eq 0 ]] && shift
 ;;
 --htmlfile|--htmlfile=*)
-filename=$(parse_opt_equal_sign "$1" "$2")
+filename="$(parse_opt_equal_sign "$1" "$2")"
 ret=$?
 # Same as for --jsonfile
 if "$HTMLHEADER"; then
@@ -11756,10 +11760,10 @@ run_mass_testing_parallel() {
 fi
 global_cmdline="$(modify_global_cmd_line $global_cmdline)"
 [[ "$global_cmdline" =~ jsonfile_XXXXXXXX ]] && one_jsonfile=true

 pr_reverse "====== Running in parallel file batch mode with file=\"$FNAME\" ======"; outln "\n"
 while read cmdline; do
-cmdline=$(filter_input "$cmdline")
+cmdline="$(filter_input "$cmdline")"
 [[ -z "$cmdline" ]] && continue
 [[ "$cmdline" == "EOF" ]] && break
 cmdline="$0 $global_cmdline --warnings=batch $cmdline"
@@ -11952,7 +11956,7 @@ parse_cmd_line() {
 PORT=587
 ;;
 --ip|--ip=*)
-CMDLINE_IP=$(parse_opt_equal_sign "$1" "$2")
+CMDLINE_IP="$(parse_opt_equal_sign "$1" "$2")"
 [[ $? -eq 0 ]] && shift
 ;;
 -n|--nodns)
@@ -11975,7 +11979,7 @@ parse_cmd_line() {
 ;;
 -t|-t=*|--starttls|--starttls=*)
 do_starttls=true
-STARTTLS_PROTOCOL=$(parse_opt_equal_sign "$1" "$2")
+STARTTLS_PROTOCOL="$(parse_opt_equal_sign "$1" "$2")"
 [[ $? -eq 0 ]] && shift
 case $STARTTLS_PROTOCOL in
 ftp|smtp|pop3|imap|xmpp|telnet|ldap|nntp|postgres) ;;
@@ -12128,7 +12132,7 @@ parse_cmd_line() {
 ;;
 --file|--file=*)
 # no shift here as otherwise URI is empty and it bails out
-FNAME=$(parse_opt_equal_sign "$1" "$2")
+FNAME="$(parse_opt_equal_sign "$1" "$2")"
 [[ $? -eq 0 ]] && shift
 IKNOW_FNAME=true
 WARNINGS=batch # set this implicitly!
@@ -12162,7 +12166,7 @@ parse_cmd_line() {
 esac
 ;;
 --color|--color=*)
-COLOR=$(parse_opt_equal_sign "$1" "$2")
+COLOR="$(parse_opt_equal_sign "$1" "$2")"
 [[ $? -eq 0 ]] && shift
 case $COLOR in
 [0-2]) ;;
@@ -12179,7 +12183,7 @@ parse_cmd_line() {
 ;; # DEFINITION of LOGFILE if no arg specified: automagically in parse_hn_port()
 # following does the same but we can specify a log location additionally
 --logfile|--logfile=*)
-LOGFILE=$(parse_opt_equal_sign "$1" "$2")
+LOGFILE="$(parse_opt_equal_sign "$1" "$2")"
 [[ $? -eq 0 ]] && shift
 do_logging=true
 ;;
@@ -12188,7 +12192,7 @@ parse_cmd_line() {
 ;; # DEFINITION of JSONFILE is not arg specified: automagically in parse_hn_port()
 # following does the same but we can specify a log location additionally
 --jsonfile|--jsonfile=*)
-JSONFILE=$(parse_opt_equal_sign "$1" "$2")
+JSONFILE="$(parse_opt_equal_sign "$1" "$2")"
 [[ $? -eq 0 ]] && shift
 do_json=true
 ;;
@@ -12196,7 +12200,7 @@ parse_cmd_line() {
 do_pretty_json=true
 ;;
 --jsonfile-pretty|--jsonfile-pretty=*)
-JSONFILE=$(parse_opt_equal_sign "$1" "$2")
+JSONFILE="$(parse_opt_equal_sign "$1" "$2")"
 [[ $? -eq 0 ]] && shift
 do_pretty_json=true
 ;;
@@ -12212,7 +12216,7 @@ parse_cmd_line() {
 ;; # DEFINITION of CSVFILE is not arg specified: automagically in parse_hn_port()
 # following does the same but we can specify a log location additionally
 --csvfile|--csvfile=*)
-CSVFILE=$(parse_opt_equal_sign "$1" "$2")
+CSVFILE="$(parse_opt_equal_sign "$1" "$2")"
 [[ $? -eq 0 ]] && shift
 do_csv=true
 ;;
@@ -12221,7 +12225,7 @@ parse_cmd_line() {
 ;; # DEFINITION of HTMLFILE is not arg specified: automagically in parse_hn_port()
 # following does the same but we can specify a file location additionally
 --htmlfile|--htmlfile=*)
-HTMLFILE=$(parse_opt_equal_sign "$1" "$2")
+HTMLFILE="$(parse_opt_equal_sign "$1" "$2")"
 [[ $? -eq 0 ]] && shift
 do_html=true
 ;;
@@ -12229,16 +12233,16 @@ parse_cmd_line() {
 APPEND=true
 ;;
 --openssl|--openssl=*)
-OPENSSL=$(parse_opt_equal_sign "$1" "$2")
+OPENSSL="$(parse_opt_equal_sign "$1" "$2")"
 [[ $? -eq 0 ]] && shift
 ;;
 --openssl-timeout|--openssl-timeout=*)
-OPENSSL_TIMEOUT=$(parse_opt_equal_sign "$1" "$2")
+OPENSSL_TIMEOUT="$(parse_opt_equal_sign "$1" "$2")"
 [[ $? -eq 0 ]] && shift
 ;;
 --mapping|--mapping=*)
 local cipher_mapping
-cipher_mapping=$(parse_opt_equal_sign "$1" "$2")
+cipher_mapping="$(parse_opt_equal_sign "$1" "$2")"
 [[ $? -eq 0 ]] && shift
 case "$cipher_mapping" in
 no-openssl) DISPLAY_CIPHERNAMES="rfc-only" ;;
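Every option branch above follows the same pattern: fetch the value via parse_opt_equal_sign and shift only when it arrived as a separate argument ([[ $? -eq 0 ]] && shift). The helper's body is not part of this diff; purely to illustrate that contract, a hypothetical stand-in (not testssl.sh's actual code) could look like:

    get_opt_value() {
        # "--opt=value": print the part after "=", return non-zero (caller must not shift)
        # "--opt value": print the second argument, return zero (caller shifts it away)
        if [[ "$1" == *=* ]]; then
            echo "${1#*=}"
            return 1
        else
            echo "$2"
            return 0
        fi
    }

    OPENSSL="$(get_opt_value "--openssl=/usr/local/bin/openssl" "")"   # -> /usr/local/bin/openssl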
@@ -12250,7 +12254,7 @@ parse_cmd_line() {
 esac
 ;;
 --proxy|--proxy=*)
-PROXY=$(parse_opt_equal_sign "$1" "$2")
+PROXY="$(parse_opt_equal_sign "$1" "$2")"
 [[ $? -eq 0 ]] && shift
 ;;
 -6) # doesn't work automagically. My versions have -DOPENSSL_USE_IPV6, CentOS/RHEL/FC do not
@@ -12276,12 +12280,12 @@ parse_cmd_line() {

 # Show usage if no further options were specified
 if [[ -z "$1" ]] && [[ -z "$FNAME" ]] && ! $do_display_only; then
-echo && fatal "URI missing" "1"
+fatal "URI missing" "1"
 else
 # left off here is the URI
 URI="$1"
 # parameter after URI supplied:
-[[ -n "$2" ]] && echo && fatal "URI comes last" "1"
+[[ -n "$2" ]] && fatal "URI comes last" "1"
 fi

 [[ "$DEBUG" -ge 5 ]] && debug_globals