e5e10aff81
test_iperf3() is a pretty inscrutable mess of nested background processes. It has a number of ugly sleeps needed to wait for things to complete. Rewrite it to be cleaner: * Use the construct (a & b & wait) to run 'a' and 'b' in parallel, but then wait for them both to complete before continuing * This allows us to wait for both the server and client to finish, rather than sleeping * Use jq to do all the math we need to get the final result, rather than jq followed by some complicated 'bc' mangling Signed-off-by: David Gibson <david@gibson.dropbear.id.au>
417 lines
9.4 KiB
Bash
Executable file
#!/bin/sh
#
# SPDX-License-Identifier: AGPL-3.0-or-later
#
# PASST - Plug A Simple Socket Transport
#  for qemu/UNIX domain socket mode
#
# PASTA - Pack A Subtle Tap Abstraction
#  for network namespace/tap device mode
#
# test/lib/test - List tests and run them, evaluating directives from files
#
# Copyright (c) 2021 Red Hat GmbH
# Author: Stefano Brivio <sbrivio@redhat.com>
|
# test_iperf3() - Ugly helper for iperf3 directive
|
|
# $1: Variable name: to put the measure bandwidth into
|
|
# $2: Source/client pane name, can be lowercase
|
|
# $3: Destination/server pane name, can be lowercase
|
|
# $4: Destination name or address for client
|
|
# $5: Port number, ${i} is translated to process index
|
|
# $6: Number of processes to run in parallel
|
|
# $7: Run time, in seconds
|
|
# $@: Client options
|
|
test_iperf3() {
|
|
__var="${1}"; shift
|
|
__cpane="$(echo "${1}" | tr [a-z] [A-Z])"; shift
|
|
__spane="$(echo "${1}" | tr [a-z] [A-Z])"; shift
|
|
__dest="${1}"; shift
|
|
__port="${1}"; shift
|
|
__procs="$((${1} - 1))"; shift
|
|
__time="${1}"; shift
|
|
|
|
pane_run "${__spane}" \
|
|
'(' \
|
|
' for i in $(seq 0 '${__procs}'); do' \
|
|
' iperf3 -s1J -p'${__port}' -i'${__time} \
|
|
' > s${i}.json &' \
|
|
' done;' \
|
|
' wait' \
|
|
')'
|
|
|
|
pane_run "${__cpane}" \
|
|
'(' \
|
|
' for i in $(seq 0 '${__procs}'); do' \
|
|
' iperf3 -c '${__dest}' -p '${__port} \
|
|
' -t'${__time}' -T s${i} '"${@}"' &' \
|
|
' done;' \
|
|
' wait' \
|
|
')'
|
|
|
|
pane_status "${__cpane}"
|
|
pane_status "${__spane}"
|
|
|
|
__jval=".end.sum_received.bits_per_second"
|
|
for __opt in ${@}; do
|
|
# UDP test
|
|
[ "${__opt}" = "-u" ] && __jval=".intervals[0].sum.bits_per_second"
|
|
done
|
|
|
|
pane_run "${__spane}" \
|
|
'cat s*.json | jq -rMs "map('${__jval}') | add"'
|
|
pane_wait "${__spane}"
|
|
__bw="$(pane_parse "${__spane}")"
|
|
|
|
pane_run "${__spane}" \
|
|
'for i in $(seq 0 '${__procs}'); do rm s${i}.json; done'
|
|
pane_status "${__spane}"
|
|
|
|
TEST_ONE_subs="$(list_add_pair "${TEST_ONE_subs}" "__${__var}__" "${__bw}" )"
|
|
}
|
|
|
|
test_one_line() {
|
|
__line="${1}"
|
|
|
|
[ ${DEBUG} -eq 1 ] && info DEBUG: "${__line}"
|
|
|
|
# Strip comments
|
|
__line="${__line%%#*}"
|
|
|
|
if [ -n "${TEST_ONE_in_def}" ]; then
|
|
[ "${__line}" = "endef" ] && TEST_ONE_in_def= && return
|
|
# Append $__line to variable TEST_ONE_DEF_<definition name>
|
|
__ifs="${IFS}"
|
|
IFS=
|
|
eval TEST_ONE_DEF_$TEST_ONE_in_def=\"\$\(printf \"%s\\n%s\" \"\$TEST_ONE_DEF_$TEST_ONE_in_def\" \"$__line\"\)\"
|
|
IFS="${__ifs}"
|
|
return
|
|
fi
|
|
|
|
# tab-split command and arguments, apply variable substitutions
|
|
__cmd="${__line%%$(printf '\t')*}"
|
|
__arg="${__line#*$(printf '\t')*}"
|
|
__arg="$(subs_apply "${TEST_ONE_subs}" "${__arg}")"
|
|
|
|
[ ${TEST_ONE_nok} -eq 1 ] && [ "${__cmd}" != "test" ] && continue
|
|
case ${__cmd} in
|
|
"def")
|
|
TEST_ONE_in_def="${__arg}"
|
|
# Clear variable TEST_ONE_DEF_<definition name>
|
|
__ifs="${IFS}"
|
|
IFS= eval TEST_ONE_DEF_$TEST_ONE_in_def=
|
|
IFS="${__ifs}"
|
|
;;
|
|
"tempdir")
|
|
__tmpdir="$(mktemp -d)"
|
|
TEST_ONE_subs="$(list_add_pair "${TEST_ONE_subs}" "__${__arg}__" "${__tmpdir}")"
|
|
TEST_ONE_dirclean="$(list_add "${TEST_ONE_dirclean}" "${__tmpdir}")"
|
|
;;
|
|
"temp")
|
|
__tmpfile="$(mktemp)"
|
|
TEST_ONE_subs="$(list_add_pair "${TEST_ONE_subs}" "__${__arg}__" "${__tmpfile}")"
|
|
TEST_ONE_dirclean="$(list_add "${TEST_ONE_dirclean}" "${__tmpfile}")"
|
|
;;
|
|
"test")
|
|
[ ${TEST_ONE_perf_nok} -eq 0 ] || TEST_ONE_nok=1
|
|
[ ${TEST_ONE_nok} -eq 1 ] && status_test_fail
|
|
[ ${TEST_ONE_nok} -eq 0 ] && status_test_ok
|
|
|
|
status_test_start "${__arg}"
|
|
TEST_ONE_nok=0
|
|
TEST_ONE_perf_nok=0
|
|
;;
|
|
"host")
|
|
pane_run HOST "${__arg}"
|
|
pane_status HOST || TEST_ONE_nok=1
|
|
;;
|
|
"hostb")
|
|
pane_run HOST "${__arg}"
|
|
;;
|
|
"hostw")
|
|
pane_status HOST || TEST_ONE_nok=1
|
|
;;
|
|
"hint")
|
|
tmux send-keys -t ${PANE_HOST} "C-c"
|
|
;;
|
|
"htools")
|
|
pane_run HOST 'which '"${__arg}"' >/dev/null'
|
|
pane_status HOST || TEST_ONE_skip=1
|
|
;;
|
|
"passt")
|
|
pane_run PASST "${__arg}"
|
|
pane_status PASST || TEST_ONE_nok=1
|
|
;;
|
|
"passtb")
|
|
pane_run PASST "${__arg}"
|
|
;;
|
|
"passtw")
|
|
pane_status PASST || TEST_ONE_nok=1
|
|
;;
|
|
"pout")
|
|
__varname="${__arg%% *}"
|
|
pane_run PASST "${__arg#* }"
|
|
pane_wait PASST
|
|
TEST_ONE_subs="$(list_add_pair "${TEST_ONE_subs}" "__${__varname}__" "$(pane_parse PASST)")"
|
|
;;
|
|
"guest")
|
|
pane_run GUEST "${__arg}"
|
|
pane_status GUEST || TEST_ONE_nok=1
|
|
;;
|
|
"guestb")
|
|
pane_run GUEST "${__arg}"
|
|
;;
|
|
"guestw")
|
|
pane_status GUEST || TEST_ONE_nok=1
|
|
;;
|
|
"guest1")
|
|
pane_run GUEST_1 "${__arg}"
|
|
pane_status GUEST_1 || TEST_ONE_nok=1
|
|
;;
|
|
"guest1b")
|
|
pane_run GUEST_1 "${__arg}"
|
|
;;
|
|
"guest1w")
|
|
pane_status GUEST_1 || TEST_ONE_nok=1
|
|
;;
|
|
"gtools")
|
|
pane_run GUEST 'which '"${__arg}"' >/dev/null'
|
|
pane_status GUEST || TEST_ONE_skip=1
|
|
;;
|
|
"g1tools")
|
|
pane_run GUEST_1 'which '"${__arg}"' >/dev/null'
|
|
pane_status GUEST_1 || TEST_ONE_skip=1
|
|
;;
|
|
"g2tools")
|
|
pane_run GUEST_2 'which '"${__arg}"' >/dev/null'
|
|
pane_status GUEST_2 || TEST_ONE_skip=1
|
|
;;
|
|
"guest2")
|
|
pane_run GUEST_2 "${__arg}"
|
|
pane_status GUEST_2 || TEST_ONE_nok=1
|
|
;;
|
|
"guest2b")
|
|
pane_run GUEST_2 "${__arg}"
|
|
;;
|
|
"guest2w")
|
|
pane_status GUEST_2 || TEST_ONE_nok=1
|
|
;;
|
|
"ns")
|
|
pane_run NS "${__arg}"
|
|
pane_status NS || TEST_ONE_nok=1
|
|
;;
|
|
"ns1")
|
|
pane_run NS1 "${__arg}"
|
|
pane_status NS1 || TEST_ONE_nok=1
|
|
;;
|
|
"ns2")
|
|
pane_run NS2 "${__arg}"
|
|
pane_status NS2 || TEST_ONE_nok=1
|
|
;;
|
|
"nsb")
|
|
pane_run NS "${__arg}"
|
|
;;
|
|
"ns1b")
|
|
pane_run NS1 "${__arg}"
|
|
;;
|
|
"ns2b")
|
|
pane_run NS2 "${__arg}"
|
|
;;
|
|
"nsw")
|
|
pane_status NS || TEST_ONE_nok=1
|
|
;;
|
|
"ns1w")
|
|
pane_status NS1 || TEST_ONE_nok=1
|
|
;;
|
|
"ns2w")
|
|
pane_status NS2 || TEST_ONE_nok=1
|
|
;;
|
|
"nstools")
|
|
pane_run NS 'which '"${__arg}"' >/dev/null'
|
|
pane_status NS || TEST_ONE_skip=1
|
|
;;
|
|
"gout")
|
|
__varname="${__arg%% *}"
|
|
pane_run GUEST "${__arg#* }"
|
|
pane_wait GUEST
|
|
TEST_ONE_subs="$(list_add_pair "${TEST_ONE_subs}" "__${__varname}__" "$(pane_parse GUEST)")"
|
|
;;
|
|
"g1out")
|
|
__varname="${__arg%% *}"
|
|
pane_run GUEST_1 "${__arg#* }"
|
|
pane_wait GUEST_1
|
|
TEST_ONE_subs="$(list_add_pair "${TEST_ONE_subs}" "__${__varname}__" "$(pane_parse GUEST_1)")"
|
|
;;
|
|
"g2out")
|
|
__varname="${__arg%% *}"
|
|
pane_run GUEST_2 "${__arg#* }"
|
|
pane_wait GUEST_2
|
|
TEST_ONE_subs="$(list_add_pair "${TEST_ONE_subs}" "__${__varname}__" "$(pane_parse GUEST_2)")"
|
|
;;
|
|
"hout")
|
|
__varname="${__arg%% *}"
|
|
pane_run HOST "${__arg#* }"
|
|
pane_wait HOST
|
|
TEST_ONE_subs="$(list_add_pair "${TEST_ONE_subs}" "__${__varname}__" "$(pane_parse HOST)")"
|
|
;;
|
|
"nsout")
|
|
__varname="${__arg%% *}"
|
|
pane_run NS "${__arg#* }"
|
|
pane_wait NS
|
|
TEST_ONE_subs="$(list_add_pair "${TEST_ONE_subs}" "__${__varname}__" "$(pane_parse NS)")"
|
|
;;
|
|
"ns1out")
|
|
__varname="${__arg%% *}"
|
|
pane_run NS1 "${__arg#* }"
|
|
pane_wait NS1
|
|
TEST_ONE_subs="$(list_add_pair "${TEST_ONE_subs}" "__${__varname}__" "$(pane_parse NS1)")"
|
|
;;
|
|
"ns2out")
|
|
__varname="${__arg%% *}"
|
|
pane_run NS2 "${__arg#* }"
|
|
pane_wait NS2
|
|
TEST_ONE_subs="$(list_add_pair "${TEST_ONE_subs}" "__${__varname}__" "$(pane_parse NS2)")"
|
|
;;
|
|
"check")
|
|
info_check "${__arg}"
|
|
__nok=0
|
|
eval "${__arg} || __nok=1"
|
|
if [ ${__nok} -eq 1 ]; then
|
|
TEST_ONE_nok=1
|
|
info_check_failed
|
|
else
|
|
info_check_passed
|
|
fi
|
|
;;
|
|
"sleep")
|
|
sleep "${__arg}"
|
|
;;
|
|
"info")
|
|
info "${__arg}"
|
|
;;
|
|
"report")
|
|
perf_report ${__arg}
|
|
;;
|
|
"th")
|
|
table_header ${__arg}
|
|
;;
|
|
"tr")
|
|
table_row "${__arg}"
|
|
;;
|
|
"tl")
|
|
table_line "${__arg}"
|
|
;;
|
|
"te")
|
|
table_end
|
|
;;
|
|
"bw")
|
|
table_value_throughput ${__arg} || TEST_ONE_perf_nok=1
|
|
;;
|
|
"lat")
|
|
table_value_latency ${__arg} || TEST_ONE_perf_nok=1
|
|
;;
|
|
"iperf3")
|
|
test_iperf3 ${__arg}
|
|
;;
|
|
"set")
|
|
TEST_ONE_subs="$(list_add_pair "${TEST_ONE_subs}" "__${__arg%% *}__" "${__arg#* }")"
|
|
;;
|
|
|
|
# Demo commands
|
|
"say")
|
|
text_write "${__arg}"
|
|
;;
|
|
"em")
|
|
em_write "${__arg}"
|
|
;;
|
|
"nl")
|
|
info_nolog ""
|
|
;;
|
|
"hl")
|
|
pane_highlight "${__arg}"
|
|
;;
|
|
"bsp")
|
|
text_backspace "${__arg}"
|
|
;;
|
|
"killp")
|
|
pane_kill "${__arg}"
|
|
;;
|
|
"resize")
|
|
pane_resize ${__arg}
|
|
;;
|
|
*)
|
|
__def_body="$(eval printf \"\$TEST_ONE_DEF_$__cmd\")"
|
|
if [ -n "${__def_body}" ]; then
|
|
__ifs="${IFS}"
|
|
IFS='
|
|
'
|
|
for __def_line in ${__def_body}; do
|
|
IFS= test_one_line "${__def_line}"
|
|
done
|
|
IFS="${__ifs}"
|
|
fi
|
|
;;
|
|
esac
|
|
}
|
|
|
|
# test_one() - Run a single test file evaluating directives
|
|
# $1: Name of test file, relative to test/ directory
|
|
test_one() {
|
|
TEST_ONE_dirclean=
|
|
__test_file="test/${1}"
|
|
|
|
__type="$(file -b --mime-type ${__test_file})"
|
|
if [ "${__type}" = "text/x-shellscript" ]; then
|
|
status_file_start "${1}" 1
|
|
"${__test_file}" && status_test_ok || status_test_fail
|
|
return
|
|
fi
|
|
|
|
if [ ${DEMO} -eq 0 ]; then
|
|
__ntests="$(grep -c "^test$(printf '\t')" "${__test_file}")"
|
|
status_file_start "${1}" "${__ntests}"
|
|
fi
|
|
|
|
[ ${CI} -eq 1 ] && video_link "${1}"
|
|
|
|
TEST_ONE_subs="$(list_add_pair "" "__BASEPATH__" "${BASEPATH}")"
|
|
TEST_ONE_nok=-1
|
|
TEST_ONE_perf_nok=0
|
|
TEST_ONE_skip=0
|
|
TEST_ONE_in_def=
|
|
while IFS= read -r __line; do
|
|
test_one_line "${__line}"
|
|
[ ${TEST_ONE_skip} -eq 1 ] && break
|
|
done < "${__test_file}"
|
|
|
|
for __d in ${TEST_ONE_dirclean}; do
|
|
rm -rf ${__d}
|
|
done
|
|
|
|
[ ${DEMO} -eq 1 ] && return
|
|
|
|
[ ${TEST_ONE_skip} -eq 1 ] && status_test_skip && return
|
|
[ ${TEST_ONE_perf_nok} -eq 0 ] || TEST_ONE_nok=1
|
|
[ ${TEST_ONE_nok} -eq 0 ] && status_test_ok || status_test_fail
|
|
}
|
|
|
|
# test() - Build list of tests to run, in order, then issue test_one()
|
|
# $@: Test files to run, relative to test/
|
|
test() {
|
|
__list=
|
|
|
|
cd test
|
|
for __f; do
|
|
__type="$(file -b --mime-type ${__f})"
|
|
if [ "${__type}" = "text/x-shellscript" ]; then
|
|
__list="$(list_add "${__list}" "${__f}")"
|
|
continue
|
|
fi
|
|
__list="$(list_add "${__list}" "${__f}")"
|
|
done
|
|
cd ..
|
|
|
|
for __f in ${__list}; do
|
|
test_one "${__f}"
|
|
done
|
|
}
|