chore: Apply shfmt to shell scripts

Branch: main
Author: Sean McBride (4 years ago)
Parent: 66482d012c
Commit: de5ee9af53
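Note: the shfmt flags recorded in this commit's settings change read as follows: -ln=bash parses the files as bash, -i 0 indents with tabs, -bn lets binary operators (&&, ||, |) start a line, -ci indents switch-case bodies, -sr puts a space after redirect operators (so 2>/dev/null becomes 2> /dev/null), and -kp keeps existing column padding. A minimal sketch of how the tree would be rewritten with these flags (the target path "." is an assumption, not taken from the commit):

	# rewrite all shell scripts in place with the project's shfmt options
	shfmt -ln=bash -i 0 -bn -ci -sr -kp -w .

Every hunk below is a mechanical consequence of these options; no behavioral change is intended.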

@@ -69,5 +69,7 @@
 	"C_Cpp.files.exclude": {
 		"awsm/wasmception": true,
 		"**/.vscode": true
-	}
+	},
+	"shellformat.flag": "-ln=bash -i 0 -bn -ci -sr -kp"
 }
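This settings hunk wires the same flag string into the editor, presumably so the VS Code shell-format extension formats on save with exactly the options used across the repository; "shellformat.flag" is that extension's pass-through setting for extra shfmt arguments.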

@@ -57,7 +57,7 @@ envsetup() {
 	# Check to see if the sledge:latest image exists, exiting if it does
 	# Because sledge:latest is "forked" after completing envsetup, this suggests that envsetup was already run
-	if docker image inspect ${SYS_DOC_NAMETAG} 1>/dev/null 2>/dev/null; then
+	if docker image inspect ${SYS_DOC_NAMETAG} 1> /dev/null 2> /dev/null; then
 		echo "${SYS_DOC_NAMETAG} image exists, which means that 'devenv.sh setup' already ran to completion!"
 		echo "If you are explicitly trying to rebuild SLEdge, run the following:"
 		echo "devenv.sh rma | Removes the images sledge:latest AND sledge-dev:latest"
@@ -67,7 +67,7 @@ envsetup() {
 	echo "Setting up ${SYS_NAME}"
 	echo "Updating git submodules"
-	git submodule update --init --recursive 2>/dev/null || :d
+	git submodule update --init --recursive 2> /dev/null || :d
 	echo "Using Dockerfile.$(uname -m)"
 	rm -f Dockerfile
@@ -77,7 +77,7 @@ envsetup() {
 	# This UX differs from detecting sledge, which immediately exits
 	# This is disabled because it doesn't seem useful
 	if
-		docker image inspect "${SYS_DOC_DEVNAMETAG}" 1>/dev/null 2>/dev/null && [ $SYS_BUILD_TIMEOUT -gt 0 ]
+		docker image inspect "${SYS_DOC_DEVNAMETAG}" 1> /dev/null 2> /dev/null && [ $SYS_BUILD_TIMEOUT -gt 0 ]
 	then
 		echo "${SYS_DOC_DEVNAME} image exists, rebuilding it"
 		echo "(you have ${SYS_BUILD_TIMEOUT}secs to stop the rebuild)"
@@ -95,7 +95,7 @@ envsetup() {
 		--name=${SYS_DOC_DEVNAME} \
 		--detach \
 		--mount type=bind,src="$(cd "$(dirname "${0}")" && pwd -P || exit 1),target=/${SYS_NAME}" \
-		"${SYS_DOC_DEVNAMETAG}" /bin/sleep 99999999 >/dev/null
+		"${SYS_DOC_DEVNAMETAG}" /bin/sleep 99999999 > /dev/null
 	# Execute the make install command on the sledge-dev image to build the project
 	echo "Building ${SYS_NAME}"
@@ -121,7 +121,7 @@ envsetup() {
 # If the image sledge:latest does not exist, automatically runs envsetup to build sledge and create it
 # If the a container names sledge is not running, starts it from sledge:latest, mounting the SLEdge project directory to /sledge
 envrun() {
-	if ! docker image inspect ${SYS_DOC_NAMETAG} >/dev/null; then
+	if ! docker image inspect ${SYS_DOC_NAMETAG} > /dev/null; then
 		envsetup
 	fi
@@ -136,7 +136,7 @@ envrun() {
 			--name=${SYS_DOC_NAME} \
 			--detach \
 			--mount type=bind,src="$(cd "$(dirname "${0}")" && pwd -P || exit 1),target=/${SYS_NAME}" \
-			${SYS_DOC_NAMETAG} /bin/sleep 99999999 >/dev/null
+			${SYS_DOC_NAMETAG} /bin/sleep 99999999 > /dev/null
 	fi
 	echo "Running shell"

@@ -2,7 +2,7 @@
 validate() {
 	utility="clang-format"
-	utility_version="$("$utility" --version 2>/dev/null)" || {
+	utility_version="$("$utility" --version 2> /dev/null)" || {
 		echo "$utility not found in path!"
 		exit 1
 	}
@@ -44,19 +44,19 @@ help() {
 dry_run() {
 	find runtime \
 		\( -path "runtime/thirdparty" -o -path "runtime/tests/gocr" -o -path "runtime/tests/TinyEKF" -o -path "runtime/tests/CMSIS_5_NN" -o -path "runtime/tests/sod" -o -path "runtime/tests/**/thirdparty" \) -prune -false -o \
-		-type f \( -iname \*.h -o -iname \*.c -o -iname \*.s \) -print |
-		xargs clang-format -Werror -n -ferror-limit=0
+		-type f \( -iname \*.h -o -iname \*.c -o -iname \*.s \) -print \
+		| xargs clang-format -Werror -n -ferror-limit=0
 }
 format() {
 	find runtime \
 		\( -path "runtime/thirdparty" -o -path "runtime/tests/gocr" -o -path "runtime/tests/TinyEKF" -o -path "runtime/tests/CMSIS_5_NN" -o -path "runtime/tests/sod" -o -path "runtime/tests/**/thirdparty" \) -prune -false -o \
-		-type f \( -iname \*.h -o -iname \*.c -o -iname \*.s \) -print |
-		xargs clang-format -i
+		-type f \( -iname \*.h -o -iname \*.c -o -iname \*.s \) -print \
+		| xargs clang-format -i
 }
 case $1 in
 	"-h" | "--help") help ;;
 	"-d" | "--dry-run") validate && dry_run ;;
 	"") validate && format ;;
 esac
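These two hunks show the one place where shfmt -bn restructures lines rather than just spacing: a pipeline continued across lines is rewritten so the pipe begins the continuation line instead of ending the previous one. A generic before/after sketch (the shellcheck command is illustrative, not from this repo):

	# before: operator trails the line
	find . -name '*.sh' -print |
		xargs shellcheck

	# after shfmt -bn: operator leads the continuation line
	find . -name '*.sh' -print \
		| xargs shellcheck

Both forms run the same pipeline; the leading-operator style just makes the continuation visible at the start of the line.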

@@ -118,7 +118,7 @@ $DRY_RUN_PREFIX ln -sfv "${SYS_COMPILER_REL_DIR}/${COMPILER_EXECUTABLE}" "${SYS_
 # `exec "/sledge/awsm/wasmception/dist/bin/clang" --target="wasm32-unknown-unknown-wasm" --sysroot="/sledge/awsm/wasmception/sysroot" "$@"`
 for file in clang clang++; do
 	wrapper_file="$(mktemp)"
-	cat >"$wrapper_file" <<EOT
+	cat > "$wrapper_file" << EOT
 #! /bin/sh
 exec "${WASM_BIN}/${file}" --target="$WASM_TARGET" --sysroot="$WASM_SYSROOT" "\$@"

@@ -18,7 +18,7 @@ if [[ ! -f "./initial_state.dat" ]]; then
 fi
 if [ "$1" != "-d" ]; then
-	SLEDGE_SANDBOX_PERF_LOG=$log PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" >rt.log 2>&1 &
+	SLEDGE_SANDBOX_PERF_LOG=$log PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" > rt.log 2>&1 &
 	sleep 2
 else
 	echo "Running under gdb"
@@ -29,9 +29,9 @@ total_count=50
 for ((i = 0; i < total_count; i++)); do
 	echo "$i"
-	curl -H 'Expect:' -H "Content-Type: application/octet-stream" --data-binary "@initial_state.dat" localhost:10000 2>/dev/null >./one_iteration_res.dat
-	curl -H 'Expect:' -H "Content-Type: application/octet-stream" --data-binary "@one_iteration_res.dat" localhost:10001 2>/dev/null >./two_iterations_res.dat
-	curl -H 'Expect:' -H "Content-Type: application/octet-stream" --data-binary "@two_iterations_res.dat" localhost:10002 2>/dev/null >./three_iterations_res.dat
+	curl -H 'Expect:' -H "Content-Type: application/octet-stream" --data-binary "@initial_state.dat" localhost:10000 2> /dev/null > ./one_iteration_res.dat
+	curl -H 'Expect:' -H "Content-Type: application/octet-stream" --data-binary "@one_iteration_res.dat" localhost:10001 2> /dev/null > ./two_iterations_res.dat
+	curl -H 'Expect:' -H "Content-Type: application/octet-stream" --data-binary "@two_iterations_res.dat" localhost:10002 2> /dev/null > ./three_iterations_res.dat
 	if diff -s one_iteration_res.dat one_iteration.dat && diff -s two_iterations_res.dat two_iterations.dat && diff -s three_iterations_res.dat three_iterations.dat; then
 		success_count=$((success_count + 1))
 		rm *_res.dat
@@ -48,7 +48,7 @@ echo "$success_count / $total_count"
 if [ "$1" != "-d" ]; then
 	sleep 5
 	echo -n "Running Cleanup: "
-	pkill sledgert >/dev/null 2>/dev/null
+	pkill sledgert > /dev/null 2> /dev/null
 	echo "[DONE]"
 fi

@@ -22,14 +22,14 @@ else
 	echo "Running under gdb"
 fi
-expected_result="$(tr -d '\0' <./expected_result.dat)"
+expected_result="$(tr -d '\0' < ./expected_result.dat)"
 success_count=0
 total_count=50
 for ((i = 0; i < total_count; i++)); do
 	echo "$i"
-	result="$(curl -H 'Expect:' -H "Content-Type: application/octet-stream" --data-binary "@ekf_raw.dat" localhost:10000 2>/dev/null | tr -d '\0')"
+	result="$(curl -H 'Expect:' -H "Content-Type: application/octet-stream" --data-binary "@ekf_raw.dat" localhost:10000 2> /dev/null | tr -d '\0')"
 	if [[ "$expected_result" == "$result" ]]; then
 		success_count=$((success_count + 1))
 	else
@@ -44,7 +44,7 @@ echo "$success_count / $total_count"
 if [ "$1" != "-d" ]; then
 	sleep 5
 	echo -n "Running Cleanup: "
-	pkill sledgert >/dev/null 2>/dev/null
+	pkill sledgert > /dev/null 2> /dev/null
 	echo "[DONE]"
 fi

@@ -33,7 +33,7 @@ file_type=bmp
 for class in airplane automobile bird cat deer dog frog horse ship truck; do
 	for instance in 1 2 3 4 5 6 7 8 9 10; do
 		echo "Classifying $class$instance.$file_type"
-		curl -H 'Expect:' -H "Content-Type: Image/$file_type" --data-binary "@images/$file_type/$class$instance.$file_type" localhost:10000 2>/dev/null
+		curl -H 'Expect:' -H "Content-Type: Image/$file_type" --data-binary "@images/$file_type/$class$instance.$file_type" localhost:10000 2> /dev/null
 	done
 done
@@ -42,7 +42,7 @@ exit
 for ((i = 0; i < total_count; i++)); do
 	echo "$i"
 	ext="$RANDOM"
-	curl -H 'Expect:' -H "Content-Type: image/jpg" --data-binary "@plate.jpg" --output "result_$ext.jpg" localhost:10000 2>/dev/null
+	curl -H 'Expect:' -H "Content-Type: image/jpg" --data-binary "@plate.jpg" --output "result_$ext.jpg" localhost:10000 2> /dev/null
 	actual_size="$(find result_"$ext".jpg -printf "%s")"
 	# echo "$result"
@@ -64,6 +64,6 @@ if [ "$1" != "-d" ]; then
 	sleep 5
 	echo -n "Running Cleanup: "
 	rm result_*.jpg
-	pkill sledgert >/dev/null 2>/dev/null
+	pkill sledgert > /dev/null 2> /dev/null
 	echo "[DONE]"
 fi

@@ -24,24 +24,24 @@ for ((i = 0; i < total_count; i++)); do
 	echo "$i"
 	ext="$RANDOM"
-	curl -H 'Expect:' -H "Content-Type: image/jpg" --data-binary "@shrinking_man_small.jpg" --output "result_${ext}_small.png" localhost:10000 2>/dev/null 1>/dev/null
-	pixel_differences="$(compare -identify -metric AE "result_${ext}_small.png" expected_result_small.png null: 2>&1 >/dev/null)"
+	curl -H 'Expect:' -H "Content-Type: image/jpg" --data-binary "@shrinking_man_small.jpg" --output "result_${ext}_small.png" localhost:10000 2> /dev/null 1> /dev/null
+	pixel_differences="$(compare -identify -metric AE "result_${ext}_small.png" expected_result_small.png null: 2>&1 > /dev/null)"
 	if [[ "$pixel_differences" != "0" ]]; then
 		echo "Small FAIL"
 		echo "$pixel_differences pixel differences detected"
 		exit 1
 	fi
-	curl -H 'Expect:' -H "Content-Type: image/jpg" --data-binary "@shrinking_man_medium.jpg" --output "result_${ext}_medium.png" localhost:10001 2>/dev/null 1>/dev/null
-	pixel_differences="$(compare -identify -metric AE "result_${ext}_medium.png" expected_result_medium.png null: 2>&1 >/dev/null)"
+	curl -H 'Expect:' -H "Content-Type: image/jpg" --data-binary "@shrinking_man_medium.jpg" --output "result_${ext}_medium.png" localhost:10001 2> /dev/null 1> /dev/null
+	pixel_differences="$(compare -identify -metric AE "result_${ext}_medium.png" expected_result_medium.png null: 2>&1 > /dev/null)"
 	if [[ "$pixel_differences" != "0" ]]; then
 		echo "Small FAIL"
 		echo "$pixel_differences pixel differences detected"
 		exit 1
 	fi
-	curl -H 'Expect:' -H "Content-Type: image/jpg" --data-binary "@shrinking_man_large.jpg" --output "result_${ext}_large.png" localhost:10002 2>/dev/null 1>/dev/null
-	pixel_differences="$(compare -identify -metric AE "result_${ext}_large.png" expected_result_large.png null: 2>&1 >/dev/null)"
+	curl -H 'Expect:' -H "Content-Type: image/jpg" --data-binary "@shrinking_man_large.jpg" --output "result_${ext}_large.png" localhost:10002 2> /dev/null 1> /dev/null
+	pixel_differences="$(compare -identify -metric AE "result_${ext}_large.png" expected_result_large.png null: 2>&1 > /dev/null)"
 	if [[ "$pixel_differences" != "0" ]]; then
 		echo "Small FAIL"
 		echo "$pixel_differences pixel differences detected"
@@ -57,7 +57,7 @@ rm -f result_*.png
 if [ "$1" != "-d" ]; then
 	sleep 5
 	echo -n "Running Cleanup: "
-	pkill sledgert >/dev/null 2>/dev/null
+	pkill sledgert > /dev/null 2> /dev/null
 	echo "[DONE]"
 fi

@@ -27,9 +27,9 @@ total_count=10
 for ((i = 0; i < total_count; i++)); do
 	echo "$i"
 	ext="$RANDOM"
-	curl -H 'Expect:' -H "Content-Type: image/jpg" --data-binary "@flower.jpg" --output "result_$ext.png" localhost:10000 2>/dev/null 1>/dev/null || exit 1
-	pixel_differences="$(compare -identify -metric AE "result_$ext.png" expected_result.png null: 2>&1 >/dev/null)"
+	curl -H 'Expect:' -H "Content-Type: image/jpg" --data-binary "@flower.jpg" --output "result_$ext.png" localhost:10000 2> /dev/null 1> /dev/null || exit 1
+	pixel_differences="$(compare -identify -metric AE "result_$ext.png" expected_result.png null: 2>&1 > /dev/null)"
 	if [[ "$pixel_differences" == "0" ]]; then
 		success_count=$((success_count + 1))
@@ -46,7 +46,7 @@ rm result_*.png
 if [ "$1" != "-d" ]; then
 	sleep 5
 	echo -n "Running Cleanup: "
-	pkill sledgert >/dev/null 2>/dev/null
+	pkill sledgert > /dev/null 2> /dev/null
 	echo "[DONE]"
 fi

@@ -37,6 +37,6 @@ done
 if [ "$1" != "-d" ]; then
 	sleep 5
 	echo -n "Running Cleanup: "
-	pkill sledgert >/dev/null 2>/dev/null
+	pkill sledgert > /dev/null 2> /dev/null
 	echo "[DONE]"
 fi

@@ -11,7 +11,7 @@ binary_directory=$(cd "$project_directory"/bin && pwd)
 log="$experiment_directory/log.csv"
 if [ "$1" != "-d" ]; then
-	SLEDGE_SANDBOX_PERF_LOG=$log PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" >rt.log 2>&1 &
+	SLEDGE_SANDBOX_PERF_LOG=$log PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" > rt.log 2>&1 &
 	sleep 2
 else
 	echo "Running under gdb"
@@ -34,9 +34,9 @@ for ((i = 0; i < total_count; i++)); do
 	for dpi in "${dpis[@]}"; do
 		echo "${dpi}"_dpi.pnm
 		pango-view --dpi=$dpi --font=mono -qo "${dpi}"_dpi.png -t "$words"
-		pngtopnm "${dpi}"_dpi.png >"${dpi}"_dpi.pnm
-		result=$(curl -H 'Expect:' -H "Content-Type: text/plain" --data-binary @"${dpi}"_dpi.pnm localhost:${dpi_to_port[$dpi]} 2>/dev/null)
+		pngtopnm "${dpi}"_dpi.png > "${dpi}"_dpi.pnm
+		result=$(curl -H 'Expect:' -H "Content-Type: text/plain" --data-binary @"${dpi}"_dpi.pnm localhost:${dpi_to_port[$dpi]} 2> /dev/null)
 		diff -ywBZE --suppress-common-lines <(echo "$words") <(echo "$result")
 		echo "==============================================="
@@ -48,8 +48,8 @@ if [ "$1" != "-d" ]; then
 	sleep 2
 	echo -n "Running Cleanup: "
 	rm ./*.png ./*.pnm
-	pkill --signal sigterm sledgert >/dev/null 2>/dev/null
+	pkill --signal sigterm sledgert > /dev/null 2> /dev/null
 	sleep 2
-	pkill sledgert -9 >/dev/null 2>/dev/null
+	pkill sledgert -9 > /dev/null 2> /dev/null
 	echo "[DONE]"
 fi
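The cleanup in the OCR experiments uses a two-phase shutdown: first SIGTERM so sledgert can exit cleanly, then after a grace period SIGKILL for anything still alive. A generic sketch of the pattern (the process name is illustrative):

	pkill --signal sigterm some-daemon > /dev/null 2> /dev/null
	sleep 2
	pkill -9 some-daemon > /dev/null 2> /dev/null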

@@ -11,7 +11,7 @@ binary_directory=$(cd "$project_directory"/bin && pwd)
 log="$experiment_directory/log.csv"
 if [ "$1" != "-d" ]; then
-	SLEDGE_SANDBOX_PERF_LOG=$log PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" >rt.log 2>&1 &
+	SLEDGE_SANDBOX_PERF_LOG=$log PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" > rt.log 2>&1 &
 	sleep 2
 else
 	echo "Running under gdb"
@@ -31,22 +31,22 @@ for ((i = 1; i <= total_count; i++)); do
 		"DejaVu Sans Mono")
 			echo "DejaVu Sans Mono"
 			pango-view --font="DejaVu Sans Mono" -qo mono_words.png -t "$words" || exit 1
-			pngtopnm mono_words.png >mono_words.pnm || exit 1
-			result=$(curl -H 'Expect:' -H "Content-Type: text/plain" --data-binary @mono_words.pnm localhost:10000 2>/dev/null)
+			pngtopnm mono_words.png > mono_words.pnm || exit 1
+			result=$( curl -H 'Expect:' -H "Content-Type: text/plain" --data-binary @mono_words.pnm localhost:10000 2> /dev/null)
 			diff -ywBZE --suppress-common-lines <(echo "$words") <(echo "$result")
 			;;
 		"Roboto")
 			echo "Roboto"
 			pango-view --font="Roboto" -qo Roboto_words.png -t "$words" || exit 1
-			pngtopnm Roboto_words.png >Roboto_words.pnm || exit 1
-			result=$(curl -H 'Expect:' -H "Content-Type: text/plain" --data-binary @Roboto_words.pnm localhost:10002 2>/dev/null)
+			pngtopnm Roboto_words.png > Roboto_words.pnm || exit 1
+			result=$( curl -H 'Expect:' -H "Content-Type: text/plain" --data-binary @Roboto_words.pnm localhost:10002 2> /dev/null)
 			diff -ywBZE --suppress-common-lines <(echo "$words") <(echo "$result")
 			;;
 		"Cascadia Code")
 			echo "Cascadia Code"
 			pango-view --font="Cascadia Code" -qo Cascadia_Code_words.png -t "$words" || exit 1
-			pngtopnm Cascadia_Code_words.png >Cascadia_Code_words.pnm || exit 1
-			result=$(curl -H 'Expect:' -H "Content-Type: text/plain" --data-binary @Cascadia_Code_words.pnm localhost:10001 2>/dev/null)
+			pngtopnm Cascadia_Code_words.png > Cascadia_Code_words.pnm || exit 1
+			result=$( curl -H 'Expect:' -H "Content-Type: text/plain" --data-binary @Cascadia_Code_words.pnm localhost:10001 2> /dev/null)
 			diff -ywBZE --suppress-common-lines <(echo "$words") <(echo "$result")
 			;;
 	esac
@@ -59,8 +59,8 @@ if [ "$1" != "-d" ]; then
 	sleep 2
 	echo -n "Running Cleanup: "
 	rm ./*.png ./*.pnm
-	pkill --signal sigterm sledgert >/dev/null 2>/dev/null
+	pkill --signal sigterm sledgert > /dev/null 2> /dev/null
 	sleep 2
-	pkill sledgert -9 >/dev/null 2>/dev/null
+	pkill sledgert -9 > /dev/null 2> /dev/null
 	echo "[DONE]"
 fi

@@ -11,7 +11,7 @@ binary_directory=$(cd "$project_directory"/bin && pwd)
 log="$experiment_directory/log.csv"
 if [ "$1" != "-d" ]; then
-	SLEDGE_SANDBOX_PERF_LOG=$log PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" >rt.log 2>&1 &
+	SLEDGE_SANDBOX_PERF_LOG=$log PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" > rt.log 2>&1 &
 	sleep 2
 else
 	echo "Running under gdb"
@@ -33,9 +33,9 @@ for ((i = 0; i < total_count; i++)); do
 	echo "${word_count}"_words.pnm
 	words="$(shuf -n"$word_count" /usr/share/dict/american-english)"
 	pango-view --font=mono -qo "$word_count"_words.png -t "$words" || exit 1
-	pngtopnm "$word_count"_words.png >"$word_count"_words.pnm || exit 1
-	result=$(curl -H 'Expect:' -H "Content-Type: text/plain" --data-binary @"${word_count}"_words.pnm localhost:${word_count_to_port["$word_count"_words.pnm]} 2>/dev/null)
+	pngtopnm "$word_count"_words.png > "$word_count"_words.pnm || exit 1
+	result=$(curl -H 'Expect:' -H "Content-Type: text/plain" --data-binary @"${word_count}"_words.pnm localhost:${word_count_to_port["$word_count"_words.pnm]} 2> /dev/null)
 	diff -ywBZE --suppress-common-lines <(echo "$words") <(echo "$result")
 	echo "==============================================="
@@ -47,8 +47,8 @@ if [ "$1" != "-d" ]; then
 	sleep 2
 	echo -n "Running Cleanup: "
 	rm ./*.png ./*.pnm
-	pkill --signal sigterm sledgert >/dev/null 2>/dev/null
+	pkill --signal sigterm sledgert > /dev/null 2> /dev/null
 	sleep 2
-	pkill sledgert -9 >/dev/null 2>/dev/null
+	pkill sledgert -9 > /dev/null 2> /dev/null
 	echo "[DONE]"
 fi

@@ -25,7 +25,7 @@ total_count=50
 for ((i = 0; i < total_count; i++)); do
 	echo "$i"
-	result=$(curl -H 'Expect:' -H "Content-Type: text/plain" --data-binary "@5x8.pnm" localhost:10000 2>/dev/null)
+	result=$(curl -H 'Expect:' -H "Content-Type: text/plain" --data-binary "@5x8.pnm" localhost:10000 2> /dev/null)
 	# echo "$result"
 	if [[ "$result" == "$expected_result" ]]; then
 		success_count=$((success_count + 1))
@@ -47,7 +47,7 @@ echo "$success_count / $total_count"
 if [ "$1" != "-d" ]; then
 	sleep 5
 	echo -n "Running Cleanup: "
-	pkill sledgert >/dev/null 2>/dev/null
+	pkill sledgert > /dev/null 2> /dev/null
 	echo "[DONE]"
 fi

@@ -24,7 +24,7 @@ total_count=50
 for ((i = 0; i < total_count; i++)); do
 	echo "$i"
-	result=$(curl -H 'Expect:' -H "Content-Type: text/plain" --data-binary "@handwrt1.pnm" localhost:10000 2>/dev/null)
+	result=$(curl -H 'Expect:' -H "Content-Type: text/plain" --data-binary "@handwrt1.pnm" localhost:10000 2> /dev/null)
 	# echo "$result"
 	if [[ "$result" == "$expected_result" ]]; then
 		success_count=$((success_count + 1))
@@ -45,7 +45,7 @@ echo "$success_count / $total_count"
 if [ "$1" != "-d" ]; then
 	sleep 5
 	echo -n "Running Cleanup: "
-	pkill sledgert >/dev/null 2>/dev/null
+	pkill sledgert > /dev/null 2> /dev/null
 	echo "[DONE]"
 fi

@@ -23,7 +23,7 @@ total_count=50
 for ((i = 0; i < total_count; i++)); do
 	echo "$i"
-	result=$(curl -H 'Expect:' -H "Content-Type: text/plain" --data-binary "@hyde.pnm" localhost:10000 2>/dev/null)
+	result=$(curl -H 'Expect:' -H "Content-Type: text/plain" --data-binary "@hyde.pnm" localhost:10000 2> /dev/null)
 	# echo "$result"
 	if [[ "$result" == "$expected_result" ]]; then
 		success_count=$((success_count + 1))
@@ -44,7 +44,7 @@ echo "$success_count / $total_count"
 if [ "$1" != "-d" ]; then
 	sleep 5
 	echo -n "Running Cleanup: "
-	pkill sledgert >/dev/null 2>/dev/null
+	pkill sledgert > /dev/null 2> /dev/null
 	echo "[DONE]"
 fi

@@ -12,15 +12,15 @@ log=log.txt
 mkdir -p "$results_directory"
-log_environment >>"$results_directory/$log"
+log_environment >> "$results_directory/$log"
 # Start the runtime
 if [ "$1" != "-d" ]; then
-	PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" >>"$results_directory/$log" 2>>"$results_directory/$log" &
+	PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" >> "$results_directory/$log" 2>> "$results_directory/$log" &
 	sleep 1
 else
 	echo "Running under gdb"
-	echo "Running under gdb" >>"$results_directory/$log"
+	echo "Running under gdb" >> "$results_directory/$log"
 fi
 payloads=(fivebyeight/5x8 handwriting/handwrt1 hyde/hyde)
 ports=(10000 10001 10002)
@@ -39,7 +39,7 @@ echo "Running Experiments"
 for i in {0..2}; do
 	printf "\t%s Payload: " "${payloads[$i]}"
 	file=$(echo "${payloads[$i]}" | awk -F/ '{print $2}').csv
-	hey -n "$iterations" -c 3 -cpus 2 -o csv -m GET -D "$experiment_directory/${payloads[$i]}.pnm" "http://localhost:${ports[$i]}" >"$results_directory/$file"
+	hey -n "$iterations" -c 3 -cpus 2 -o csv -m GET -D "$experiment_directory/${payloads[$i]}.pnm" "http://localhost:${ports[$i]}" > "$results_directory/$file"
 	echo "[DONE]"
 done
@@ -53,9 +53,9 @@ fi
 # Generate *.csv and *.dat results
 echo -n "Parsing Results: "
-printf "Concurrency,Success_Rate\n" >>"$results_directory/success.csv"
-printf "Concurrency,Throughput\n" >>"$results_directory/throughput.csv"
-printf "Con,p50,p90,p99,p100\n" >>"$results_directory/latency.csv"
+printf "Concurrency,Success_Rate\n" >> "$results_directory/success.csv"
+printf "Concurrency,Throughput\n" >> "$results_directory/throughput.csv"
+printf "Con,p50,p90,p99,p100\n" >> "$results_directory/latency.csv"
 for payload in ${payloads[*]}; do
 	# Calculate Success Rate for csv
@@ -63,20 +63,20 @@ for payload in ${payloads[*]}; do
 	awk -F, '
 		$7 == 200 {ok++}
 		END{printf "'"$file"',%3.5f\n", (ok / '"$iterations"' * 100)}
-	' <"$results_directory/$file.csv" >>"$results_directory/success.csv"
+	' < "$results_directory/$file.csv" >> "$results_directory/success.csv"
 	# Filter on 200s, convery from s to ms, and sort
-	awk -F, '$7 == 200 {print ($1 * 1000)}' <"$results_directory/$file.csv" |
-		sort -g >"$results_directory/$file-response.csv"
+	awk -F, '$7 == 200 {print ($1 * 1000)}' < "$results_directory/$file.csv" \
+		| sort -g > "$results_directory/$file-response.csv"
 	# Get Number of 200s
-	oks=$(wc -l <"$results_directory/$file-response.csv")
+	oks=$(wc -l < "$results_directory/$file-response.csv")
 	((oks == 0)) && continue # If all errors, skip line
 	# Get Latest Timestamp
 	duration=$(tail -n1 "$results_directory/$file.csv" | cut -d, -f8)
 	throughput=$(echo "$oks/$duration" | bc)
-	printf "%s,%f\n" "$file" "$throughput" >>"$results_directory/throughput.csv"
+	printf "%s,%f\n" "$file" "$throughput" >> "$results_directory/throughput.csv"
 	# Generate Latency Data for csv
 	awk '
@@ -92,7 +92,7 @@ for payload in ${payloads[*]}; do
 		NR==p90 {printf "%1.4f,", $0}
 		NR==p99 {printf "%1.4f,", $0}
 		NR==p100 {printf "%1.4f\n", $0}
-	' <"$results_directory/$file-response.csv" >>"$results_directory/latency.csv"
+	' < "$results_directory/$file-response.csv" >> "$results_directory/latency.csv"
 	# Delete scratch file used for sorting/counting
 	rm -rf "$results_directory/$file-response.csv"
@@ -100,8 +100,8 @@ done
 # Transform csvs to dat files for gnuplot
 for file in success latency throughput; do
-	echo -n "#" >"$results_directory/$file.dat"
-	tr ',' ' ' <"$results_directory/$file.csv" | column -t >>"$results_directory/$file.dat"
+	echo -n "#" > "$results_directory/$file.dat"
+	tr ',' ' ' < "$results_directory/$file.csv" | column -t >> "$results_directory/$file.dat"
 done
 # Generate gnuplots
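The csv-to-dat loop that recurs in these experiment scripts converts comma-separated results into whitespace-aligned columns that gnuplot can read, with a leading "#" so the header row is treated as a comment. A standalone sketch with made-up data:

	$ printf 'Con,p50,p90\n10,1.2,3.4\n' > latency.csv
	$ echo -n "#" > latency.dat
	$ tr ',' ' ' < latency.csv | column -t >> latency.dat
	$ cat latency.dat
	#Con  p50  p90
	10    1.2  3.4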

@@ -27,7 +27,7 @@ total_count=50
 for ((i = 0; i < total_count; i++)); do
 	echo "$i"
 	# ext="$RANDOM"
-	curl -H 'Expect:' -H "Content-Type: image/jpg" --data-binary "@goforward.raw" localhost:10000 2>/dev/null
+	curl -H 'Expect:' -H "Content-Type: image/jpg" --data-binary "@goforward.raw" localhost:10000 2> /dev/null
 	# # echo "$result"
 	# if [[ "$expected_size" == "$actual_size" ]]; then
@@ -48,6 +48,6 @@ if [ "$1" != "-d" ]; then
 	sleep 5
 	echo -n "Running Cleanup: "
 	rm result_*.jpg
-	pkill sledgert >/dev/null 2>/dev/null
+	pkill sledgert > /dev/null 2> /dev/null
 	echo "[DONE]"
 fi

@@ -33,8 +33,8 @@ log_environment() {
 kill_runtime() {
 	echo -n "Running Cleanup: "
-	pkill sledgert >/dev/null 2>/dev/null
-	pkill hey >/dev/null 2>/dev/null
+	pkill sledgert > /dev/null 2> /dev/null
+	pkill hey > /dev/null 2> /dev/null
 	echo "[DONE]"
 }

@@ -12,15 +12,15 @@ log=log.txt
 mkdir -p "$results_directory"
-log_environment >>"$results_directory/$log"
+log_environment >> "$results_directory/$log"
 # Start the runtime
 if [ "$1" != "-d" ]; then
-	PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" >>"$results_directory/$log" 2>>"$results_directory/$log" &
+	PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" >> "$results_directory/$log" 2>> "$results_directory/$log" &
 	sleep 1
 else
 	echo "Running under gdb"
-	echo "Running under gdb" >>"$results_directory/$log"
+	echo "Running under gdb" >> "$results_directory/$log"
 fi
 iterations=10000
@@ -36,7 +36,7 @@ concurrency=(1 20 40 60 80 100)
 echo "Running Experiments"
 for conn in ${concurrency[*]}; do
 	printf "\t%d Concurrency: " "$conn"
-	hey -n "$iterations" -c "$conn" -cpus 2 -o csv -m GET http://localhost:10000 >"$results_directory/con$conn.csv"
+	hey -n "$iterations" -c "$conn" -cpus 2 -o csv -m GET http://localhost:10000 > "$results_directory/con$conn.csv"
 	echo "[DONE]"
 done
@@ -50,29 +50,29 @@ fi
 # Generate *.csv and *.dat results
 echo -n "Parsing Results: "
-printf "Concurrency,Success_Rate\n" >>"$results_directory/success.csv"
-printf "Concurrency,Throughput\n" >>"$results_directory/throughput.csv"
-printf "Con,p50,p90,p99,p100\n" >>"$results_directory/latency.csv"
+printf "Concurrency,Success_Rate\n" >> "$results_directory/success.csv"
+printf "Concurrency,Throughput\n" >> "$results_directory/throughput.csv"
+printf "Con,p50,p90,p99,p100\n" >> "$results_directory/latency.csv"
 for conn in ${concurrency[*]}; do
 	# Calculate Success Rate for csv
 	awk -F, '
 		$7 == 200 {ok++}
 		END{printf "'"$conn"',%3.5f\n", (ok / '"$iterations"' * 100)}
-	' <"$results_directory/con$conn.csv" >>"$results_directory/success.csv"
+	' < "$results_directory/con$conn.csv" >> "$results_directory/success.csv"
 	# Filter on 200s, convery from s to ms, and sort
-	awk -F, '$7 == 200 {print ($1 * 1000)}' <"$results_directory/con$conn.csv" |
-		sort -g >"$results_directory/con$conn-response.csv"
+	awk -F, '$7 == 200 {print ($1 * 1000)}' < "$results_directory/con$conn.csv" \
+		| sort -g > "$results_directory/con$conn-response.csv"
 	# Get Number of 200s
-	oks=$(wc -l <"$results_directory/con$conn-response.csv")
+	oks=$(wc -l < "$results_directory/con$conn-response.csv")
 	((oks == 0)) && continue # If all errors, skip line
 	# Get Latest Timestamp
 	duration=$(tail -n1 "$results_directory/con$conn.csv" | cut -d, -f8)
 	throughput=$(echo "$oks/$duration" | bc)
-	printf "%d,%f\n" "$conn" "$throughput" >>"$results_directory/throughput.csv"
+	printf "%d,%f\n" "$conn" "$throughput" >> "$results_directory/throughput.csv"
 	# Generate Latency Data for csv
 	awk '
@@ -88,7 +88,7 @@ for conn in ${concurrency[*]}; do
 		NR==p90 {printf "%1.4f,", $0}
 		NR==p99 {printf "%1.4f,", $0}
 		NR==p100 {printf "%1.4f\n", $0}
-	' <"$results_directory/con$conn-response.csv" >>"$results_directory/latency.csv"
+	' < "$results_directory/con$conn-response.csv" >> "$results_directory/latency.csv"
 	# Delete scratch file used for sorting/counting
 	rm -rf "$results_directory/con$conn-response.csv"
@@ -96,8 +96,8 @@ done
 # Transform csvs to dat files for gnuplot
 for file in success latency throughput; do
-	echo -n "#" >"$results_directory/$file.dat"
-	tr ',' ' ' <"$results_directory/$file.csv" | column -t >>"$results_directory/$file.dat"
+	echo -n "#" > "$results_directory/$file.dat"
+	tr ',' ' ' < "$results_directory/$file.csv" | column -t >> "$results_directory/$file.dat"
 done
 # Generate gnuplots
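The awk filters in these parsing blocks rely on the column layout of hey's -o csv output: $1 is the response time in seconds, $7 is the HTTP status code, and $8 is the request's time offset from the start of the run (which is why the last row's $8 approximates the total duration). A minimal sketch of the success-rate pattern against a hypothetical results file:

	# count 200s and emit them as a percentage of 10000 requests
	awk -F, '
		$7 == 200 {ok++}
		END{printf "con10,%3.5f\n", (ok / 10000 * 100)}
	' < results/con10.csv >> results/success.csv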

@@ -13,7 +13,7 @@ results_directory="$experiment_directory/res/$timestamp"
 log=log.txt
 mkdir -p "$results_directory"
-log_environment >>"$results_directory/$log"
+log_environment >> "$results_directory/$log"
 inputs=(40 10)
 duration_sec=60
@@ -30,7 +30,7 @@ sleep 5
 echo "Running Experiments"
 # Run lower priority first, then higher priority. The lower priority has offsets to ensure it runs the entire time the high priority is trying to run
-hey -n 1000 -c 1000 -cpus 6 -t 0 -o csv -m GET -d "40\n" http://${host}:10040 >"$results_directory/fib40-con.csv"
+hey -n 1000 -c 1000 -cpus 6 -t 0 -o csv -m GET -d "40\n" http://${host}:10040 > "$results_directory/fib40-con.csv"
 # sleep $offset
 # hey -n 25000 -c 1000000 -t 0 -o csv -m GET -d "10\n" http://${host}:10010 >"$results_directory/fib10-con.csv" &
 # sleep $((duration_sec + offset + 45))
@@ -38,9 +38,9 @@ hey -n 1000 -c 1000 -cpus 6 -t 0 -o csv -m GET -d "40\n" http://${host}:10040 >"
 # Generate *.csv and *.dat results
 echo -n "Parsing Results: "
-printf "Payload,Success_Rate\n" >>"$results_directory/success.csv"
-printf "Payload,Throughput\n" >>"$results_directory/throughput.csv"
-printf "Payload,p50,p90,p99,p100\n" >>"$results_directory/latency.csv"
+printf "Payload,Success_Rate\n" >> "$results_directory/success.csv"
+printf "Payload,Throughput\n" >> "$results_directory/throughput.csv"
+printf "Payload,p50,p90,p99,p100\n" >> "$results_directory/latency.csv"
 deadlines_ms=(20 20000)
 # durations_s=(60 70)
@@ -52,21 +52,21 @@ for ((i = 1; i < 2; i++)); do
 	# duration=${durations_s[$i]}
 	# Get Number of Requests
-	requests=$(($(wc -l <"$results_directory/$payload.csv") - 1))
+	requests=$(($(wc -l < "$results_directory/$payload.csv") - 1))
 	((requests == 0)) && continue
 	# Calculate Success Rate for csv
 	awk -F, '
 		$7 == 200 && ($1 * 1000) <= '"$deadline"' {ok++}
 		END{printf "'"$payload"',%3.5f%\n", (ok / (NR - 1) * 100)}
-	' <"$results_directory/$payload.csv" >>"$results_directory/success.csv"
+	' < "$results_directory/$payload.csv" >> "$results_directory/success.csv"
 	# Filter on 200s, convery from s to ms, and sort
-	awk -F, '$7 == 200 {print ($1 * 1000)}' <"$results_directory/$payload.csv" |
-		sort -g >"$results_directory/$payload-response.csv"
+	awk -F, '$7 == 200 {print ($1 * 1000)}' < "$results_directory/$payload.csv" \
+		| sort -g > "$results_directory/$payload-response.csv"
 	# Get Number of 200s
-	oks=$(wc -l <"$results_directory/$payload-response.csv")
+	oks=$(wc -l < "$results_directory/$payload-response.csv")
 	((oks == 0)) && continue # If all errors, skip line
 	# Get Latest Timestamp
@@ -87,7 +87,7 @@ for ((i = 1; i < 2; i++)); do
 		NR==p90 {printf "%1.4f,", $0}
 		NR==p99 {printf "%1.4f,", $0}
 		NR==p100 {printf "%1.4f\n", $0}
-	' <"$results_directory/$payload-response.csv" >>"$results_directory/latency.csv"
+	' < "$results_directory/$payload-response.csv" >> "$results_directory/latency.csv"
 	# Delete scratch file used for sorting/counting
 	# rm -rf "$results_directory/$payload-response.csv"
@@ -95,8 +95,8 @@ done
 # Transform csvs to dat files for gnuplot
 for file in success latency; do
-	echo -n "#" >"$results_directory/$file.dat"
-	tr ',' ' ' <"$results_directory/$file.csv" | column -t >>"$results_directory/$file.dat"
+	echo -n "#" > "$results_directory/$file.dat"
+	tr ',' ' ' < "$results_directory/$file.csv" | column -t >> "$results_directory/$file.dat"
 done
 # Generate gnuplots. Commented out because we don't have *.gnuplots defined

@@ -14,7 +14,7 @@ results_directory="$experiment_directory/res/$timestamp"
 log=log.txt
 mkdir -p "$results_directory"
-log_environment >>"$results_directory/$log"
+log_environment >> "$results_directory/$log"
 inputs=(40 10)
 duration_sec=30
@@ -31,18 +31,18 @@ sleep 5
 echo "Running Experiments"
 # Run lower priority first, then higher priority. The lower priority has offsets to ensure it runs the entire time the high priority is trying to run
-hey -z $((duration_sec + 2 * offset))s -cpus 3 -c 200 -t 0 -o csv -m GET -d "40\n" http://${host}:10040 >"$results_directory/fib40-con.csv" &
+hey -z $((duration_sec + 2 * offset))s -cpus 3 -c 200 -t 0 -o csv -m GET -d "40\n" http://${host}:10040 > "$results_directory/fib40-con.csv" &
 sleep $offset
-hey -z ${duration_sec}s -cpus 3 -c 200 -t 0 -o csv -m GET -d "10\n" http://${host}:10010 >"$results_directory/fib10-con.csv" &
+hey -z ${duration_sec}s -cpus 3 -c 200 -t 0 -o csv -m GET -d "10\n" http://${host}:10010 > "$results_directory/fib10-con.csv" &
 sleep $((duration_sec + offset + 15))
 sleep 30
 # Generate *.csv and *.dat results
 echo -n "Parsing Results: "
-printf "Payload,Success_Rate\n" >>"$results_directory/success.csv"
-printf "Payload,Throughput\n" >>"$results_directory/throughput.csv"
-printf "Payload,p50,p90,p99,p100\n" >>"$results_directory/latency.csv"
+printf "Payload,Success_Rate\n" >> "$results_directory/success.csv"
+printf "Payload,Throughput\n" >> "$results_directory/throughput.csv"
+printf "Payload,p50,p90,p99,p100\n" >> "$results_directory/latency.csv"
 deadlines_ms=(20 20000)
 payloads=(fib10-con fib40-con)
@@ -54,7 +54,7 @@ for ((i = 0; i < 2; i++)); do
 	duration=${durations_s[$i]}
 	# Get Number of Requests
-	requests=$(($(wc -l <"$results_directory/$payload.csv") - 1))
+	requests=$(($(wc -l < "$results_directory/$payload.csv") - 1))
 	((requests == 0)) && continue
 	# Calculate Success Rate for csv
@@ -62,20 +62,20 @@ for ((i = 0; i < 2; i++)); do
 		$7 == 200 {denom++}
 		$7 == 200 && ($1 * 1000) <= '"$deadline"' {ok++}
 		END{printf "'"$payload"',%3.5f%\n", (ok / denom * 100)}
-	' <"$results_directory/$payload.csv" >>"$results_directory/success.csv"
+	' < "$results_directory/$payload.csv" >> "$results_directory/success.csv"
 	# Filter on 200s, convery from s to ms, and sort
-	awk -F, '$7 == 200 {print ($1 * 1000)}' <"$results_directory/$payload.csv" |
-		sort -g >"$results_directory/$payload-response.csv"
+	awk -F, '$7 == 200 {print ($1 * 1000)}' < "$results_directory/$payload.csv" \
+		| sort -g > "$results_directory/$payload-response.csv"
 	# Get Number of 200s
-	oks=$(wc -l <"$results_directory/$payload-response.csv")
+	oks=$(wc -l < "$results_directory/$payload-response.csv")
 	((oks == 0)) && continue # If all errors, skip line
 	# Get Latest Timestamp
 	duration=$(tail -n1 "$results_directory/$payload.csv" | cut -d, -f8)
 	throughput=$(echo "$oks/$duration" | bc)
-	printf "%s,%f\n" "$payload" "$throughput" >>"$results_directory/throughput.csv"
+	printf "%s,%f\n" "$payload" "$throughput" >> "$results_directory/throughput.csv"
 	# Generate Latency Data for csv
 	awk '
@@ -91,7 +91,7 @@ for ((i = 0; i < 2; i++)); do
 		NR==p90 {printf "%1.4f,", $0}
 		NR==p99 {printf "%1.4f,", $0}
 		NR==p100 {printf "%1.4f\n", $0}
-	' <"$results_directory/$payload-response.csv" >>"$results_directory/latency.csv"
+	' < "$results_directory/$payload-response.csv" >> "$results_directory/latency.csv"
 	# Delete scratch file used for sorting/counting
 	# rm -rf "$results_directory/$payload-response.csv"
@@ -99,8 +99,8 @@ done
 # Transform csvs to dat files for gnuplot
 for file in success latency throughput; do
-	echo -n "#" >"$results_directory/$file.dat"
-	tr ',' ' ' <"$results_directory/$file.csv" | column -t >>"$results_directory/$file.dat"
+	echo -n "#" > "$results_directory/$file.dat"
+	tr ',' ' ' < "$results_directory/$file.csv" | column -t >> "$results_directory/$file.dat"
 done
 # Generate gnuplots. Commented out because we don't have *.gnuplots defined

@@ -14,7 +14,7 @@ results_directory="$experiment_directory/res/$timestamp"
 log=log.txt
 mkdir -p "$results_directory"
-log_environment >>"$results_directory/$log"
+log_environment >> "$results_directory/$log"
 inputs=(10)
 duration_sec=30
@@ -31,16 +31,16 @@ echo "Running Experiments"
 # Run lower priority first, then higher priority. The lower priority has offsets to ensure it runs the entire time the high priority is trying to run
 # hey -z $((duration_sec + 2 * offset))s -cpus 3 -c 200 -t 0 -o csv -m GET -d "40\n" http://${host}:10040 >"$results_directory/fib40-con.csv" &
 # sleep $offset
-hey -z ${duration_sec}s -cpus 6 -c 400 -t 0 -o csv -m GET -d "10\n" http://${host}:10010 >"$results_directory/fib10-con.csv"
+hey -z ${duration_sec}s -cpus 6 -c 400 -t 0 -o csv -m GET -d "10\n" http://${host}:10010 > "$results_directory/fib10-con.csv"
 # sleep $((duration_sec + offset + 15))
 # sleep 30
 # Generate *.csv and *.dat results
 echo -n "Parsing Results: "
-printf "Payload,Success_Rate\n" >>"$results_directory/success.csv"
-printf "Payload,Throughput\n" >>"$results_directory/throughput.csv"
-printf "Payload,p50,p90,p99,p100\n" >>"$results_directory/latency.csv"
+printf "Payload,Success_Rate\n" >> "$results_directory/success.csv"
+printf "Payload,Throughput\n" >> "$results_directory/throughput.csv"
+printf "Payload,p50,p90,p99,p100\n" >> "$results_directory/latency.csv"
 deadlines_ms=(20 20000)
 payloads=(fib10-con fib40-con)
@@ -52,7 +52,7 @@ for ((i = 0; i < 1; i++)); do
 	duration=${durations_s[$i]}
 	# Get Number of Requests
-	requests=$(($(wc -l <"$results_directory/$payload.csv") - 1))
+	requests=$(($(wc -l < "$results_directory/$payload.csv") - 1))
 	((requests == 0)) && continue
 	# Calculate Success Rate for csv
@@ -60,20 +60,20 @@ for ((i = 0; i < 1; i++)); do
 		$7 == 200 {denom++}
 		$7 == 200 && ($1 * 1000) <= '"$deadline"' {ok++}
 		END{printf "'"$payload"',%3.5f%\n", (ok / denom * 100)}
-	' <"$results_directory/$payload.csv" >>"$results_directory/success.csv"
+	' < "$results_directory/$payload.csv" >> "$results_directory/success.csv"
 	# Filter on 200s, convery from s to ms, and sort
-	awk -F, '$7 == 200 {print ($1 * 1000)}' <"$results_directory/$payload.csv" |
-		sort -g >"$results_directory/$payload-response.csv"
+	awk -F, '$7 == 200 {print ($1 * 1000)}' < "$results_directory/$payload.csv" \
+		| sort -g > "$results_directory/$payload-response.csv"
 	# Get Number of 200s
-	oks=$(wc -l <"$results_directory/$payload-response.csv")
+	oks=$(wc -l < "$results_directory/$payload-response.csv")
 	((oks == 0)) && continue # If all errors, skip line
 	# Get Latest Timestamp
 	duration=$(tail -n1 "$results_directory/$payload.csv" | cut -d, -f8)
 	throughput=$(echo "$oks/$duration" | bc)
-	printf "%s,%f\n" "$payload" "$throughput" >>"$results_directory/throughput.csv"
+	printf "%s,%f\n" "$payload" "$throughput" >> "$results_directory/throughput.csv"
 	# Generate Latency Data for csv
 	awk '
@@ -89,7 +89,7 @@ for ((i = 0; i < 1; i++)); do
 		NR==p90 {printf "%1.4f,", $0}
 		NR==p99 {printf "%1.4f,", $0}
 		NR==p100 {printf "%1.4f\n", $0}
-	' <"$results_directory/$payload-response.csv" >>"$results_directory/latency.csv"
+	' < "$results_directory/$payload-response.csv" >> "$results_directory/latency.csv"
 	# Delete scratch file used for sorting/counting
 	# rm -rf "$results_directory/$payload-response.csv"
@@ -97,8 +97,8 @@ done
 # Transform csvs to dat files for gnuplot
 for file in success latency throughput; do
-	echo -n "#" >"$results_directory/$file.dat"
-	tr ',' ' ' <"$results_directory/$file.csv" | column -t >>"$results_directory/$file.dat"
+	echo -n "#" > "$results_directory/$file.dat"
+	tr ',' ' ' < "$results_directory/$file.csv" | column -t >> "$results_directory/$file.dat"
 done
 # Generate gnuplots. Commented out because we don't have *.gnuplots defined

@@ -10,9 +10,9 @@ results_directory="$experiment_directory/res/1606615320-fifo-adm"
 # Generate *.csv and *.dat results
 echo -n "Parsing Results: "
-printf "Payload,Success_Rate\n" >>"$results_directory/success.csv"
-printf "Payload,Throughput\n" >>"$results_directory/throughput.csv"
-printf "Payload,p50,p90,p99,p100\n" >>"$results_directory/latency.csv"
+printf "Payload,Success_Rate\n" >> "$results_directory/success.csv"
+printf "Payload,Throughput\n" >> "$results_directory/throughput.csv"
+printf "Payload,p50,p90,p99,p100\n" >> "$results_directory/latency.csv"
 deadlines_ms=(20 20000)
 payloads=(fib10-con fib40-con)
@@ -22,27 +22,27 @@ for ((i = 0; i < 2; i++)); do
 	deadline=${deadlines_ms[$i]}
 	# Get Number of Requests
-	requests=$(($(wc -l <"$results_directory/$payload.csv") - 1))
+	requests=$(($(wc -l < "$results_directory/$payload.csv") - 1))
 	((requests == 0)) && continue
 	# Calculate Success Rate for csv
 	awk -F, '
 		$7 == 200 && ($1 * 1000) <= '"$deadline"' {ok++}
 		END{printf "'"$payload"',%3.5f%\n", (ok / (NR - 1) * 100)}
-	' <"$results_directory/$payload.csv" >>"$results_directory/success.csv"
+	' < "$results_directory/$payload.csv" >> "$results_directory/success.csv"
 	# Filter on 200s, convery from s to ms, and sort
-	awk -F, '$7 == 200 {print ($1 * 1000)}' <"$results_directory/$payload.csv" |
-		sort -g >"$results_directory/$payload-response.csv"
+	awk -F, '$7 == 200 {print ($1 * 1000)}' < "$results_directory/$payload.csv" \
+		| sort -g > "$results_directory/$payload-response.csv"
 	# Get Number of 200s
-	oks=$(wc -l <"$results_directory/$payload-response.csv")
+	oks=$(wc -l < "$results_directory/$payload-response.csv")
 	((oks == 0)) && continue # If all errors, skip line
 	# Get Latest Timestamp
 	duration=$(tail -n1 "$results_directory/$payload.csv" | cut -d, -f8)
 	throughput=$(echo "$oks/$duration" | bc)
-	printf "%s,%f\n" "$payload" "$throughput" >>"$results_directory/throughput.csv"
+	printf "%s,%f\n" "$payload" "$throughput" >> "$results_directory/throughput.csv"
 	# Generate Latency Data for csv
 	awk '
@@ -58,7 +58,7 @@ for ((i = 0; i < 2; i++)); do
 		NR==p90 {printf "%1.4f,", $0}
 		NR==p99 {printf "%1.4f,", $0}
 		NR==p100 {printf "%1.4f\n", $0}
-	' <"$results_directory/$payload-response.csv" >>"$results_directory/latency.csv"
+	' < "$results_directory/$payload-response.csv" >> "$results_directory/latency.csv"
 	# Delete scratch file used for sorting/counting
 	# rm -rf "$results_directory/$payload-response.csv"
@@ -66,8 +66,8 @@ done
 # Transform csvs to dat files for gnuplot
 for file in success latency throughput; do
-	echo -n "#" >"$results_directory/$file.dat"
-	tr ',' ' ' <"$results_directory/$file.csv" | column -t >>"$results_directory/$file.dat"
+	echo -n "#" > "$results_directory/$file.dat"
+	tr ',' ' ' < "$results_directory/$file.csv" | column -t >> "$results_directory/$file.dat"
 done
 # Generate gnuplots. Commented out because we don't have *.gnuplots defined

@ -15,7 +15,7 @@ results_directory="$experiment_directory/res/$timestamp"
log=log.txt
mkdir -p "$results_directory"
log_environment >> "$results_directory/$log"
inputs=(40 10)
duration_sec=60
@ -41,9 +41,9 @@ offset=5
# Generate *.csv and *.dat results
echo -n "Parsing Results: "
printf "Payload,Success_Rate\n" >> "$results_directory/success.csv"
printf "Payload,Throughput\n" >> "$results_directory/throughput.csv"
printf "Payload,p50,p90,p99,p100\n" >> "$results_directory/latency.csv"
deadlines_ms=(20 20000)
payloads=(fib10-con fib40-con)
@ -55,7 +55,7 @@ for ((i = 0; i < 2; i++)); do
duration=${durations_s[$i]}
# Get Number of Requests
requests=$(($(wc -l < "$results_directory/$payload.csv") - 1))
((requests == 0)) && continue
# Calculate Success Rate for csv
@ -63,20 +63,20 @@ for ((i = 0; i < 2; i++)); do
$7 == 200 {denom++}
$7 == 200 && ($1 * 1000) <= '"$deadline"' {ok++}
END{printf "'"$payload"',%3.5f%%\n", (ok / denom * 100)}
' < "$results_directory/$payload.csv" >> "$results_directory/success.csv"
# Filter on 200s, convert from s to ms, and sort
awk -F, '$7 == 200 {print ($1 * 1000)}' < "$results_directory/$payload.csv" \
| sort -g > "$results_directory/$payload-response.csv"
# Get Number of 200s
oks=$(wc -l < "$results_directory/$payload-response.csv")
((oks == 0)) && continue # If all errors, skip line
# Get Latest Timestamp
# duration=$(tail -n1 "$results_directory/$payload.csv" | cut -d, -f8)
throughput=$(echo "$oks/$duration" | bc)
printf "%s,%f\n" "$payload" "$throughput" >> "$results_directory/throughput.csv"
# Generate Latency Data for csv
awk '
@ -92,7 +92,7 @@ for ((i = 0; i < 2; i++)); do
NR==p90 {printf "%1.4f,", $0}
NR==p99 {printf "%1.4f,", $0}
NR==p100 {printf "%1.4f\n", $0}
' < "$results_directory/$payload-response.csv" >> "$results_directory/latency.csv"
# Delete scratch file used for sorting/counting
# rm -rf "$results_directory/$payload-response.csv"
@ -100,8 +100,8 @@ done
# Transform csvs to dat files for gnuplot
for file in success latency throughput; do
echo -n "#" > "$results_directory/$file.dat"
tr ',' ' ' < "$results_directory/$file.csv" | column -t >> "$results_directory/$file.dat"
done
# Generate gnuplots. Commented out because we don't have *.gnuplots defined

@ -15,15 +15,15 @@ for scheduler in ${schedulers[*]}; do
log=log.txt
mkdir -p "$results_directory"
log_environment >> "$results_directory/$log"
# Start the runtime
if [ "$1" != "-d" ]; then
SLEDGE_NWORKERS=5 SLEDGE_SCHEDULER=$scheduler PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" >> "$results_directory/$log" 2>> "$results_directory/$log" &
sleep 1
else
echo "Running under gdb"
echo "Running under gdb" >> "$results_directory/$log"
fi
inputs=(40 10)
@ -40,13 +40,13 @@ for scheduler in ${schedulers[*]}; do
echo "Running Experiments"
# Run each separately
hey -z ${duration_sec}s -cpus 4 -c 100 -t 0 -o csv -m GET -d "40\n" http://localhost:10040 > "$results_directory/fib40.csv"
hey -z ${duration_sec}s -cpus 4 -c 100 -t 0 -o csv -m GET -d "10\n" http://localhost:10010 > "$results_directory/fib10.csv"
# Run lower priority first, then higher priority. The lower priority has offsets to ensure it runs the entire time the high priority is trying to run
hey -z $((duration_sec + 2 * offset))s -cpus 2 -c 100 -t 0 -o csv -m GET -d "40\n" http://localhost:10040 > "$results_directory/fib40-con.csv" &
sleep $offset
hey -z ${duration_sec}s -cpus 2 -c 100 -t 0 -o csv -m GET -d "10\n" http://localhost:10010 > "$results_directory/fib10-con.csv" &
sleep $((duration_sec + offset + 15))
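Since the hey flags are terse: -z runs the load for a wall-clock duration, -c sets concurrent workers, -cpus caps the cores hey itself uses, -t 0 disables the per-request timeout, and -o csv emits the per-request rows the parsers below consume. The offset arithmetic stretches the low-priority fib40 run so it brackets the high-priority fib10 run on both sides; the intended timeline, as a sketch:

# t = 0                        fib40-con starts (runs duration_sec + 2*offset)
# t = offset                   fib10-con starts (runs duration_sec)
# t = offset + duration_sec    fib10-con ends; fib40-con still running
# t = 2*offset + duration_sec  fib40-con ends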
# Stop the runtime if not in debug mode
@ -55,9 +55,9 @@ for scheduler in ${schedulers[*]}; do
# Generate *.csv and *.dat results
echo -n "Parsing Results: "
printf "Payload,Success_Rate\n" >> "$results_directory/success.csv"
printf "Payload,Throughput\n" >> "$results_directory/throughput.csv"
printf "Payload,p50,p90,p99,p100\n" >> "$results_directory/latency.csv"
deadlines_ms=(2 2 3000 3000)
payloads=(fib10 fib10-con fib40 fib40-con)
@ -68,27 +68,27 @@ for scheduler in ${schedulers[*]}; do
deadline=${deadlines_ms[$i]}
# Get Number of Requests
requests=$(($(wc -l < "$results_directory/$payload.csv") - 1))
((requests == 0)) && continue
# Calculate Success Rate for csv
awk -F, '
$7 == 200 && ($1 * 1000) <= '"$deadline"' {ok++}
END{printf "'"$payload"',%3.5f%%\n", (ok / (NR - 1) * 100)}
' < "$results_directory/$payload.csv" >> "$results_directory/success.csv"
# Filter on 200s, convert from s to ms, and sort
awk -F, '$7 == 200 {print ($1 * 1000)}' < "$results_directory/$payload.csv" \
| sort -g > "$results_directory/$payload-response.csv"
# Get Number of 200s
oks=$(wc -l < "$results_directory/$payload-response.csv")
((oks == 0)) && continue # If all errors, skip line
# Get Latest Timestamp
duration=$(tail -n1 "$results_directory/$payload.csv" | cut -d, -f8)
throughput=$(echo "$oks/$duration" | bc)
printf "%s,%f\n" "$payload" "$throughput" >> "$results_directory/throughput.csv"
# Generate Latency Data for csv
awk '
@ -104,7 +104,7 @@ for scheduler in ${schedulers[*]}; do
NR==p90 {printf "%1.4f,", $0}
NR==p99 {printf "%1.4f,", $0}
NR==p100 {printf "%1.4f\n", $0}
' < "$results_directory/$payload-response.csv" >> "$results_directory/latency.csv"
# Delete scratch file used for sorting/counting
# rm -rf "$results_directory/$payload-response.csv"
@ -112,8 +112,8 @@ for scheduler in ${schedulers[*]}; do
# Transform csvs to dat files for gnuplot
for file in success latency throughput; do
echo -n "#" > "$results_directory/$file.dat"
tr ',' ' ' < "$results_directory/$file.csv" | column -t >> "$results_directory/$file.dat"
done
# Generate gnuplots. Commented out because we don't have *.gnuplots defined

@ -15,15 +15,15 @@ for scheduler in ${schedulers[*]}; do
log=log.txt
mkdir -p "$results_directory"
log_environment >> "$results_directory/$log"
# Start the runtime
if [ "$1" != "-d" ]; then
SLEDGE_NWORKERS=5 SLEDGE_SCHEDULER=$scheduler PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" >> "$results_directory/$log" 2>> "$results_directory/$log" &
sleep 1
else
echo "Running under gdb"
echo "Running under gdb" >> "$results_directory/$log"
fi
inputs=(40 10)
@ -40,13 +40,13 @@ for scheduler in ${schedulers[*]}; do
echo "Running Experiments"
# Run each separately
hey -z ${duration_sec}s -cpus 4 -c 100 -t 0 -o csv -m GET -d "40\n" http://localhost:10040 > "$results_directory/fib40.csv"
hey -z ${duration_sec}s -cpus 4 -c 100 -t 0 -o csv -m GET -d "10\n" http://localhost:10010 > "$results_directory/fib10.csv"
# Run lower priority first, then higher priority. The lower priority has offsets to ensure it runs the entire time the high priority is trying to run
hey -z $((duration_sec + 2 * offset))s -cpus 2 -c 100 -t 0 -o csv -m GET -d "40\n" http://localhost:10040 > "$results_directory/fib40-con.csv" &
sleep $offset
hey -z ${duration_sec}s -cpus 2 -c 100 -t 0 -o csv -m GET -d "10\n" http://localhost:10010 > "$results_directory/fib10-con.csv" &
sleep $((duration_sec + offset + 15))
# Stop the runtime if not in debug mode
@ -55,9 +55,9 @@ for scheduler in ${schedulers[*]}; do
# Generate *.csv and *.dat results
echo -n "Parsing Results: "
printf "Payload,Success_Rate\n" >> "$results_directory/success.csv"
printf "Payload,Throughput\n" >> "$results_directory/throughput.csv"
printf "Payload,p50,p90,p99,p100\n" >> "$results_directory/latency.csv"
deadlines_ms=(2 2 3000 3000)
payloads=(fib10 fib10-con fib40 fib40-con)
@ -68,27 +68,27 @@ for scheduler in ${schedulers[*]}; do
deadline=${deadlines_ms[$i]}
# Get Number of Requests
requests=$(($(wc -l < "$results_directory/$payload.csv") - 1))
((requests == 0)) && continue
# Calculate Success Rate for csv
awk -F, '
$7 == 200 && ($1 * 1000) <= '"$deadline"' {ok++}
END{printf "'"$payload"',%3.5f%%\n", (ok / (NR - 1) * 100)}
' < "$results_directory/$payload.csv" >> "$results_directory/success.csv"
# Filter on 200s, convert from s to ms, and sort
awk -F, '$7 == 200 {print ($1 * 1000)}' < "$results_directory/$payload.csv" \
| sort -g > "$results_directory/$payload-response.csv"
# Get Number of 200s
oks=$(wc -l < "$results_directory/$payload-response.csv")
((oks == 0)) && continue # If all errors, skip line
# Get Latest Timestamp
duration=$(tail -n1 "$results_directory/$payload.csv" | cut -d, -f8)
throughput=$(echo "$oks/$duration" | bc)
printf "%s,%f\n" "$payload" "$throughput" >> "$results_directory/throughput.csv"
# Generate Latency Data for csv
awk '
@ -104,7 +104,7 @@ for scheduler in ${schedulers[*]}; do
NR==p90 {printf "%1.4f%%,", $0 / '"$deadline"' * 100}
NR==p99 {printf "%1.4f%%,", $0 / '"$deadline"' * 100}
NR==p100 {printf "%1.4f%%\n", $0 / '"$deadline"' * 100}
' < "$results_directory/$payload-response.csv" >> "$results_directory/latency.csv"
# Delete scratch file used for sorting/counting
# rm -rf "$results_directory/$payload-response.csv"
@ -112,8 +112,8 @@ for scheduler in ${schedulers[*]}; do
# Transform csvs to dat files for gnuplot
for file in success latency throughput; do
echo -n "#" > "$results_directory/$file.dat"
tr ',' ' ' < "$results_directory/$file.csv" | column -t >> "$results_directory/$file.dat"
done
# Generate gnuplots. Commented out because we don't have *.gnuplots defined

@ -5,7 +5,7 @@ for size in 1024 $((1024 * 10)) $((1024 * 100)) $((1024 * 1024)); do
i=0
echo -n "Generating $size:"
while ((i < size)); do
printf 'a' >> $size.txt
((i++))
done
echo "[DONE]"

@ -12,15 +12,15 @@ log=log.txt
mkdir -p "$results_directory"
log_environment >> "$results_directory/$log"
# Start the runtime
if [ "$1" != "-d" ]; then
PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" >> "$results_directory/$log" 2>> "$results_directory/$log" &
sleep 1
else
echo "Running under gdb"
echo "Running under gdb" >> "$results_directory/$log"
fi
payloads=(1024 10240 102400 1048576)
@ -53,7 +53,7 @@ echo "[DONE]"
echo "Running Experiments"
for i in {0..3}; do
printf "\t%d Payload: " "${payloads[$i]}"
hey -n "$iterations" -c 1 -cpus 2 -o csv -m GET -D "$experiment_directory/body/${payloads[$i]}.txt" http://localhost:"${ports[$i]}" > "$results_directory/${payloads[$i]}.csv"
echo "[DONE]"
done
@ -66,29 +66,29 @@ fi
# Generate *.csv and *.dat results
echo -n "Parsing Results: "
printf "Payload,Success_Rate\n" >> "$results_directory/success.csv"
printf "Payload,Throughput\n" >> "$results_directory/throughput.csv"
printf "Payload,p50,p90,p99,p100\n" >> "$results_directory/latency.csv"
for payload in ${payloads[*]}; do
# Calculate Success Rate for csv
awk -F, '
$7 == 200 {ok++}
END{printf "'"$payload"',%3.5f\n", (ok / '"$iterations"' * 100)}
' < "$results_directory/$payload.csv" >> "$results_directory/success.csv"
# Filter on 200s, convert from s to ms, and sort
awk -F, '$7 == 200 {print ($1 * 1000)}' < "$results_directory/$payload.csv" \
| sort -g > "$results_directory/$payload-response.csv"
# Get Number of 200s
oks=$(wc -l < "$results_directory/$payload-response.csv")
((oks == 0)) && continue # If all errors, skip line
# Get Latest Timestamp
duration=$(tail -n1 "$results_directory/$payload.csv" | cut -d, -f8)
throughput=$(echo "$oks/$duration" | bc)
printf "%d,%f\n" "$payload" "$throughput" >> "$results_directory/throughput.csv"
# Generate Latency Data for csv
awk '
@ -104,7 +104,7 @@ for payload in ${payloads[*]}; do
NR==p90 {printf "%1.4f,", $0}
NR==p99 {printf "%1.4f,", $0}
NR==p100 {printf "%1.4f\n", $0}
' < "$results_directory/$payload-response.csv" >> "$results_directory/latency.csv"
# Delete scratch file used for sorting/counting
rm -rf "$results_directory/$payload-response.csv"
@ -112,8 +112,8 @@ done
# Transform csvs to dat files for gnuplot
for file in success latency throughput; do
echo -n "#" > "$results_directory/$file.dat"
tr ',' ' ' < "$results_directory/$file.csv" | column -t >> "$results_directory/$file.dat"
done
# Generate gnuplots

@ -12,7 +12,7 @@ results_directory="$experiment_directory/res/$timestamp/$scheduler"
log=log.txt
mkdir -p "$results_directory"
log_environment >> "$results_directory/$log"
# Start the runtime
PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" | tee -a "$results_directory/$log"

@ -27,34 +27,34 @@ sleep 30
echo "Running Experiments"
# Run each separately
hey -z ${duration_sec}s -cpus 6 -c 100 -t 0 -o csv -m GET -d "10\n" "http://$host:10010" > "$results_directory/fib10.csv"
echo "fib(10) Complete"
sleep 60
hey -z ${duration_sec}s -cpus 6 -c 100 -t 0 -o csv -m GET -d "40\n" "http://$host:10040" > "$results_directory/fib40.csv"
echo "fib(40) Complete"
sleep 120
# Run lower priority first, then higher priority. The lower priority has offsets to ensure it runs the entire time the high priority is trying to run
hey -z $((duration_sec + 2 * offset))s -cpus 3 -c 100 -t 0 -o csv -m GET -d "40\n" "http://$host:10040" > "$results_directory/fib40-con.csv" &
sleep $offset
hey -z ${duration_sec}s -cpus 3 -c 100 -t 0 -o csv -m GET -d "10\n" "http://$host:10010" > "$results_directory/fib10-con.csv" &
sleep $((duration_sec + offset + 15))
echo "fib(10) & fib(40) Complete"
# Generate *.csv and *.dat results
echo -n "Parsing Results: "
printf "Payload,Success_Rate\n" >> "$results_directory/success.csv"
printf "Payload,Throughput\n" >> "$results_directory/throughput.csv"
printf "Payload,p50,p90,p99,p100\n" >> "$results_directory/latency.csv"
durations_s=(15 15 15 25)
payloads=(fib10 fib10-con fib40 fib40-con)
for payload in ${payloads[*]}; do
# Get Number of Requests
requests=$(($(wc -l < "$results_directory/$payload.csv") - 1))
((requests == 0)) && continue
duration=${durations_s[$i]}
@ -63,20 +63,20 @@ for payload in ${payloads[*]}; do
awk -F, '
$7 == 200 {ok++}
END{printf "'"$payload"',%3.5f%%\n", (ok / (NR - 1) * 100)}
' < "$results_directory/$payload.csv" >> "$results_directory/success.csv"
# Filter on 200s, convert from s to ms, and sort
awk -F, '$7 == 200 {print ($1 * 1000)}' < "$results_directory/$payload.csv" \
| sort -g > "$results_directory/$payload-response.csv"
# Get Number of 200s
oks=$(wc -l < "$results_directory/$payload-response.csv")
((oks == 0)) && continue # If all errors, skip line
# Get Latest Timestamp
# duration=$(tail -n1 "$results_directory/$payload.csv" | cut -d, -f8)
throughput=$(echo "$oks/$duration" | bc)
printf "%s,%f\n" "$payload" "$throughput" >> "$results_directory/throughput.csv"
# Generate Latency Data for csv
awk '
@ -92,7 +92,7 @@ for payload in ${payloads[*]}; do
NR==p90 {printf "%1.4f,", $0}
NR==p99 {printf "%1.4f,", $0}
NR==p100 {printf "%1.4f\n", $0}
' < "$results_directory/$payload-response.csv" >> "$results_directory/latency.csv"
# Delete scratch file used for sorting/counting
# rm -rf "$results_directory/$payload-response.csv"
@ -100,8 +100,8 @@ done
# Transform csvs to dat files for gnuplot
for file in success latency throughput; do
echo -n "#" > "$results_directory/$file.dat"
tr ',' ' ' < "$results_directory/$file.csv" | column -t >> "$results_directory/$file.dat"
done
# Generate gnuplots. Commented out because we don't have *.gnuplots defined

@ -11,16 +11,16 @@ results_directory="$experiment_directory/res/$timestamp"
# Generate *.csv and *.dat results
echo -n "Parsing Results: "
printf "Payload,Success_Rate\n" >> "$results_directory/success.csv"
printf "Payload,Throughput\n" >> "$results_directory/throughput.csv"
printf "Payload,p50,p90,p99,p998,p999,p100\n" >> "$results_directory/latency.csv"
durations_s=(15 15 15 25)
payloads=(fib10 fib10-con fib40 fib40-con)
for payload in ${payloads[*]}; do
# Get Number of Requests
requests=$(($(wc -l < "$results_directory/$payload.csv") - 1))
((requests == 0)) && continue
duration=${durations_s[$i]}
@ -29,20 +29,20 @@ for payload in ${payloads[*]}; do
awk -F, '
$7 == 200 {ok++}
END{printf "'"$payload"',%3.5f%%\n", (ok / (NR - 1) * 100)}
' < "$results_directory/$payload.csv" >> "$results_directory/success.csv"
# Filter on 200s, convert from s to ms, and sort
awk -F, '$7 == 200 {print ($1 * 1000)}' < "$results_directory/$payload.csv" \
| sort -g > "$results_directory/$payload-response.csv"
# Get Number of 200s
oks=$(wc -l < "$results_directory/$payload-response.csv")
((oks == 0)) && continue # If all errors, skip line
# Get Latest Timestamp
# duration=$(tail -n1 "$results_directory/$payload.csv" | cut -d, -f8)
throughput=$(echo "$oks/$duration" | bc)
printf "%s,%f\n" "$payload" "$throughput" >> "$results_directory/throughput.csv"
# Generate Latency Data for csv
awk '
@ -62,7 +62,7 @@ for payload in ${payloads[*]}; do
NR==p998 {printf "%1.4f,", $0}
NR==p999 {printf "%1.4f,", $0}
NR==p100 {printf "%1.4f\n", $0}
' < "$results_directory/$payload-response.csv" >> "$results_directory/latency.csv"
# Delete scratch file used for sorting/counting
# rm -rf "$results_directory/$payload-response.csv"
@ -70,8 +70,8 @@ done
# Transform csvs to dat files for gnuplot
for file in success latency throughput; do
echo -n "#" > "$results_directory/$file.dat"
tr ',' ' ' < "$results_directory/$file.csv" | column -t >> "$results_directory/$file.dat"
done
# Generate gnuplots. Commented out because we don't have *.gnuplots defined

@ -15,15 +15,15 @@ for scheduler in ${schedulers[*]}; do
log=log.txt
mkdir -p "$results_directory"
log_environment >> "$results_directory/$log"
# Start the runtime
if [ "$1" != "-d" ]; then
SLEDGE_NWORKERS=5 SLEDGE_SCHEDULER=$scheduler PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" >> "$results_directory/$log" 2>> "$results_directory/$log" &
sleep 1
else
echo "Running under gdb"
echo "Running under gdb" >> "$results_directory/$log"
fi
inputs=(40 10)
@ -40,13 +40,13 @@ for scheduler in ${schedulers[*]}; do
echo "Running Experiments"
# Run each separately
hey -z ${duration_sec}s -cpus 4 -c 100 -t 0 -o csv -m GET -d "40\n" http://localhost:10040 > "$results_directory/fib40.csv"
hey -z ${duration_sec}s -cpus 4 -c 100 -t 0 -o csv -m GET -d "10\n" http://localhost:10010 > "$results_directory/fib10.csv"
# Run lower priority first, then higher priority. The lower priority has offsets to ensure it runs the entire time the high priority is trying to run
hey -z $((duration_sec + 2 * offset))s -cpus 2 -c 100 -t 0 -o csv -m GET -d "40\n" http://localhost:10040 > "$results_directory/fib40-con.csv" &
sleep $offset
hey -z ${duration_sec}s -cpus 2 -c 100 -t 0 -o csv -m GET -d "10\n" http://localhost:10010 > "$results_directory/fib10-con.csv" &
sleep $((duration_sec + offset + 15))
# Stop the runtime if not in debug mode
@ -55,9 +55,9 @@ for scheduler in ${schedulers[*]}; do
# Generate *.csv and *.dat results
echo -n "Parsing Results: "
printf "Payload,Success_Rate\n" >> "$results_directory/success.csv"
printf "Payload,Throughput\n" >> "$results_directory/throughput.csv"
printf "Payload,p50,p90,p99,p100\n" >> "$results_directory/latency.csv"
deadlines_ms=(2 2 3000 3000)
payloads=(fib10 fib10-con fib40 fib40-con)
@ -68,27 +68,27 @@ for scheduler in ${schedulers[*]}; do
deadline=${deadlines_ms[$i]}
# Get Number of Requests
requests=$(($(wc -l < "$results_directory/$payload.csv") - 1))
((requests == 0)) && continue
# Calculate Success Rate for csv
awk -F, '
$7 == 200 && ($1 * 1000) <= '"$deadline"' {ok++}
END{printf "'"$payload"',%3.5f%%\n", (ok / (NR - 1) * 100)}
' < "$results_directory/$payload.csv" >> "$results_directory/success.csv"
# Filter on 200s, convert from s to ms, and sort
awk -F, '$7 == 200 {print ($1 * 1000)}' < "$results_directory/$payload.csv" \
| sort -g > "$results_directory/$payload-response.csv"
# Get Number of 200s
oks=$(wc -l < "$results_directory/$payload-response.csv")
((oks == 0)) && continue # If all errors, skip line
# Get Latest Timestamp
duration=$(tail -n1 "$results_directory/$payload.csv" | cut -d, -f8)
throughput=$(echo "$oks/$duration" | bc)
printf "%s,%f\n" "$payload" "$throughput" >> "$results_directory/throughput.csv"
# Generate Latency Data for csv
awk '
@ -104,7 +104,7 @@ for scheduler in ${schedulers[*]}; do
NR==p90 {printf "%1.4f,", $0}
NR==p99 {printf "%1.4f,", $0}
NR==p100 {printf "%1.4f\n", $0}
' < "$results_directory/$payload-response.csv" >> "$results_directory/latency.csv"
# Delete scratch file used for sorting/counting
# rm -rf "$results_directory/$payload-response.csv"
@ -112,8 +112,8 @@ for scheduler in ${schedulers[*]}; do
# Transform csvs to dat files for gnuplot
for file in success latency throughput; do
echo -n "#" > "$results_directory/$file.dat"
tr ',' ' ' < "$results_directory/$file.csv" | column -t >> "$results_directory/$file.dat"
done
# Generate gnuplots. Commented out because we don't have *.gnuplots defined

@ -15,15 +15,15 @@ for scheduler in ${schedulers[*]}; do
log=log.txt
mkdir -p "$results_directory"
log_environment >> "$results_directory/$log"
# Start the runtime
if [ "$1" != "-d" ]; then
SLEDGE_NWORKERS=5 SLEDGE_SCHEDULER=$scheduler PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" >> "$results_directory/$log" 2>> "$results_directory/$log" &
sleep 1
else
echo "Running under gdb"
echo "Running under gdb" >> "$results_directory/$log"
fi
inputs=(40 10)
@ -40,13 +40,13 @@ for scheduler in ${schedulers[*]}; do
echo "Running Experiments"
# Run each separately
hey -z ${duration_sec}s -cpus 4 -c 100 -t 0 -o csv -m GET -d "40\n" http://localhost:10040 > "$results_directory/fib40.csv"
hey -z ${duration_sec}s -cpus 4 -c 100 -t 0 -o csv -m GET -d "10\n" http://localhost:10010 > "$results_directory/fib10.csv"
# Run lower priority first, then higher priority. The lower priority has offsets to ensure it runs the entire time the high priority is trying to run
hey -z $((duration_sec + 2 * offset))s -cpus 2 -c 100 -t 0 -o csv -m GET -d "40\n" http://localhost:10040 > "$results_directory/fib40-con.csv" &
sleep $offset
hey -z ${duration_sec}s -cpus 2 -c 100 -t 0 -o csv -m GET -d "10\n" http://localhost:10010 > "$results_directory/fib10-con.csv" &
sleep $((duration_sec + offset + 15))
# Stop the runtime if not in debug mode
@ -55,9 +55,9 @@ for scheduler in ${schedulers[*]}; do
# Generate *.csv and *.dat results
echo -n "Parsing Results: "
printf "Payload,Success_Rate\n" >> "$results_directory/success.csv"
printf "Payload,Throughput\n" >> "$results_directory/throughput.csv"
printf "Payload,p50,p90,p99,p100\n" >> "$results_directory/latency.csv"
deadlines_ms=(2 2 3000 3000)
payloads=(fib10 fib10-con fib40 fib40-con)
@ -68,27 +68,27 @@ for scheduler in ${schedulers[*]}; do
deadline=${deadlines_ms[$i]}
# Get Number of Requests
requests=$(($(wc -l < "$results_directory/$payload.csv") - 1))
((requests == 0)) && continue
# Calculate Success Rate for csv
awk -F, '
$7 == 200 && ($1 * 1000) <= '"$deadline"' {ok++}
END{printf "'"$payload"',%3.5f%%\n", (ok / (NR - 1) * 100)}
' < "$results_directory/$payload.csv" >> "$results_directory/success.csv"
# Filter on 200s, convert from s to ms, and sort
awk -F, '$7 == 200 {print ($1 * 1000)}' < "$results_directory/$payload.csv" \
| sort -g > "$results_directory/$payload-response.csv"
# Get Number of 200s
oks=$(wc -l < "$results_directory/$payload-response.csv")
((oks == 0)) && continue # If all errors, skip line
# Get Latest Timestamp
duration=$(tail -n1 "$results_directory/$payload.csv" | cut -d, -f8)
throughput=$(echo "$oks/$duration" | bc)
printf "%s,%f\n" "$payload" "$throughput" >> "$results_directory/throughput.csv"
# Generate Latency Data for csv
awk '
@ -104,7 +104,7 @@ for scheduler in ${schedulers[*]}; do
NR==p90 {printf "%1.4f%%,", $0 / '"$deadline"' * 100}
NR==p99 {printf "%1.4f%%,", $0 / '"$deadline"' * 100}
NR==p100 {printf "%1.4f%%\n", $0 / '"$deadline"' * 100}
' < "$results_directory/$payload-response.csv" >> "$results_directory/latency.csv"
# Delete scratch file used for sorting/counting
# rm -rf "$results_directory/$payload-response.csv"
@ -112,8 +112,8 @@ for scheduler in ${schedulers[*]}; do
# Transform csvs to dat files for gnuplot
for file in success latency throughput; do
echo -n "#" > "$results_directory/$file.dat"
tr ',' ' ' < "$results_directory/$file.csv" | column -t >> "$results_directory/$file.dat"
done
# Generate gnuplots. Commented out because we don't have *.gnuplots defined

@ -5,15 +5,14 @@ ITERS=$3
# before running this benchmark,
# copy fibonacci to fibonacci_native.out
testeach() {
tmp_cnt=${ITERS}
exe_relpath=$1
echo "${exe_relpath} ($2) for ${tmp_cnt}"
while [ ${tmp_cnt} -gt 0 ]; do
bench=$(echo $2 | $exe_relpath 2> /dev/null)
tmp_cnt=$((tmp_cnt - 1))
echo "$bench"
done
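For readers skimming the reformatted helper: it echoes its second argument into the binary once per iteration and prints each raw benchmark line. Hypothetical usage, assuming ITERS was set from $3 and fibonacci was copied to fibonacci_native.out as the comments describe:

# e.g. after `cp fibonacci fibonacci_native.out` and ITERS=10:
testeach ./fibonacci_native.out 30 # runs fib(30) ten times, one result per line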
