chore: Apply shfmt to shell scripts

main
Sean McBride 4 years ago
parent 66482d012c
commit de5ee9af53

@ -69,5 +69,7 @@
"C_Cpp.files.exclude": { "C_Cpp.files.exclude": {
"awsm/wasmception": true, "awsm/wasmception": true,
"**/.vscode": true "**/.vscode": true
} },
"shellformat.flag": "-ln=bash -i 0 -bn -ci -sr -kp"
} }

@ -29,91 +29,91 @@ SYS_BUILD_TIMEOUT=0
# Provides help to user on how to use this script # Provides help to user on how to use this script
usage() { usage() {
echo "usage $0 <setup||run||stop||rm||rma/>" echo "usage $0 <setup||run||stop||rm||rma/>"
echo " setup Build a sledge runtime container and sledge-dev, a build container with toolchain needed to compile your own functions" echo " setup Build a sledge runtime container and sledge-dev, a build container with toolchain needed to compile your own functions"
echo " run Start the sledge Docker image as an interactive container with this repository mounted" echo " run Start the sledge Docker image as an interactive container with this repository mounted"
echo " stop Stop and remove the sledge Docker container after use" echo " stop Stop and remove the sledge Docker container after use"
echo " rm Remove the sledge runtime container and image, but leaves the sledge-dev container in place" echo " rm Remove the sledge runtime container and image, but leaves the sledge-dev container in place"
echo " rma Removes all the sledge and sledge-dev containers and images" echo " rma Removes all the sledge and sledge-dev containers and images"
} }
# Given a number of seconds, initiates a countdown sequence # Given a number of seconds, initiates a countdown sequence
countdown() { countdown() {
tmp_cnt=$1 tmp_cnt=$1
while [ "${tmp_cnt}" -gt 0 ]; do while [ "${tmp_cnt}" -gt 0 ]; do
printf "%d." "${tmp_cnt}" printf "%d." "${tmp_cnt}"
sleep 1 sleep 1
tmp_cnt=$((tmp_cnt - 1)) tmp_cnt=$((tmp_cnt - 1))
done done
echo echo
} }
# Build and runs the build container sledge-dev and then executes make install on the project # Build and runs the build container sledge-dev and then executes make install on the project
# Finally "forks" the sledge-dev build container into the sledge execution container # Finally "forks" the sledge-dev build container into the sledge execution container
envsetup() { envsetup() {
# I want to create this container before the Makefile executes so that my user owns it # I want to create this container before the Makefile executes so that my user owns it
# This allows me to execute the sledgert binary from my local host # This allows me to execute the sledgert binary from my local host
mkdir -p "$HOST_ROOT/runtime/bin" mkdir -p "$HOST_ROOT/runtime/bin"
# Check to see if the sledge:latest image exists, exiting if it does # Check to see if the sledge:latest image exists, exiting if it does
# Because sledge:latest is "forked" after completing envsetup, this suggests that envsetup was already run # Because sledge:latest is "forked" after completing envsetup, this suggests that envsetup was already run
if docker image inspect ${SYS_DOC_NAMETAG} 1>/dev/null 2>/dev/null; then if docker image inspect ${SYS_DOC_NAMETAG} 1> /dev/null 2> /dev/null; then
echo "${SYS_DOC_NAMETAG} image exists, which means that 'devenv.sh setup' already ran to completion!" echo "${SYS_DOC_NAMETAG} image exists, which means that 'devenv.sh setup' already ran to completion!"
echo "If you are explicitly trying to rebuild SLEdge, run the following:" echo "If you are explicitly trying to rebuild SLEdge, run the following:"
echo "devenv.sh rma | Removes the images sledge:latest AND sledge-dev:latest" echo "devenv.sh rma | Removes the images sledge:latest AND sledge-dev:latest"
exit 1 exit 1
fi fi
echo "Setting up ${SYS_NAME}" echo "Setting up ${SYS_NAME}"
echo "Updating git submodules" echo "Updating git submodules"
git submodule update --init --recursive 2>/dev/null || :d git submodule update --init --recursive 2> /dev/null || :d
echo "Using Dockerfile.$(uname -m)" echo "Using Dockerfile.$(uname -m)"
rm -f Dockerfile rm -f Dockerfile
ln -s Dockerfile.$(uname -m) Dockerfile ln -s Dockerfile.$(uname -m) Dockerfile
# As a user nicety, warn the user if sledge-dev is detected # As a user nicety, warn the user if sledge-dev is detected
# This UX differs from detecting sledge, which immediately exits # This UX differs from detecting sledge, which immediately exits
# This is disabled because it doesn't seem useful # This is disabled because it doesn't seem useful
if if
docker image inspect "${SYS_DOC_DEVNAMETAG}" 1>/dev/null 2>/dev/null && [ $SYS_BUILD_TIMEOUT -gt 0 ] docker image inspect "${SYS_DOC_DEVNAMETAG}" 1> /dev/null 2> /dev/null && [ $SYS_BUILD_TIMEOUT -gt 0 ]
then then
echo "${SYS_DOC_DEVNAME} image exists, rebuilding it" echo "${SYS_DOC_DEVNAME} image exists, rebuilding it"
echo "(you have ${SYS_BUILD_TIMEOUT}secs to stop the rebuild)" echo "(you have ${SYS_BUILD_TIMEOUT}secs to stop the rebuild)"
countdown ${SYS_BUILD_TIMEOUT} countdown ${SYS_BUILD_TIMEOUT}
fi fi
# Build the image sledge-dev:latest # Build the image sledge-dev:latest
echo "Building ${SYS_DOC_DEVNAMETAG}" echo "Building ${SYS_DOC_DEVNAMETAG}"
docker build --tag "${SYS_DOC_DEVNAMETAG}" . docker build --tag "${SYS_DOC_DEVNAMETAG}" .
# Run the sledge-dev:latest image as a background container named sledge-dev with the project directly mounted at /sledge # Run the sledge-dev:latest image as a background container named sledge-dev with the project directly mounted at /sledge
echo "Creating the build container ${SYS_DOC_NAMETAG} from the image ${SYS_DOC_DEVNAMETAG}" echo "Creating the build container ${SYS_DOC_NAMETAG} from the image ${SYS_DOC_DEVNAMETAG}"
docker run \ docker run \
--privileged \ --privileged \
--name=${SYS_DOC_DEVNAME} \ --name=${SYS_DOC_DEVNAME} \
--detach \ --detach \
--mount type=bind,src="$(cd "$(dirname "${0}")" && pwd -P || exit 1),target=/${SYS_NAME}" \ --mount type=bind,src="$(cd "$(dirname "${0}")" && pwd -P || exit 1),target=/${SYS_NAME}" \
"${SYS_DOC_DEVNAMETAG}" /bin/sleep 99999999 >/dev/null "${SYS_DOC_DEVNAMETAG}" /bin/sleep 99999999 > /dev/null
# Execute the make install command on the sledge-dev image to build the project # Execute the make install command on the sledge-dev image to build the project
echo "Building ${SYS_NAME}" echo "Building ${SYS_NAME}"
docker exec \ docker exec \
--tty \ --tty \
--workdir "${HOST_SYS_MOUNT}" \ --workdir "${HOST_SYS_MOUNT}" \
${SYS_DOC_DEVNAME} make install ${SYS_DOC_DEVNAME} make install
# Create the image sledge:latest from the current state of docker-dev # Create the image sledge:latest from the current state of docker-dev
echo "Tagging the new image" echo "Tagging the new image"
docker container commit ${SYS_DOC_DEVNAME} ${SYS_DOC_NAMETAG} docker container commit ${SYS_DOC_DEVNAME} ${SYS_DOC_NAMETAG}
# Kill and remove the running sledge-dev container # Kill and remove the running sledge-dev container
echo "Cleaning up ${SYS_DOC_DEVNAME}" echo "Cleaning up ${SYS_DOC_DEVNAME}"
docker kill ${SYS_DOC_DEVNAME} docker kill ${SYS_DOC_DEVNAME}
docker rm ${SYS_DOC_DEVNAME} docker rm ${SYS_DOC_DEVNAME}
echo "Done!" echo "Done!"
} }
# Executes an interactive BASH shell in the sledge container with /sledge as the working directory # Executes an interactive BASH shell in the sledge container with /sledge as the working directory
@ -121,75 +121,75 @@ envsetup() {
# If the image sledge:latest does not exist, automatically runs envsetup to build sledge and create it # If the image sledge:latest does not exist, automatically runs envsetup to build sledge and create it
# If the a container names sledge is not running, starts it from sledge:latest, mounting the SLEdge project directory to /sledge # If the a container names sledge is not running, starts it from sledge:latest, mounting the SLEdge project directory to /sledge
envrun() { envrun() {
if ! docker image inspect ${SYS_DOC_NAMETAG} >/dev/null; then if ! docker image inspect ${SYS_DOC_NAMETAG} > /dev/null; then
envsetup envsetup
fi fi
if docker ps -f name=${SYS_DOC_NAME} --format '{{.Names}}' | grep -q "^${SYS_DOC_NAME}"; then if docker ps -f name=${SYS_DOC_NAME} --format '{{.Names}}' | grep -q "^${SYS_DOC_NAME}"; then
echo "Container is running" >&2 echo "Container is running" >&2
else else
echo "Starting ${SYS_DOC_NAME}" echo "Starting ${SYS_DOC_NAME}"
docker run \ docker run \
--privileged \ --privileged \
--security-opt seccomp:unconfined \ --security-opt seccomp:unconfined \
--name=${SYS_DOC_NAME} \ --name=${SYS_DOC_NAME} \
--detach \ --detach \
--mount type=bind,src="$(cd "$(dirname "${0}")" && pwd -P || exit 1),target=/${SYS_NAME}" \ --mount type=bind,src="$(cd "$(dirname "${0}")" && pwd -P || exit 1),target=/${SYS_NAME}" \
${SYS_DOC_NAMETAG} /bin/sleep 99999999 >/dev/null ${SYS_DOC_NAMETAG} /bin/sleep 99999999 > /dev/null
fi fi
echo "Running shell" echo "Running shell"
docker exec --tty --interactive --workdir "${HOST_SYS_MOUNT}" ${SYS_DOC_NAME} /bin/bash docker exec --tty --interactive --workdir "${HOST_SYS_MOUNT}" ${SYS_DOC_NAME} /bin/bash
} }
# Stops and removes the sledge "runtime" container # Stops and removes the sledge "runtime" container
envstop() { envstop() {
echo "Stopping container" echo "Stopping container"
docker stop ${SYS_DOC_NAME} docker stop ${SYS_DOC_NAME}
echo "Removing container" echo "Removing container"
docker rm ${SYS_DOC_NAME} docker rm ${SYS_DOC_NAME}
} }
# Stops and removes the sledge "runtime" container and then removes the sledge "runtime" image # Stops and removes the sledge "runtime" container and then removes the sledge "runtime" image
envrm() { envrm() {
envstop envstop
docker rmi ${SYS_DOC_NAME} docker rmi ${SYS_DOC_NAME}
} }
# Stops and removes the sledge "runtime" container and image and then removes the sledge-dev "build image" image # Stops and removes the sledge "runtime" container and image and then removes the sledge-dev "build image" image
envrma() { envrma() {
envrm envrm
docker rmi ${SYS_DOC_DEVNAME} docker rmi ${SYS_DOC_DEVNAME}
} }
if [ $# -ne 1 ]; then if [ $# -ne 1 ]; then
echo "incorrect number of arguments: $*" echo "incorrect number of arguments: $*"
usage "$0" usage "$0"
exit 1 exit 1
fi fi
case $1 in case $1 in
run) run)
envrun envrun
;; ;;
stop) stop)
envstop envstop
;; ;;
setup) setup)
envsetup envsetup
;; ;;
rm) rm)
envrm envrm
;; ;;
rma) rma)
envrma envrma
;; ;;
*) *)
echo "invalid option: $1" echo "invalid option: $1"
usage "$0" usage "$0"
exit 1 exit 1
;; ;;
esac esac
echo echo
echo "done!" echo "done!"

@ -2,7 +2,7 @@
validate() { validate() {
utility="clang-format" utility="clang-format"
utility_version="$("$utility" --version 2>/dev/null)" || { utility_version="$("$utility" --version 2> /dev/null)" || {
echo "$utility not found in path!" echo "$utility not found in path!"
exit 1 exit 1
} }
@ -44,19 +44,19 @@ help() {
dry_run() { dry_run() {
find runtime \ find runtime \
\( -path "runtime/thirdparty" -o -path "runtime/tests/gocr" -o -path "runtime/tests/TinyEKF" -o -path "runtime/tests/CMSIS_5_NN" -o -path "runtime/tests/sod" -o -path "runtime/tests/**/thirdparty" \) -prune -false -o \ \( -path "runtime/thirdparty" -o -path "runtime/tests/gocr" -o -path "runtime/tests/TinyEKF" -o -path "runtime/tests/CMSIS_5_NN" -o -path "runtime/tests/sod" -o -path "runtime/tests/**/thirdparty" \) -prune -false -o \
-type f \( -iname \*.h -o -iname \*.c -o -iname \*.s \) -print | -type f \( -iname \*.h -o -iname \*.c -o -iname \*.s \) -print \
xargs clang-format -Werror -n -ferror-limit=0 | xargs clang-format -Werror -n -ferror-limit=0
} }
format() { format() {
find runtime \ find runtime \
\( -path "runtime/thirdparty" -o -path "runtime/tests/gocr" -o -path "runtime/tests/TinyEKF" -o -path "runtime/tests/CMSIS_5_NN" -o -path "runtime/tests/sod" -o -path "runtime/tests/**/thirdparty" \) -prune -false -o \ \( -path "runtime/thirdparty" -o -path "runtime/tests/gocr" -o -path "runtime/tests/TinyEKF" -o -path "runtime/tests/CMSIS_5_NN" -o -path "runtime/tests/sod" -o -path "runtime/tests/**/thirdparty" \) -prune -false -o \
-type f \( -iname \*.h -o -iname \*.c -o -iname \*.s \) -print | -type f \( -iname \*.h -o -iname \*.c -o -iname \*.s \) -print \
xargs clang-format -i | xargs clang-format -i
} }
case $1 in case $1 in
"-h" | "--help") help ;; "-h" | "--help") help ;;
"-d" | "--dry-run") validate && dry_run ;; "-d" | "--dry-run") validate && dry_run ;;
"") validate && format ;; "") validate && format ;;
esac esac

@ -26,29 +26,29 @@ echo "Setting up toolchain environment"
for last_arg in "$@"; do :; done for last_arg in "$@"; do :; done
if [[ $last_arg == "-d" ]] || [[ $last_arg == "--dry-run" ]]; then if [[ $last_arg == "-d" ]] || [[ $last_arg == "--dry-run" ]]; then
DRY_RUN=true DRY_RUN=true
else else
DRY_RUN=false DRY_RUN=false
fi fi
if $DRY_RUN; then if $DRY_RUN; then
DRY_RUN_PREFIX=echo DRY_RUN_PREFIX=echo
else else
DRY_RUN_PREFIX= DRY_RUN_PREFIX=
fi fi
# Get the absolute path of the topmost project directly # Get the absolute path of the topmost project directly
# The use of dirname is particular. It seems unneccesary how this script is run # The use of dirname is particular. It seems unneccesary how this script is run
SYS_SRC_PREFIX=${SYS_SRC_PREFIX:-"$( SYS_SRC_PREFIX=${SYS_SRC_PREFIX:-"$(
cd "$(dirname "$(dirname "${0}")")" || exit 1 cd "$(dirname "$(dirname "${0}")")" || exit 1
pwd -P pwd -P
)"} )"}
$DRY_RUN && echo SYS_SRC_PREFIX: "$SYS_SRC_PREFIX" $DRY_RUN && echo SYS_SRC_PREFIX: "$SYS_SRC_PREFIX"
# And check for the presence of this script to make sure we got it right # And check for the presence of this script to make sure we got it right
if [ ! -x "${SYS_SRC_PREFIX}/install.sh" ]; then if [ ! -x "${SYS_SRC_PREFIX}/install.sh" ]; then
echo "Unable to find the install script" >&2 echo "Unable to find the install script" >&2
exit 1 exit 1
fi fi
SYS_NAME='sledge' SYS_NAME='sledge'
@ -79,21 +79,21 @@ $DRY_RUN && echo SYS_LIB_DIR: "$SYS_LIB_DIR"
# The default is wasmception # The default is wasmception
# Currently, WASI is not actually supported by the runtime. # Currently, WASI is not actually supported by the runtime.
if [ $# -eq 0 ] || [ "$1" = "wasmception" ]; then if [ $# -eq 0 ] || [ "$1" = "wasmception" ]; then
echo "Setting up for wasmception" echo "Setting up for wasmception"
WASM_PREFIX=${WASM_PREFIX:-"${SYS_SRC_PREFIX}/${COMPILER}/wasmception"} WASM_PREFIX=${WASM_PREFIX:-"${SYS_SRC_PREFIX}/${COMPILER}/wasmception"}
WASM_BIN=${WASM_BIN:-"${WASM_PREFIX}/dist/bin"} WASM_BIN=${WASM_BIN:-"${WASM_PREFIX}/dist/bin"}
WASM_SYSROOT=${WASM_SYSROOT:-"${WASM_PREFIX}/sysroot"} WASM_SYSROOT=${WASM_SYSROOT:-"${WASM_PREFIX}/sysroot"}
WASM_TARGET=${WASM_TARGET:-"wasm32-unknown-unknown-wasm"} WASM_TARGET=${WASM_TARGET:-"wasm32-unknown-unknown-wasm"}
WASM_BIN_PREFIX=${WASM_BIN_PREFIX:-"$WASM_TARGET"} WASM_BIN_PREFIX=${WASM_BIN_PREFIX:-"$WASM_TARGET"}
WASM_TOOLS=(ar) WASM_TOOLS=(ar)
elif [ "$1" = "wasi" ]; then elif [ "$1" = "wasi" ]; then
echo "Setting up for wasi-sdk" echo "Setting up for wasi-sdk"
WASM_PREFIX=${WASM_PREFIX:-${WASM_SDK:-"/opt/wasi-sdk"}} WASM_PREFIX=${WASM_PREFIX:-${WASM_SDK:-"/opt/wasi-sdk"}}
WASM_BIN=${WASM_BIN:-"${WASM_PREFIX}/bin"} WASM_BIN=${WASM_BIN:-"${WASM_PREFIX}/bin"}
WASM_SYSROOT=${WASM_SYSROOT:-"${WASM_PREFIX}/share/sysroot"} WASM_SYSROOT=${WASM_SYSROOT:-"${WASM_PREFIX}/share/sysroot"}
WASM_TARGET=${WASM_TARGET:-"wasm32-wasi"} WASM_TARGET=${WASM_TARGET:-"wasm32-wasi"}
WASM_BIN_PREFIX=${WASM_BIN_PREFIX:-"$WASM_TARGET"} WASM_BIN_PREFIX=${WASM_BIN_PREFIX:-"$WASM_TARGET"}
WASM_TOOLS=(ar dwarfdump nm ranlib size) WASM_TOOLS=(ar dwarfdump nm ranlib size)
fi fi
$DRY_RUN && echo WASM_PREFIX: "$WASM_PREFIX" $DRY_RUN && echo WASM_PREFIX: "$WASM_PREFIX"
$DRY_RUN && echo WASM_BIN: "$WASM_BIN" $DRY_RUN && echo WASM_BIN: "$WASM_BIN"
@ -117,27 +117,27 @@ $DRY_RUN_PREFIX ln -sfv "${SYS_COMPILER_REL_DIR}/${COMPILER_EXECUTABLE}" "${SYS_
# For example, when wasmception is set, calling `wasm32-unknown-unknown-wasm-clang` results in # For example, when wasmception is set, calling `wasm32-unknown-unknown-wasm-clang` results in
# `exec "/sledge/awsm/wasmception/dist/bin/clang" --target="wasm32-unknown-unknown-wasm" --sysroot="/sledge/awsm/wasmception/sysroot" "$@"` # `exec "/sledge/awsm/wasmception/dist/bin/clang" --target="wasm32-unknown-unknown-wasm" --sysroot="/sledge/awsm/wasmception/sysroot" "$@"`
for file in clang clang++; do for file in clang clang++; do
wrapper_file="$(mktemp)" wrapper_file="$(mktemp)"
cat >"$wrapper_file" <<EOT cat > "$wrapper_file" << EOT
#! /bin/sh #! /bin/sh
exec "${WASM_BIN}/${file}" --target="$WASM_TARGET" --sysroot="$WASM_SYSROOT" "\$@" exec "${WASM_BIN}/${file}" --target="$WASM_TARGET" --sysroot="$WASM_SYSROOT" "\$@"
EOT EOT
cat "$wrapper_file" cat "$wrapper_file"
$DRY_RUN_PREFIX install -p -v "$wrapper_file" "${SYS_BIN_DIR}/${WASM_BIN_PREFIX}-${file}" $DRY_RUN_PREFIX install -p -v "$wrapper_file" "${SYS_BIN_DIR}/${WASM_BIN_PREFIX}-${file}"
$DRY_RUN && echo rm -f "$wrapper_file" $DRY_RUN && echo rm -f "$wrapper_file"
rm -f "$wrapper_file" rm -f "$wrapper_file"
done done
# Link the LLVM Tools with the proper prefix # Link the LLVM Tools with the proper prefix
for file in "${WASM_TOOLS[@]}"; do for file in "${WASM_TOOLS[@]}"; do
$DRY_RUN_PREFIX ln -sfv "${WASM_BIN}/llvm-${file}" "${SYS_BIN_DIR}/${WASM_BIN_PREFIX}-${file}" $DRY_RUN_PREFIX ln -sfv "${WASM_BIN}/llvm-${file}" "${SYS_BIN_DIR}/${WASM_BIN_PREFIX}-${file}"
done done
# Link any other tools with the proper prefix # Link any other tools with the proper prefix
OTHER_TOOLS=(ld) OTHER_TOOLS=(ld)
for file in "${OTHER_TOOLS[@]}"; do for file in "${OTHER_TOOLS[@]}"; do
$DRY_RUN_PREFIX ln -sfv "${WASM_BIN}/wasm-${file}" "${SYS_BIN_DIR}/${WASM_BIN_PREFIX}-${file}" $DRY_RUN_PREFIX ln -sfv "${WASM_BIN}/wasm-${file}" "${SYS_BIN_DIR}/${WASM_BIN_PREFIX}-${file}"
done done
# Link clang as gcc if needed # Link clang as gcc if needed

@ -12,8 +12,8 @@ export LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH"
export PATH="$binary_directory:$PATH" export PATH="$binary_directory:$PATH"
gdb --eval-command="handle SIGUSR1 nostop" \ gdb --eval-command="handle SIGUSR1 nostop" \
--eval-command="handle SIGPIPE nostop" \ --eval-command="handle SIGPIPE nostop" \
--eval-command="set pagination off" \ --eval-command="set pagination off" \
--eval-command="set substitute-path /sledge/runtime $project_directory" \ --eval-command="set substitute-path /sledge/runtime $project_directory" \
--eval-command="run $experiment_directory/spec.json" \ --eval-command="run $experiment_directory/spec.json" \
sledgert sledgert

@ -12,8 +12,8 @@ export LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH"
export PATH="$binary_directory:$PATH" export PATH="$binary_directory:$PATH"
gdb --eval-command="handle SIGUSR1 nostop" \ gdb --eval-command="handle SIGUSR1 nostop" \
--eval-command="handle SIGPIPE nostop" \ --eval-command="handle SIGPIPE nostop" \
--eval-command="set pagination off" \ --eval-command="set pagination off" \
--eval-command="set substitute-path /sledge/runtime $project_directory" \ --eval-command="set substitute-path /sledge/runtime $project_directory" \
--eval-command="run $experiment_directory/spec.json" \ --eval-command="run $experiment_directory/spec.json" \
sledgert sledgert

@ -14,46 +14,46 @@ did_pass=true
# Copy data if not here # Copy data if not here
if [[ ! -f "./initial_state.dat" ]]; then if [[ ! -f "./initial_state.dat" ]]; then
cp $runtime_directory/tests/TinyEKF/extras/c/ekf_raw.dat ./initial_state.dat cp $runtime_directory/tests/TinyEKF/extras/c/ekf_raw.dat ./initial_state.dat
fi fi
if [ "$1" != "-d" ]; then if [ "$1" != "-d" ]; then
SLEDGE_SANDBOX_PERF_LOG=$log PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" >rt.log 2>&1 & SLEDGE_SANDBOX_PERF_LOG=$log PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" > rt.log 2>&1 &
sleep 2 sleep 2
else else
echo "Running under gdb" echo "Running under gdb"
fi fi
success_count=0 success_count=0
total_count=50 total_count=50
for ((i = 0; i < total_count; i++)); do for ((i = 0; i < total_count; i++)); do
echo "$i" echo "$i"
curl -H 'Expect:' -H "Content-Type: application/octet-stream" --data-binary "@initial_state.dat" localhost:10000 2>/dev/null >./one_iteration_res.dat curl -H 'Expect:' -H "Content-Type: application/octet-stream" --data-binary "@initial_state.dat" localhost:10000 2> /dev/null > ./one_iteration_res.dat
curl -H 'Expect:' -H "Content-Type: application/octet-stream" --data-binary "@one_iteration_res.dat" localhost:10001 2>/dev/null >./two_iterations_res.dat curl -H 'Expect:' -H "Content-Type: application/octet-stream" --data-binary "@one_iteration_res.dat" localhost:10001 2> /dev/null > ./two_iterations_res.dat
curl -H 'Expect:' -H "Content-Type: application/octet-stream" --data-binary "@two_iterations_res.dat" localhost:10002 2>/dev/null >./three_iterations_res.dat curl -H 'Expect:' -H "Content-Type: application/octet-stream" --data-binary "@two_iterations_res.dat" localhost:10002 2> /dev/null > ./three_iterations_res.dat
if diff -s one_iteration_res.dat one_iteration.dat && diff -s two_iterations_res.dat two_iterations.dat && diff -s three_iterations_res.dat three_iterations.dat; then if diff -s one_iteration_res.dat one_iteration.dat && diff -s two_iterations_res.dat two_iterations.dat && diff -s three_iterations_res.dat three_iterations.dat; then
success_count=$((success_count + 1)) success_count=$((success_count + 1))
rm *_res.dat rm *_res.dat
else else
echo "FAIL" echo "FAIL"
did_pass=false did_pass=false
rm *_res.dat rm *_res.dat
break break
fi fi
done done
echo "$success_count / $total_count" echo "$success_count / $total_count"
if [ "$1" != "-d" ]; then if [ "$1" != "-d" ]; then
sleep 5 sleep 5
echo -n "Running Cleanup: " echo -n "Running Cleanup: "
pkill sledgert >/dev/null 2>/dev/null pkill sledgert > /dev/null 2> /dev/null
echo "[DONE]" echo "[DONE]"
fi fi
if $did_pass; then if $did_pass; then
exit 0 exit 0
else else
exit 1 exit 1
fi fi

@ -12,8 +12,8 @@ export LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH"
export PATH="$binary_directory:$PATH" export PATH="$binary_directory:$PATH"
gdb --eval-command="handle SIGUSR1 nostop" \ gdb --eval-command="handle SIGUSR1 nostop" \
--eval-command="handle SIGPIPE nostop" \ --eval-command="handle SIGPIPE nostop" \
--eval-command="set pagination off" \ --eval-command="set pagination off" \
--eval-command="set substitute-path /sledge/runtime $project_directory" \ --eval-command="set substitute-path /sledge/runtime $project_directory" \
--eval-command="run $experiment_directory/spec.json" \ --eval-command="run $experiment_directory/spec.json" \
sledgert sledgert

@ -12,44 +12,44 @@ did_pass=true
# Copy data if not here # Copy data if not here
if [[ ! -f "./ekf_raw.dat" ]]; then if [[ ! -f "./ekf_raw.dat" ]]; then
cp ../../../tests/TinyEKF/extras/c/ekf_raw.dat ./ekf_raw.dat cp ../../../tests/TinyEKF/extras/c/ekf_raw.dat ./ekf_raw.dat
fi fi
if [ "$1" != "-d" ]; then if [ "$1" != "-d" ]; then
PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" & PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" &
sleep 1 sleep 1
else else
echo "Running under gdb" echo "Running under gdb"
fi fi
expected_result="$(tr -d '\0' <./expected_result.dat)" expected_result="$(tr -d '\0' < ./expected_result.dat)"
success_count=0 success_count=0
total_count=50 total_count=50
for ((i = 0; i < total_count; i++)); do for ((i = 0; i < total_count; i++)); do
echo "$i" echo "$i"
result="$(curl -H 'Expect:' -H "Content-Type: application/octet-stream" --data-binary "@ekf_raw.dat" localhost:10000 2>/dev/null | tr -d '\0')" result="$(curl -H 'Expect:' -H "Content-Type: application/octet-stream" --data-binary "@ekf_raw.dat" localhost:10000 2> /dev/null | tr -d '\0')"
if [[ "$expected_result" == "$result" ]]; then if [[ "$expected_result" == "$result" ]]; then
success_count=$((success_count + 1)) success_count=$((success_count + 1))
else else
echo "FAIL" echo "FAIL"
did_pass=false did_pass=false
break break
fi fi
done done
echo "$success_count / $total_count" echo "$success_count / $total_count"
if [ "$1" != "-d" ]; then if [ "$1" != "-d" ]; then
sleep 5 sleep 5
echo -n "Running Cleanup: " echo -n "Running Cleanup: "
pkill sledgert >/dev/null 2>/dev/null pkill sledgert > /dev/null 2> /dev/null
echo "[DONE]" echo "[DONE]"
fi fi
if $did_pass; then if $did_pass; then
exit 0 exit 0
else else
exit 1 exit 1
fi fi

@ -12,8 +12,8 @@ export LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH"
export PATH="$binary_directory:$PATH" export PATH="$binary_directory:$PATH"
gdb --eval-command="handle SIGUSR1 nostop" \ gdb --eval-command="handle SIGUSR1 nostop" \
--eval-command="handle SIGPIPE nostop" \ --eval-command="handle SIGPIPE nostop" \
--eval-command="set pagination off" \ --eval-command="set pagination off" \
--eval-command="set substitute-path /sledge/runtime $project_directory" \ --eval-command="set substitute-path /sledge/runtime $project_directory" \
--eval-command="run $experiment_directory/spec.json" \ --eval-command="run $experiment_directory/spec.json" \
sledgert sledgert

@ -15,10 +15,10 @@ binary_directory=$(cd "$project_directory"/bin && pwd)
# fi # fi
if [ "$1" != "-d" ]; then if [ "$1" != "-d" ]; then
PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" & PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" &
sleep 1 sleep 1
else else
echo "Running under gdb" echo "Running under gdb"
fi fi
# expected_size="$(find expected_result.jpg -printf "%s")" # expected_size="$(find expected_result.jpg -printf "%s")"
@ -31,39 +31,39 @@ file_type=bmp
# file_type=png # file_type=png
for class in airplane automobile bird cat deer dog frog horse ship truck; do for class in airplane automobile bird cat deer dog frog horse ship truck; do
for instance in 1 2 3 4 5 6 7 8 9 10; do for instance in 1 2 3 4 5 6 7 8 9 10; do
echo "Classifying $class$instance.$file_type" echo "Classifying $class$instance.$file_type"
curl -H 'Expect:' -H "Content-Type: Image/$file_type" --data-binary "@images/$file_type/$class$instance.$file_type" localhost:10000 2>/dev/null curl -H 'Expect:' -H "Content-Type: Image/$file_type" --data-binary "@images/$file_type/$class$instance.$file_type" localhost:10000 2> /dev/null
done done
done done
exit exit
for ((i = 0; i < total_count; i++)); do for ((i = 0; i < total_count; i++)); do
echo "$i" echo "$i"
ext="$RANDOM" ext="$RANDOM"
curl -H 'Expect:' -H "Content-Type: image/jpg" --data-binary "@plate.jpg" --output "result_$ext.jpg" localhost:10000 2>/dev/null curl -H 'Expect:' -H "Content-Type: image/jpg" --data-binary "@plate.jpg" --output "result_$ext.jpg" localhost:10000 2> /dev/null
actual_size="$(find result_"$ext".jpg -printf "%s")" actual_size="$(find result_"$ext".jpg -printf "%s")"
# echo "$result" # echo "$result"
if [[ "$expected_size" == "$actual_size" ]]; then if [[ "$expected_size" == "$actual_size" ]]; then
echo "SUCCESS $success_count" echo "SUCCESS $success_count"
else else
echo "FAIL" echo "FAIL"
echo "Expected Size:" echo "Expected Size:"
echo "$expected_size" echo "$expected_size"
echo "===============================================" echo "==============================================="
echo "Actual Size:" echo "Actual Size:"
echo "$actual_size" echo "$actual_size"
fi fi
done done
echo "$success_count / $total_count" echo "$success_count / $total_count"
if [ "$1" != "-d" ]; then if [ "$1" != "-d" ]; then
sleep 5 sleep 5
echo -n "Running Cleanup: " echo -n "Running Cleanup: "
rm result_*.jpg rm result_*.jpg
pkill sledgert >/dev/null 2>/dev/null pkill sledgert > /dev/null 2> /dev/null
echo "[DONE]" echo "[DONE]"
fi fi

@ -12,8 +12,8 @@ export LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH"
export PATH="$binary_directory:$PATH" export PATH="$binary_directory:$PATH"
gdb --eval-command="handle SIGUSR1 nostop" \ gdb --eval-command="handle SIGUSR1 nostop" \
--eval-command="handle SIGPIPE nostop" \ --eval-command="handle SIGPIPE nostop" \
--eval-command="set pagination off" \ --eval-command="set pagination off" \
--eval-command="set substitute-path /sledge/runtime $project_directory" \ --eval-command="set substitute-path /sledge/runtime $project_directory" \
--eval-command="run $experiment_directory/spec.json" \ --eval-command="run $experiment_directory/spec.json" \
sledgert sledgert

@ -2,7 +2,7 @@
# Installs the deps needed for run.sh # Installs the deps needed for run.sh
if [ "$(whoami)" == "root" ]; then if [ "$(whoami)" == "root" ]; then
apt-get install imagemagick apt-get install imagemagick
else else
sudo apt-get install imagemagick sudo apt-get install imagemagick
fi fi

@ -11,54 +11,54 @@ binary_directory=$(cd "$project_directory"/bin && pwd)
log="$experiment_directory/log.csv" log="$experiment_directory/log.csv"
if [ "$1" != "-d" ]; then if [ "$1" != "-d" ]; then
SLEDGE_SANDBOX_PERF_LOG=$log PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" & SLEDGE_SANDBOX_PERF_LOG=$log PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" &
sleep 1 sleep 1
else else
echo "Running under gdb" echo "Running under gdb"
fi fi
success_count=0 success_count=0
total_count=100 total_count=100
for ((i = 0; i < total_count; i++)); do for ((i = 0; i < total_count; i++)); do
echo "$i" echo "$i"
ext="$RANDOM" ext="$RANDOM"
curl -H 'Expect:' -H "Content-Type: image/jpg" --data-binary "@shrinking_man_small.jpg" --output "result_${ext}_small.png" localhost:10000 2>/dev/null 1>/dev/null curl -H 'Expect:' -H "Content-Type: image/jpg" --data-binary "@shrinking_man_small.jpg" --output "result_${ext}_small.png" localhost:10000 2> /dev/null 1> /dev/null
pixel_differences="$(compare -identify -metric AE "result_${ext}_small.png" expected_result_small.png null: 2>&1 >/dev/null)" pixel_differences="$(compare -identify -metric AE "result_${ext}_small.png" expected_result_small.png null: 2>&1 > /dev/null)"
if [[ "$pixel_differences" != "0" ]]; then if [[ "$pixel_differences" != "0" ]]; then
echo "Small FAIL" echo "Small FAIL"
echo "$pixel_differences pixel differences detected" echo "$pixel_differences pixel differences detected"
exit 1 exit 1
fi fi
curl -H 'Expect:' -H "Content-Type: image/jpg" --data-binary "@shrinking_man_medium.jpg" --output "result_${ext}_medium.png" localhost:10001 2>/dev/null 1>/dev/null curl -H 'Expect:' -H "Content-Type: image/jpg" --data-binary "@shrinking_man_medium.jpg" --output "result_${ext}_medium.png" localhost:10001 2> /dev/null 1> /dev/null
pixel_differences="$(compare -identify -metric AE "result_${ext}_medium.png" expected_result_medium.png null: 2>&1 >/dev/null)" pixel_differences="$(compare -identify -metric AE "result_${ext}_medium.png" expected_result_medium.png null: 2>&1 > /dev/null)"
if [[ "$pixel_differences" != "0" ]]; then if [[ "$pixel_differences" != "0" ]]; then
echo "Small FAIL" echo "Small FAIL"
echo "$pixel_differences pixel differences detected" echo "$pixel_differences pixel differences detected"
exit 1 exit 1
fi fi
curl -H 'Expect:' -H "Content-Type: image/jpg" --data-binary "@shrinking_man_large.jpg" --output "result_${ext}_large.png" localhost:10002 2>/dev/null 1>/dev/null curl -H 'Expect:' -H "Content-Type: image/jpg" --data-binary "@shrinking_man_large.jpg" --output "result_${ext}_large.png" localhost:10002 2> /dev/null 1> /dev/null
pixel_differences="$(compare -identify -metric AE "result_${ext}_large.png" expected_result_large.png null: 2>&1 >/dev/null)" pixel_differences="$(compare -identify -metric AE "result_${ext}_large.png" expected_result_large.png null: 2>&1 > /dev/null)"
if [[ "$pixel_differences" != "0" ]]; then if [[ "$pixel_differences" != "0" ]]; then
echo "Small FAIL" echo "Small FAIL"
echo "$pixel_differences pixel differences detected" echo "$pixel_differences pixel differences detected"
exit 1 exit 1
fi fi
success_count=$((success_count + 1)) success_count=$((success_count + 1))
done done
echo "$success_count / $total_count" echo "$success_count / $total_count"
rm -f result_*.png rm -f result_*.png
if [ "$1" != "-d" ]; then if [ "$1" != "-d" ]; then
sleep 5 sleep 5
echo -n "Running Cleanup: " echo -n "Running Cleanup: "
pkill sledgert >/dev/null 2>/dev/null pkill sledgert > /dev/null 2> /dev/null
echo "[DONE]" echo "[DONE]"
fi fi
exit 0 exit 0

@ -12,8 +12,8 @@ export LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH"
export PATH="$binary_directory:$PATH" export PATH="$binary_directory:$PATH"
gdb --eval-command="handle SIGUSR1 nostop" \ gdb --eval-command="handle SIGUSR1 nostop" \
--eval-command="handle SIGPIPE nostop" \ --eval-command="handle SIGPIPE nostop" \
--eval-command="set pagination off" \ --eval-command="set pagination off" \
--eval-command="set substitute-path /sledge/runtime $project_directory" \ --eval-command="set substitute-path /sledge/runtime $project_directory" \
--eval-command="run $experiment_directory/spec.json" \ --eval-command="run $experiment_directory/spec.json" \
sledgert sledgert

@ -2,7 +2,7 @@
# Installs the deps needed for run.sh # Installs the deps needed for run.sh
if [ "$(whoami)" == "root" ]; then if [ "$(whoami)" == "root" ]; then
apt-get install imagemagick apt-get install imagemagick
else else
sudo apt-get install imagemagick sudo apt-get install imagemagick
fi fi

@ -11,43 +11,43 @@ binary_directory=$(cd "$project_directory"/bin && pwd)
# Copy Flower Image if not here # Copy Flower Image if not here
if [[ ! -f "./flower.jpg" ]]; then if [[ ! -f "./flower.jpg" ]]; then
cp ../../../../tests/sod/bin/flower.jpg ./flower.jpg cp ../../../../tests/sod/bin/flower.jpg ./flower.jpg
fi fi
if [ "$1" != "-d" ]; then if [ "$1" != "-d" ]; then
PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" & PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" &
sleep 1 sleep 1
else else
echo "Running under gdb" echo "Running under gdb"
fi fi
success_count=0 success_count=0
total_count=10 total_count=10
for ((i = 0; i < total_count; i++)); do for ((i = 0; i < total_count; i++)); do
echo "$i" echo "$i"
ext="$RANDOM" ext="$RANDOM"
curl -H 'Expect:' -H "Content-Type: image/jpg" --data-binary "@flower.jpg" --output "result_$ext.png" localhost:10000 2>/dev/null 1>/dev/null || exit 1 curl -H 'Expect:' -H "Content-Type: image/jpg" --data-binary "@flower.jpg" --output "result_$ext.png" localhost:10000 2> /dev/null 1> /dev/null || exit 1
pixel_differences="$(compare -identify -metric AE "result_$ext.png" expected_result.png null: 2>&1 >/dev/null)" pixel_differences="$(compare -identify -metric AE "result_$ext.png" expected_result.png null: 2>&1 > /dev/null)"
if [[ "$pixel_differences" == "0" ]]; then if [[ "$pixel_differences" == "0" ]]; then
success_count=$((success_count + 1)) success_count=$((success_count + 1))
else else
echo "FAIL" echo "FAIL"
echo "$pixel_differences pixel differences detected" echo "$pixel_differences pixel differences detected"
exit 1 exit 1
fi fi
done done
echo "$success_count / $total_count" echo "$success_count / $total_count"
rm result_*.png rm result_*.png
if [ "$1" != "-d" ]; then if [ "$1" != "-d" ]; then
sleep 5 sleep 5
echo -n "Running Cleanup: " echo -n "Running Cleanup: "
pkill sledgert >/dev/null 2>/dev/null pkill sledgert > /dev/null 2> /dev/null
echo "[DONE]" echo "[DONE]"
fi fi
exit 0 exit 0

@ -12,8 +12,8 @@ export LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH"
export PATH="$binary_directory:$PATH" export PATH="$binary_directory:$PATH"
gdb --eval-command="handle SIGUSR1 nostop" \ gdb --eval-command="handle SIGUSR1 nostop" \
--eval-command="handle SIGPIPE nostop" \ --eval-command="handle SIGPIPE nostop" \
--eval-command="set pagination off" \ --eval-command="set pagination off" \
--eval-command="set substitute-path /sledge/runtime $project_directory" \ --eval-command="set substitute-path /sledge/runtime $project_directory" \
--eval-command="run $experiment_directory/spec.json" \ --eval-command="run $experiment_directory/spec.json" \
sledgert sledgert

@ -11,10 +11,10 @@ binary_directory=$(cd "$project_directory"/bin && pwd)
log="$experiment_directory/log.csv" log="$experiment_directory/log.csv"
if [ "$1" != "-d" ]; then if [ "$1" != "-d" ]; then
SLEDGE_SANDBOX_PERF_LOG=$log PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" & SLEDGE_SANDBOX_PERF_LOG=$log PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" &
sleep 1 sleep 1
else else
echo "Running under gdb" echo "Running under gdb"
fi fi
one_plate=(Cars0 Cars1 Cars2 Cars3 Cars4) one_plate=(Cars0 Cars1 Cars2 Cars3 Cars4)
@ -22,21 +22,21 @@ two_plates=(Cars71 Cars87 Cars143 Cars295 Cars316)
four_plates=(Cars106 Cars146 Cars249 Cars277 Cars330) four_plates=(Cars106 Cars146 Cars249 Cars277 Cars330)
for image in ${one_plate[*]}; do for image in ${one_plate[*]}; do
echo "@./1/${image}.png" echo "@./1/${image}.png"
curl --data-binary "@./1/${image}.png" --output - localhost:10000 curl --data-binary "@./1/${image}.png" --output - localhost:10000
done done
for image in ${two_plates[*]}; do for image in ${two_plates[*]}; do
echo "@./2/${image}.png" echo "@./2/${image}.png"
curl --data-binary "@./2/${image}.png" --output - localhost:10001 curl --data-binary "@./2/${image}.png" --output - localhost:10001
done done
for image in ${four_plates[*]}; do for image in ${four_plates[*]}; do
echo "@./4/${image}.png" echo "@./4/${image}.png"
curl --data-binary "@./4/${image}.png" --output - localhost:10002 curl --data-binary "@./4/${image}.png" --output - localhost:10002
done done
if [ "$1" != "-d" ]; then if [ "$1" != "-d" ]; then
sleep 5 sleep 5
echo -n "Running Cleanup: " echo -n "Running Cleanup: "
pkill sledgert >/dev/null 2>/dev/null pkill sledgert > /dev/null 2> /dev/null
echo "[DONE]" echo "[DONE]"
fi fi

@ -12,8 +12,8 @@ export LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH"
export PATH="$binary_directory:$PATH" export PATH="$binary_directory:$PATH"
gdb --eval-command="handle SIGUSR1 nostop" \ gdb --eval-command="handle SIGUSR1 nostop" \
--eval-command="handle SIGPIPE nostop" \ --eval-command="handle SIGPIPE nostop" \
--eval-command="set pagination off" \ --eval-command="set pagination off" \
--eval-command="set substitute-path /sledge/runtime $project_directory" \ --eval-command="set substitute-path /sledge/runtime $project_directory" \
--eval-command="run $experiment_directory/spec.json" \ --eval-command="run $experiment_directory/spec.json" \
sledgert sledgert

@ -2,7 +2,7 @@
# Installs the deps needed for run.sh # Installs the deps needed for run.sh
if [ "$(whoami)" == "root" ]; then if [ "$(whoami)" == "root" ]; then
apt-get install netpbm pango1.0-tools wamerican apt-get install netpbm pango1.0-tools wamerican
else else
sudo apt-get install netpbm pango1.0-tools wamerican sudo apt-get install netpbm pango1.0-tools wamerican
fi fi

@ -11,10 +11,10 @@ binary_directory=$(cd "$project_directory"/bin && pwd)
log="$experiment_directory/log.csv" log="$experiment_directory/log.csv"
if [ "$1" != "-d" ]; then if [ "$1" != "-d" ]; then
SLEDGE_SANDBOX_PERF_LOG=$log PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" >rt.log 2>&1 & SLEDGE_SANDBOX_PERF_LOG=$log PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" > rt.log 2>&1 &
sleep 2 sleep 2
else else
echo "Running under gdb" echo "Running under gdb"
fi fi
word_count=100 word_count=100
@ -28,28 +28,28 @@ dpi_to_port[144]=10002
total_count=100 total_count=100
for ((i = 0; i < total_count; i++)); do for ((i = 0; i < total_count; i++)); do
echo "$i" echo "$i"
words="$(shuf -n"$word_count" /usr/share/dict/american-english)" words="$(shuf -n"$word_count" /usr/share/dict/american-english)"
for dpi in "${dpis[@]}"; do for dpi in "${dpis[@]}"; do
echo "${dpi}"_dpi.pnm echo "${dpi}"_dpi.pnm
pango-view --dpi=$dpi --font=mono -qo "${dpi}"_dpi.png -t "$words" pango-view --dpi=$dpi --font=mono -qo "${dpi}"_dpi.png -t "$words"
pngtopnm "${dpi}"_dpi.png >"${dpi}"_dpi.pnm pngtopnm "${dpi}"_dpi.png > "${dpi}"_dpi.pnm
result=$(curl -H 'Expect:' -H "Content-Type: text/plain" --data-binary @"${dpi}"_dpi.pnm localhost:${dpi_to_port[$dpi]} 2>/dev/null) result=$(curl -H 'Expect:' -H "Content-Type: text/plain" --data-binary @"${dpi}"_dpi.pnm localhost:${dpi_to_port[$dpi]} 2> /dev/null)
diff -ywBZE --suppress-common-lines <(echo "$words") <(echo "$result") diff -ywBZE --suppress-common-lines <(echo "$words") <(echo "$result")
echo "===============================================" echo "==============================================="
done done
done done
if [ "$1" != "-d" ]; then if [ "$1" != "-d" ]; then
sleep 2 sleep 2
echo -n "Running Cleanup: " echo -n "Running Cleanup: "
rm ./*.png ./*.pnm rm ./*.png ./*.pnm
pkill --signal sigterm sledgert >/dev/null 2>/dev/null pkill --signal sigterm sledgert > /dev/null 2> /dev/null
sleep 2 sleep 2
pkill sledgert -9 >/dev/null 2>/dev/null pkill sledgert -9 > /dev/null 2> /dev/null
echo "[DONE]" echo "[DONE]"
fi fi

@ -12,8 +12,8 @@ export LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH"
export PATH="$binary_directory:$PATH" export PATH="$binary_directory:$PATH"
gdb --eval-command="handle SIGUSR1 nostop" \ gdb --eval-command="handle SIGUSR1 nostop" \
--eval-command="handle SIGPIPE nostop" \ --eval-command="handle SIGPIPE nostop" \
--eval-command="set pagination off" \ --eval-command="set pagination off" \
--eval-command="set substitute-path /sledge/runtime $project_directory" \ --eval-command="set substitute-path /sledge/runtime $project_directory" \
--eval-command="run $experiment_directory/spec.json" \ --eval-command="run $experiment_directory/spec.json" \
sledgert sledgert

@ -2,7 +2,7 @@
# Installs the deps needed for run.sh # Installs the deps needed for run.sh
if [ "$(whoami)" == "root" ]; then if [ "$(whoami)" == "root" ]; then
apt-get install netpbm pango1.0-tools wamerican fonts-roboto fonts-cascadia-code fonts-dejavu apt-get install netpbm pango1.0-tools wamerican fonts-roboto fonts-cascadia-code fonts-dejavu
else else
sudo apt-get install netpbm pango1.0-tools wamerican fonts-roboto fonts-cascadia-code fonts-dejavu sudo apt-get install netpbm pango1.0-tools wamerican fonts-roboto fonts-cascadia-code fonts-dejavu
fi fi

@ -11,10 +11,10 @@ binary_directory=$(cd "$project_directory"/bin && pwd)
log="$experiment_directory/log.csv" log="$experiment_directory/log.csv"
if [ "$1" != "-d" ]; then if [ "$1" != "-d" ]; then
SLEDGE_SANDBOX_PERF_LOG=$log PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" >rt.log 2>&1 & SLEDGE_SANDBOX_PERF_LOG=$log PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" > rt.log 2>&1 &
sleep 2 sleep 2
else else
echo "Running under gdb" echo "Running under gdb"
fi fi
word_count=100 word_count=100
@ -22,45 +22,45 @@ fonts=("DejaVu Sans Mono" "Roboto" "Cascadia Code")
total_count=10 total_count=10
for ((i = 1; i <= total_count; i++)); do for ((i = 1; i <= total_count; i++)); do
echo "Test $i" echo "Test $i"
words="$(shuf -n"$word_count" /usr/share/dict/american-english)" words="$(shuf -n"$word_count" /usr/share/dict/american-english)"
for font in "${fonts[@]}"; do for font in "${fonts[@]}"; do
# For whatever reason, templating in multiple word strips was a pain, so brute forcing # For whatever reason, templating in multiple word strips was a pain, so brute forcing
case "$font" in case "$font" in
"DejaVu Sans Mono") "DejaVu Sans Mono")
echo "DejaVu Sans Mono" echo "DejaVu Sans Mono"
pango-view --font="DejaVu Sans Mono" -qo mono_words.png -t "$words" || exit 1 pango-view --font="DejaVu Sans Mono" -qo mono_words.png -t "$words" || exit 1
pngtopnm mono_words.png >mono_words.pnm || exit 1 pngtopnm mono_words.png > mono_words.pnm || exit 1
result=$(curl -H 'Expect:' -H "Content-Type: text/plain" --data-binary @mono_words.pnm localhost:10000 2>/dev/null) result=$( curl -H 'Expect:' -H "Content-Type: text/plain" --data-binary @mono_words.pnm localhost:10000 2> /dev/null)
diff -ywBZE --suppress-common-lines <(echo "$words") <(echo "$result") diff -ywBZE --suppress-common-lines <(echo "$words") <(echo "$result")
;; ;;
"Roboto") "Roboto")
echo "Roboto" echo "Roboto"
pango-view --font="Roboto" -qo Roboto_words.png -t "$words" || exit 1 pango-view --font="Roboto" -qo Roboto_words.png -t "$words" || exit 1
pngtopnm Roboto_words.png >Roboto_words.pnm || exit 1 pngtopnm Roboto_words.png > Roboto_words.pnm || exit 1
result=$(curl -H 'Expect:' -H "Content-Type: text/plain" --data-binary @Roboto_words.pnm localhost:10002 2>/dev/null) result=$( curl -H 'Expect:' -H "Content-Type: text/plain" --data-binary @Roboto_words.pnm localhost:10002 2> /dev/null)
diff -ywBZE --suppress-common-lines <(echo "$words") <(echo "$result") diff -ywBZE --suppress-common-lines <(echo "$words") <(echo "$result")
;; ;;
"Cascadia Code") "Cascadia Code")
echo "Cascadia Code" echo "Cascadia Code"
pango-view --font="Cascadia Code" -qo Cascadia_Code_words.png -t "$words" || exit 1 pango-view --font="Cascadia Code" -qo Cascadia_Code_words.png -t "$words" || exit 1
pngtopnm Cascadia_Code_words.png >Cascadia_Code_words.pnm || exit 1 pngtopnm Cascadia_Code_words.png > Cascadia_Code_words.pnm || exit 1
result=$(curl -H 'Expect:' -H "Content-Type: text/plain" --data-binary @Cascadia_Code_words.pnm localhost:10001 2>/dev/null) result=$( curl -H 'Expect:' -H "Content-Type: text/plain" --data-binary @Cascadia_Code_words.pnm localhost:10001 2> /dev/null)
diff -ywBZE --suppress-common-lines <(echo "$words") <(echo "$result") diff -ywBZE --suppress-common-lines <(echo "$words") <(echo "$result")
;; ;;
esac esac
echo "===============================================" echo "==============================================="
done done
done done
if [ "$1" != "-d" ]; then if [ "$1" != "-d" ]; then
sleep 2 sleep 2
echo -n "Running Cleanup: " echo -n "Running Cleanup: "
rm ./*.png ./*.pnm rm ./*.png ./*.pnm
pkill --signal sigterm sledgert >/dev/null 2>/dev/null pkill --signal sigterm sledgert > /dev/null 2> /dev/null
sleep 2 sleep 2
pkill sledgert -9 >/dev/null 2>/dev/null pkill sledgert -9 > /dev/null 2> /dev/null
echo "[DONE]" echo "[DONE]"
fi fi

@ -12,8 +12,8 @@ export LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH"
export PATH="$binary_directory:$PATH" export PATH="$binary_directory:$PATH"
gdb --eval-command="handle SIGUSR1 nostop" \ gdb --eval-command="handle SIGUSR1 nostop" \
--eval-command="handle SIGPIPE nostop" \ --eval-command="handle SIGPIPE nostop" \
--eval-command="set pagination off" \ --eval-command="set pagination off" \
--eval-command="set substitute-path /sledge/runtime $project_directory" \ --eval-command="set substitute-path /sledge/runtime $project_directory" \
--eval-command="run $experiment_directory/spec.json" \ --eval-command="run $experiment_directory/spec.json" \
sledgert sledgert

@ -2,7 +2,7 @@
# Installs the deps needed for run.sh # Installs the deps needed for run.sh
if [ "$(whoami)" == "root" ]; then if [ "$(whoami)" == "root" ]; then
apt-get install netpbm pango1.0-tools wamerican apt-get install netpbm pango1.0-tools wamerican
else else
sudo apt-get install netpbm pango1.0-tools wamerican sudo apt-get install netpbm pango1.0-tools wamerican
fi fi

@ -11,10 +11,10 @@ binary_directory=$(cd "$project_directory"/bin && pwd)
log="$experiment_directory/log.csv" log="$experiment_directory/log.csv"
if [ "$1" != "-d" ]; then if [ "$1" != "-d" ]; then
SLEDGE_SANDBOX_PERF_LOG=$log PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" >rt.log 2>&1 & SLEDGE_SANDBOX_PERF_LOG=$log PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" > rt.log 2>&1 &
sleep 2 sleep 2
else else
echo "Running under gdb" echo "Running under gdb"
fi fi
word_counts=(1 10 100) word_counts=(1 10 100)
@ -27,28 +27,28 @@ word_count_to_port["100_words.pnm"]=10002
total_count=100 total_count=100
for ((i = 0; i < total_count; i++)); do for ((i = 0; i < total_count; i++)); do
echo "$i" echo "$i"
for word_count in "${word_counts[@]}"; do for word_count in "${word_counts[@]}"; do
echo "${word_count}"_words.pnm echo "${word_count}"_words.pnm
words="$(shuf -n"$word_count" /usr/share/dict/american-english)" words="$(shuf -n"$word_count" /usr/share/dict/american-english)"
pango-view --font=mono -qo "$word_count"_words.png -t "$words" || exit 1 pango-view --font=mono -qo "$word_count"_words.png -t "$words" || exit 1
pngtopnm "$word_count"_words.png >"$word_count"_words.pnm || exit 1 pngtopnm "$word_count"_words.png > "$word_count"_words.pnm || exit 1
result=$(curl -H 'Expect:' -H "Content-Type: text/plain" --data-binary @"${word_count}"_words.pnm localhost:${word_count_to_port["$word_count"_words.pnm]} 2>/dev/null) result=$(curl -H 'Expect:' -H "Content-Type: text/plain" --data-binary @"${word_count}"_words.pnm localhost:${word_count_to_port["$word_count"_words.pnm]} 2> /dev/null)
diff -ywBZE --suppress-common-lines <(echo "$words") <(echo "$result") diff -ywBZE --suppress-common-lines <(echo "$words") <(echo "$result")
echo "===============================================" echo "==============================================="
done done
done done
if [ "$1" != "-d" ]; then if [ "$1" != "-d" ]; then
sleep 2 sleep 2
echo -n "Running Cleanup: " echo -n "Running Cleanup: "
rm ./*.png ./*.pnm rm ./*.png ./*.pnm
pkill --signal sigterm sledgert >/dev/null 2>/dev/null pkill --signal sigterm sledgert > /dev/null 2> /dev/null
sleep 2 sleep 2
pkill sledgert -9 >/dev/null 2>/dev/null pkill sledgert -9 > /dev/null 2> /dev/null
echo "[DONE]" echo "[DONE]"
fi fi

@ -12,8 +12,8 @@ export LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH"
export PATH="$binary_directory:$PATH" export PATH="$binary_directory:$PATH"
gdb --eval-command="handle SIGUSR1 nostop" \ gdb --eval-command="handle SIGUSR1 nostop" \
--eval-command="handle SIGPIPE nostop" \ --eval-command="handle SIGPIPE nostop" \
--eval-command="set pagination off" \ --eval-command="set pagination off" \
--eval-command="set substitute-path /sledge/runtime $project_directory" \ --eval-command="set substitute-path /sledge/runtime $project_directory" \
--eval-command="run $experiment_directory/spec.json" \ --eval-command="run $experiment_directory/spec.json" \
sledgert sledgert

@ -12,10 +12,10 @@ binary_directory=$(cd "$project_directory"/bin && pwd)
did_pass=true did_pass=true
if [ "$1" != "-d" ]; then if [ "$1" != "-d" ]; then
PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" & PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" &
sleep 1 sleep 1
else else
echo "Running under gdb" echo "Running under gdb"
fi fi
expected_result="$(cat ./expected_result.txt)" expected_result="$(cat ./expected_result.txt)"
@ -24,35 +24,35 @@ success_count=0
total_count=50 total_count=50
for ((i = 0; i < total_count; i++)); do for ((i = 0; i < total_count; i++)); do
echo "$i" echo "$i"
result=$(curl -H 'Expect:' -H "Content-Type: text/plain" --data-binary "@5x8.pnm" localhost:10000 2>/dev/null) result=$(curl -H 'Expect:' -H "Content-Type: text/plain" --data-binary "@5x8.pnm" localhost:10000 2> /dev/null)
# echo "$result" # echo "$result"
if [[ "$result" == "$expected_result" ]]; then if [[ "$result" == "$expected_result" ]]; then
success_count=$((success_count + 1)) success_count=$((success_count + 1))
else else
echo "FAIL" echo "FAIL"
echo "Expected:" echo "Expected:"
echo "$expected_result" echo "$expected_result"
echo "===============================================" echo "==============================================="
echo "Was:" echo "Was:"
echo "$result" echo "$result"
did_pass=false did_pass=false
break break
fi fi
done done
echo "$success_count / $total_count" echo "$success_count / $total_count"
if [ "$1" != "-d" ]; then if [ "$1" != "-d" ]; then
sleep 5 sleep 5
echo -n "Running Cleanup: " echo -n "Running Cleanup: "
pkill sledgert >/dev/null 2>/dev/null pkill sledgert > /dev/null 2> /dev/null
echo "[DONE]" echo "[DONE]"
fi fi
if $did_pass; then if $did_pass; then
exit 0 exit 0
else else
exit 1 exit 1
fi fi

@ -12,8 +12,8 @@ export LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH"
export PATH="$binary_directory:$PATH" export PATH="$binary_directory:$PATH"
gdb --eval-command="handle SIGUSR1 nostop" \ gdb --eval-command="handle SIGUSR1 nostop" \
--eval-command="handle SIGPIPE nostop" \ --eval-command="handle SIGPIPE nostop" \
--eval-command="set pagination off" \ --eval-command="set pagination off" \
--eval-command="set substitute-path /sledge/runtime $project_directory" \ --eval-command="set substitute-path /sledge/runtime $project_directory" \
--eval-command="run $experiment_directory/spec.json" \ --eval-command="run $experiment_directory/spec.json" \
sledgert sledgert

@ -12,10 +12,10 @@ binary_directory=$(cd "$project_directory"/bin && pwd)
did_pass=true did_pass=true
if [ "$1" != "-d" ]; then if [ "$1" != "-d" ]; then
PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" & PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" &
sleep 1 sleep 1
else else
echo "Running under gdb" echo "Running under gdb"
fi fi
expected_result="$(cat ./expected_result.txt)" expected_result="$(cat ./expected_result.txt)"
@ -23,34 +23,34 @@ success_count=0
total_count=50 total_count=50
for ((i = 0; i < total_count; i++)); do for ((i = 0; i < total_count; i++)); do
echo "$i" echo "$i"
result=$(curl -H 'Expect:' -H "Content-Type: text/plain" --data-binary "@handwrt1.pnm" localhost:10000 2>/dev/null) result=$(curl -H 'Expect:' -H "Content-Type: text/plain" --data-binary "@handwrt1.pnm" localhost:10000 2> /dev/null)
# echo "$result" # echo "$result"
if [[ "$result" == "$expected_result" ]]; then if [[ "$result" == "$expected_result" ]]; then
success_count=$((success_count + 1)) success_count=$((success_count + 1))
else else
echo "FAIL" echo "FAIL"
echo "Expected:" echo "Expected:"
echo "$expected_result" echo "$expected_result"
echo "===============================================" echo "==============================================="
echo "Was:" echo "Was:"
echo "$result" echo "$result"
did_pass=false did_pass=false
break break
fi fi
done done
echo "$success_count / $total_count" echo "$success_count / $total_count"
if [ "$1" != "-d" ]; then if [ "$1" != "-d" ]; then
sleep 5 sleep 5
echo -n "Running Cleanup: " echo -n "Running Cleanup: "
pkill sledgert >/dev/null 2>/dev/null pkill sledgert > /dev/null 2> /dev/null
echo "[DONE]" echo "[DONE]"
fi fi
if $did_pass; then if $did_pass; then
exit 0 exit 0
else else
exit 1 exit 1
fi fi

@ -12,8 +12,8 @@ export LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH"
export PATH="$binary_directory:$PATH" export PATH="$binary_directory:$PATH"
gdb --eval-command="handle SIGUSR1 nostop" \ gdb --eval-command="handle SIGUSR1 nostop" \
--eval-command="handle SIGPIPE nostop" \ --eval-command="handle SIGPIPE nostop" \
--eval-command="set pagination off" \ --eval-command="set pagination off" \
--eval-command="set substitute-path /sledge/runtime $project_directory" \ --eval-command="set substitute-path /sledge/runtime $project_directory" \
--eval-command="run $experiment_directory/spec.json" \ --eval-command="run $experiment_directory/spec.json" \
sledgert sledgert

@ -11,10 +11,10 @@ binary_directory=$(cd "$project_directory"/bin && pwd)
did_pass=true did_pass=true
if [ "$1" != "-d" ]; then if [ "$1" != "-d" ]; then
PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" & PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" &
sleep 1 sleep 1
else else
echo "Running under gdb" echo "Running under gdb"
fi fi
expected_result="$(cat ./expected_result.txt)" expected_result="$(cat ./expected_result.txt)"
@ -22,34 +22,34 @@ success_count=0
total_count=50 total_count=50
for ((i = 0; i < total_count; i++)); do for ((i = 0; i < total_count; i++)); do
echo "$i" echo "$i"
result=$(curl -H 'Expect:' -H "Content-Type: text/plain" --data-binary "@hyde.pnm" localhost:10000 2>/dev/null) result=$(curl -H 'Expect:' -H "Content-Type: text/plain" --data-binary "@hyde.pnm" localhost:10000 2> /dev/null)
# echo "$result" # echo "$result"
if [[ "$result" == "$expected_result" ]]; then if [[ "$result" == "$expected_result" ]]; then
success_count=$((success_count + 1)) success_count=$((success_count + 1))
else else
echo "FAIL" echo "FAIL"
echo "Expected:" echo "Expected:"
echo "$expected_result" echo "$expected_result"
echo "===============================================" echo "==============================================="
echo "Was:" echo "Was:"
echo "$result" echo "$result"
did_pass=false did_pass=false
break break
fi fi
done done
echo "$success_count / $total_count" echo "$success_count / $total_count"
if [ "$1" != "-d" ]; then if [ "$1" != "-d" ]; then
sleep 5 sleep 5
echo -n "Running Cleanup: " echo -n "Running Cleanup: "
pkill sledgert >/dev/null 2>/dev/null pkill sledgert > /dev/null 2> /dev/null
echo "[DONE]" echo "[DONE]"
fi fi
if $did_pass; then if $did_pass; then
exit 0 exit 0
else else
exit 1 exit 1
fi fi

@ -12,15 +12,15 @@ log=log.txt
mkdir -p "$results_directory" mkdir -p "$results_directory"
log_environment >>"$results_directory/$log" log_environment >> "$results_directory/$log"
# Start the runtime # Start the runtime
if [ "$1" != "-d" ]; then if [ "$1" != "-d" ]; then
PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" >>"$results_directory/$log" 2>>"$results_directory/$log" & PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" >> "$results_directory/$log" 2>> "$results_directory/$log" &
sleep 1 sleep 1
else else
echo "Running under gdb" echo "Running under gdb"
echo "Running under gdb" >>"$results_directory/$log" echo "Running under gdb" >> "$results_directory/$log"
fi fi
payloads=(fivebyeight/5x8 handwriting/handwrt1 hyde/hyde) payloads=(fivebyeight/5x8 handwriting/handwrt1 hyde/hyde)
ports=(10000 10001 10002) ports=(10000 10001 10002)
@ -29,7 +29,7 @@ iterations=1000
# Execute workloads long enough for runtime to learn excepted execution time # Execute workloads long enough for runtime to learn excepted execution time
echo -n "Running Samples: " echo -n "Running Samples: "
for i in {0..2}; do for i in {0..2}; do
hey -n 200 -c 3 -q 200 -o csv -m GET -D "$experiment_directory/${payloads[$i]}.pnm" "http://localhost:${ports[$i]}" hey -n 200 -c 3 -q 200 -o csv -m GET -D "$experiment_directory/${payloads[$i]}.pnm" "http://localhost:${ports[$i]}"
done done
sleep 1 sleep 1
echo "[DONE]" echo "[DONE]"
@ -37,49 +37,49 @@ echo "[DONE]"
# Execute the experiments # Execute the experiments
echo "Running Experiments" echo "Running Experiments"
for i in {0..2}; do for i in {0..2}; do
printf "\t%s Payload: " "${payloads[$i]}" printf "\t%s Payload: " "${payloads[$i]}"
file=$(echo "${payloads[$i]}" | awk -F/ '{print $2}').csv file=$(echo "${payloads[$i]}" | awk -F/ '{print $2}').csv
hey -n "$iterations" -c 3 -cpus 2 -o csv -m GET -D "$experiment_directory/${payloads[$i]}.pnm" "http://localhost:${ports[$i]}" >"$results_directory/$file" hey -n "$iterations" -c 3 -cpus 2 -o csv -m GET -D "$experiment_directory/${payloads[$i]}.pnm" "http://localhost:${ports[$i]}" > "$results_directory/$file"
echo "[DONE]" echo "[DONE]"
done done
# Stop the runtime # Stop the runtime
if [ "$1" != "-d" ]; then if [ "$1" != "-d" ]; then
sleep 5 sleep 5
kill_runtime kill_runtime
fi fi
# Generate *.csv and *.dat results # Generate *.csv and *.dat results
echo -n "Parsing Results: " echo -n "Parsing Results: "
printf "Concurrency,Success_Rate\n" >>"$results_directory/success.csv" printf "Concurrency,Success_Rate\n" >> "$results_directory/success.csv"
printf "Concurrency,Throughput\n" >>"$results_directory/throughput.csv" printf "Concurrency,Throughput\n" >> "$results_directory/throughput.csv"
printf "Con,p50,p90,p99,p100\n" >>"$results_directory/latency.csv" printf "Con,p50,p90,p99,p100\n" >> "$results_directory/latency.csv"
for payload in ${payloads[*]}; do for payload in ${payloads[*]}; do
# Calculate Success Rate for csv # Calculate Success Rate for csv
file=$(echo "$payload" | awk -F/ '{print $2}') file=$(echo "$payload" | awk -F/ '{print $2}')
awk -F, ' awk -F, '
$7 == 200 {ok++} $7 == 200 {ok++}
END{printf "'"$file"',%3.5f\n", (ok / '"$iterations"' * 100)} END{printf "'"$file"',%3.5f\n", (ok / '"$iterations"' * 100)}
' <"$results_directory/$file.csv" >>"$results_directory/success.csv" ' < "$results_directory/$file.csv" >> "$results_directory/success.csv"
# Filter on 200s, convery from s to ms, and sort # Filter on 200s, convery from s to ms, and sort
awk -F, '$7 == 200 {print ($1 * 1000)}' <"$results_directory/$file.csv" | awk -F, '$7 == 200 {print ($1 * 1000)}' < "$results_directory/$file.csv" \
sort -g >"$results_directory/$file-response.csv" | sort -g > "$results_directory/$file-response.csv"
# Get Number of 200s # Get Number of 200s
oks=$(wc -l <"$results_directory/$file-response.csv") oks=$(wc -l < "$results_directory/$file-response.csv")
((oks == 0)) && continue # If all errors, skip line ((oks == 0)) && continue # If all errors, skip line
# Get Latest Timestamp # Get Latest Timestamp
duration=$(tail -n1 "$results_directory/$file.csv" | cut -d, -f8) duration=$(tail -n1 "$results_directory/$file.csv" | cut -d, -f8)
throughput=$(echo "$oks/$duration" | bc) throughput=$(echo "$oks/$duration" | bc)
printf "%s,%f\n" "$file" "$throughput" >>"$results_directory/throughput.csv" printf "%s,%f\n" "$file" "$throughput" >> "$results_directory/throughput.csv"
# Generate Latency Data for csv # Generate Latency Data for csv
awk ' awk '
BEGIN { BEGIN {
sum = 0 sum = 0
p50 = int('"$oks"' * 0.5) p50 = int('"$oks"' * 0.5)
@ -92,16 +92,16 @@ for payload in ${payloads[*]}; do
NR==p90 {printf "%1.4f,", $0} NR==p90 {printf "%1.4f,", $0}
NR==p99 {printf "%1.4f,", $0} NR==p99 {printf "%1.4f,", $0}
NR==p100 {printf "%1.4f\n", $0} NR==p100 {printf "%1.4f\n", $0}
' <"$results_directory/$file-response.csv" >>"$results_directory/latency.csv" ' < "$results_directory/$file-response.csv" >> "$results_directory/latency.csv"
# Delete scratch file used for sorting/counting # Delete scratch file used for sorting/counting
rm -rf "$results_directory/$file-response.csv" rm -rf "$results_directory/$file-response.csv"
done done
# Transform csvs to dat files for gnuplot # Transform csvs to dat files for gnuplot
for file in success latency throughput; do for file in success latency throughput; do
echo -n "#" >"$results_directory/$file.dat" echo -n "#" > "$results_directory/$file.dat"
tr ',' ' ' <"$results_directory/$file.csv" | column -t >>"$results_directory/$file.dat" tr ',' ' ' < "$results_directory/$file.csv" | column -t >> "$results_directory/$file.dat"
done done
# Generate gnuplots # Generate gnuplots

@ -12,8 +12,8 @@ export LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH"
export PATH="$binary_directory:$PATH" export PATH="$binary_directory:$PATH"
gdb --eval-command="handle SIGUSR1 nostop" \ gdb --eval-command="handle SIGUSR1 nostop" \
--eval-command="handle SIGPIPE nostop" \ --eval-command="handle SIGPIPE nostop" \
--eval-command="set pagination off" \ --eval-command="set pagination off" \
--eval-command="set substitute-path /sledge/runtime $project_directory" \ --eval-command="set substitute-path /sledge/runtime $project_directory" \
--eval-command="run $experiment_directory/spec.json" \ --eval-command="run $experiment_directory/spec.json" \
sledgert sledgert

@ -10,14 +10,14 @@ binary_directory=$(cd "$project_directory"/bin && pwd)
# Copy License Plate Image if not here # Copy License Plate Image if not here
if [[ ! -f "./samples/goforward.raw" ]]; then if [[ ! -f "./samples/goforward.raw" ]]; then
cp ../../../tests/speechtotext/goforward.raw ./samples/goforward.raw cp ../../../tests/speechtotext/goforward.raw ./samples/goforward.raw
fi fi
if [ "$1" != "-d" ]; then if [ "$1" != "-d" ]; then
PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" & PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" &
sleep 1 sleep 1
else else
echo "Running under gdb" echo "Running under gdb"
fi fi
# expected_size="$(find expected_result.jpg -printf "%s")" # expected_size="$(find expected_result.jpg -printf "%s")"
@ -25,29 +25,29 @@ success_count=0
total_count=50 total_count=50
for ((i = 0; i < total_count; i++)); do for ((i = 0; i < total_count; i++)); do
echo "$i" echo "$i"
# ext="$RANDOM" # ext="$RANDOM"
curl -H 'Expect:' -H "Content-Type: image/jpg" --data-binary "@goforward.raw" localhost:10000 2>/dev/null curl -H 'Expect:' -H "Content-Type: image/jpg" --data-binary "@goforward.raw" localhost:10000 2> /dev/null
# # echo "$result" # # echo "$result"
# if [[ "$expected_size" == "$actual_size" ]]; then # if [[ "$expected_size" == "$actual_size" ]]; then
# echo "SUCCESS $success_count" # echo "SUCCESS $success_count"
# else # else
# echo "FAIL" # echo "FAIL"
# echo "Expected Size:" # echo "Expected Size:"
# echo "$expected_size" # echo "$expected_size"
# echo "===============================================" # echo "==============================================="
# echo "Actual Size:" # echo "Actual Size:"
# echo "$actual_size" # echo "$actual_size"
# fi # fi
done done
echo "$success_count / $total_count" echo "$success_count / $total_count"
if [ "$1" != "-d" ]; then if [ "$1" != "-d" ]; then
sleep 5 sleep 5
echo -n "Running Cleanup: " echo -n "Running Cleanup: "
rm result_*.jpg rm result_*.jpg
pkill sledgert >/dev/null 2>/dev/null pkill sledgert > /dev/null 2> /dev/null
echo "[DONE]" echo "[DONE]"
fi fi

@ -1,47 +1,47 @@
#!/bin/bash #!/bin/bash
# Dump a reproducibility report to stdout: current git commit hash and
# working-tree status, the build Makefile, this run script, and CPU
# details, each section under a starred banner.
log_environment() {
	printf '%s\n' "*******" "* Git *" "*******"
	git log | head -n 1 | cut -d' ' -f2
	git status
	printf '\n'

	printf '%s\n' "************" "* Makefile *" "************"
	cat ../../Makefile
	printf '\n'

	printf '%s\n' "**********" "* Run.sh *" "**********"
	cat run.sh
	printf '\n'

	printf '%s\n' "************" "* Hardware *" "************"
	lscpu
	printf '\n'

	printf '%s\n' "*************" "* Execution *" "*************"
}
# Terminate any running sledgert runtime and hey load generators,
# discarding pkill's output; prints a short progress message.
kill_runtime() {
	printf '%s' "Running Cleanup: "
	pkill sledgert > /dev/null 2>&1
	pkill hey > /dev/null 2>&1
	printf '%s\n' "[DONE]"
}
# Render the latency/success/throughput plots from the data files in
# $results_directory, then return to $experiment_directory.
# Globals (read): results_directory, experiment_directory.
# Exits non-zero with a diagnostic if either directory is inaccessible
# (the original bare `|| exit` died silently, hiding misconfiguration).
generate_gnuplots() {
	cd "$results_directory" || {
		echo "generate_gnuplots: cannot cd to results directory '$results_directory'" >&2
		exit 1
	}
	gnuplot ../../latency.gnuplot
	gnuplot ../../success.gnuplot
	gnuplot ../../throughput.gnuplot
	cd "$experiment_directory" || {
		echo "generate_gnuplots: cannot cd to experiment directory '$experiment_directory'" >&2
		exit 1
	}
}

@ -12,8 +12,8 @@ export LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH"
export PATH="$binary_directory:$PATH" export PATH="$binary_directory:$PATH"
gdb --eval-command="handle SIGUSR1 nostop" \ gdb --eval-command="handle SIGUSR1 nostop" \
--eval-command="handle SIGPIPE nostop" \ --eval-command="handle SIGPIPE nostop" \
--eval-command="set pagination off" \ --eval-command="set pagination off" \
--eval-command="set substitute-path /sledge/runtime $project_directory" \ --eval-command="set substitute-path /sledge/runtime $project_directory" \
--eval-command="run $experiment_directory/spec.json" \ --eval-command="run $experiment_directory/spec.json" \
sledgert sledgert

@ -12,15 +12,15 @@ log=log.txt
mkdir -p "$results_directory" mkdir -p "$results_directory"
log_environment >>"$results_directory/$log" log_environment >> "$results_directory/$log"
# Start the runtime # Start the runtime
if [ "$1" != "-d" ]; then if [ "$1" != "-d" ]; then
PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" >>"$results_directory/$log" 2>>"$results_directory/$log" & PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" >> "$results_directory/$log" 2>> "$results_directory/$log" &
sleep 1 sleep 1
else else
echo "Running under gdb" echo "Running under gdb"
echo "Running under gdb" >>"$results_directory/$log" echo "Running under gdb" >> "$results_directory/$log"
fi fi
iterations=10000 iterations=10000
@ -35,47 +35,47 @@ echo "[DONE]"
concurrency=(1 20 40 60 80 100) concurrency=(1 20 40 60 80 100)
echo "Running Experiments" echo "Running Experiments"
for conn in ${concurrency[*]}; do for conn in ${concurrency[*]}; do
printf "\t%d Concurrency: " "$conn" printf "\t%d Concurrency: " "$conn"
hey -n "$iterations" -c "$conn" -cpus 2 -o csv -m GET http://localhost:10000 >"$results_directory/con$conn.csv" hey -n "$iterations" -c "$conn" -cpus 2 -o csv -m GET http://localhost:10000 > "$results_directory/con$conn.csv"
echo "[DONE]" echo "[DONE]"
done done
# Stop the runtime # Stop the runtime
if [ "$1" != "-d" ]; then if [ "$1" != "-d" ]; then
sleep 5 sleep 5
kill_runtime kill_runtime
fi fi
# Generate *.csv and *.dat results # Generate *.csv and *.dat results
echo -n "Parsing Results: " echo -n "Parsing Results: "
printf "Concurrency,Success_Rate\n" >>"$results_directory/success.csv" printf "Concurrency,Success_Rate\n" >> "$results_directory/success.csv"
printf "Concurrency,Throughput\n" >>"$results_directory/throughput.csv" printf "Concurrency,Throughput\n" >> "$results_directory/throughput.csv"
printf "Con,p50,p90,p99,p100\n" >>"$results_directory/latency.csv" printf "Con,p50,p90,p99,p100\n" >> "$results_directory/latency.csv"
for conn in ${concurrency[*]}; do for conn in ${concurrency[*]}; do
# Calculate Success Rate for csv # Calculate Success Rate for csv
awk -F, ' awk -F, '
$7 == 200 {ok++} $7 == 200 {ok++}
END{printf "'"$conn"',%3.5f\n", (ok / '"$iterations"' * 100)} END{printf "'"$conn"',%3.5f\n", (ok / '"$iterations"' * 100)}
' <"$results_directory/con$conn.csv" >>"$results_directory/success.csv" ' < "$results_directory/con$conn.csv" >> "$results_directory/success.csv"
# Filter on 200s, convery from s to ms, and sort # Filter on 200s, convery from s to ms, and sort
awk -F, '$7 == 200 {print ($1 * 1000)}' <"$results_directory/con$conn.csv" | awk -F, '$7 == 200 {print ($1 * 1000)}' < "$results_directory/con$conn.csv" \
sort -g >"$results_directory/con$conn-response.csv" | sort -g > "$results_directory/con$conn-response.csv"
# Get Number of 200s # Get Number of 200s
oks=$(wc -l <"$results_directory/con$conn-response.csv") oks=$(wc -l < "$results_directory/con$conn-response.csv")
((oks == 0)) && continue # If all errors, skip line ((oks == 0)) && continue # If all errors, skip line
# Get Latest Timestamp # Get Latest Timestamp
duration=$(tail -n1 "$results_directory/con$conn.csv" | cut -d, -f8) duration=$(tail -n1 "$results_directory/con$conn.csv" | cut -d, -f8)
throughput=$(echo "$oks/$duration" | bc) throughput=$(echo "$oks/$duration" | bc)
printf "%d,%f\n" "$conn" "$throughput" >>"$results_directory/throughput.csv" printf "%d,%f\n" "$conn" "$throughput" >> "$results_directory/throughput.csv"
# Generate Latency Data for csv # Generate Latency Data for csv
awk ' awk '
BEGIN { BEGIN {
sum = 0 sum = 0
p50 = int('"$oks"' * 0.5) p50 = int('"$oks"' * 0.5)
@ -88,16 +88,16 @@ for conn in ${concurrency[*]}; do
NR==p90 {printf "%1.4f,", $0} NR==p90 {printf "%1.4f,", $0}
NR==p99 {printf "%1.4f,", $0} NR==p99 {printf "%1.4f,", $0}
NR==p100 {printf "%1.4f\n", $0} NR==p100 {printf "%1.4f\n", $0}
' <"$results_directory/con$conn-response.csv" >>"$results_directory/latency.csv" ' < "$results_directory/con$conn-response.csv" >> "$results_directory/latency.csv"
# Delete scratch file used for sorting/counting # Delete scratch file used for sorting/counting
rm -rf "$results_directory/con$conn-response.csv" rm -rf "$results_directory/con$conn-response.csv"
done done
# Transform csvs to dat files for gnuplot # Transform csvs to dat files for gnuplot
# Convert each metric csv into a gnuplot-friendly .dat table:
# a leading "#" marker, commas replaced by spaces, columns aligned.
for file in success latency throughput; do
	{
		printf "#"
		tr ',' ' ' < "$results_directory/$file.csv" | column -t
	} > "$results_directory/$file.dat"
done
# Generate gnuplots # Generate gnuplots

@ -13,7 +13,7 @@ results_directory="$experiment_directory/res/$timestamp"
log=log.txt log=log.txt
mkdir -p "$results_directory" mkdir -p "$results_directory"
log_environment >>"$results_directory/$log" log_environment >> "$results_directory/$log"
inputs=(40 10) inputs=(40 10)
duration_sec=60 duration_sec=60
@ -22,7 +22,7 @@ offset=5
# Execute workloads long enough for runtime to learn excepted execution time # Execute workloads long enough for runtime to learn excepted execution time
echo -n "Running Samples: " echo -n "Running Samples: "
for input in ${inputs[*]}; do for input in ${inputs[*]}; do
hey -n 16 -c 4 -t 0 -o csv -m GET -d "$input\n" http://${host}:$((10000 + input)) hey -n 16 -c 4 -t 0 -o csv -m GET -d "$input\n" http://${host}:$((10000 + input))
done done
echo "[DONE]" echo "[DONE]"
sleep 5 sleep 5
@ -30,7 +30,7 @@ sleep 5
echo "Running Experiments" echo "Running Experiments"
# Run lower priority first, then higher priority. The lower priority has offsets to ensure it runs the entire time the high priority is trying to run # Run lower priority first, then higher priority. The lower priority has offsets to ensure it runs the entire time the high priority is trying to run
hey -n 1000 -c 1000 -cpus 6 -t 0 -o csv -m GET -d "40\n" http://${host}:10040 >"$results_directory/fib40-con.csv" hey -n 1000 -c 1000 -cpus 6 -t 0 -o csv -m GET -d "40\n" http://${host}:10040 > "$results_directory/fib40-con.csv"
# sleep $offset # sleep $offset
# hey -n 25000 -c 1000000 -t 0 -o csv -m GET -d "10\n" http://${host}:10010 >"$results_directory/fib10-con.csv" & # hey -n 25000 -c 1000000 -t 0 -o csv -m GET -d "10\n" http://${host}:10010 >"$results_directory/fib10-con.csv" &
# sleep $((duration_sec + offset + 45)) # sleep $((duration_sec + offset + 45))
@ -38,43 +38,43 @@ hey -n 1000 -c 1000 -cpus 6 -t 0 -o csv -m GET -d "40\n" http://${host}:10040 >"
# Generate *.csv and *.dat results # Generate *.csv and *.dat results
echo -n "Parsing Results: " echo -n "Parsing Results: "
printf "Payload,Success_Rate\n" >>"$results_directory/success.csv" printf "Payload,Success_Rate\n" >> "$results_directory/success.csv"
printf "Payload,Throughput\n" >>"$results_directory/throughput.csv" printf "Payload,Throughput\n" >> "$results_directory/throughput.csv"
printf "Payload,p50,p90,p99,p100\n" >>"$results_directory/latency.csv" printf "Payload,p50,p90,p99,p100\n" >> "$results_directory/latency.csv"
deadlines_ms=(20 20000) deadlines_ms=(20 20000)
# durations_s=(60 70) # durations_s=(60 70)
payloads=(fib10-con fib40-con) payloads=(fib10-con fib40-con)
for ((i = 1; i < 2; i++)); do for ((i = 1; i < 2; i++)); do
payload=${payloads[$i]} payload=${payloads[$i]}
deadline=${deadlines_ms[$i]} deadline=${deadlines_ms[$i]}
# duration=${durations_s[$i]} # duration=${durations_s[$i]}
# Get Number of Requests # Get Number of Requests
requests=$(($(wc -l <"$results_directory/$payload.csv") - 1)) requests=$(($(wc -l < "$results_directory/$payload.csv") - 1))
((requests == 0)) && continue ((requests == 0)) && continue
# Calculate Success Rate for csv # Calculate Success Rate for csv
awk -F, ' awk -F, '
$7 == 200 && ($1 * 1000) <= '"$deadline"' {ok++} $7 == 200 && ($1 * 1000) <= '"$deadline"' {ok++}
END{printf "'"$payload"',%3.5f%\n", (ok / (NR - 1) * 100)} END{printf "'"$payload"',%3.5f%\n", (ok / (NR - 1) * 100)}
' <"$results_directory/$payload.csv" >>"$results_directory/success.csv" ' < "$results_directory/$payload.csv" >> "$results_directory/success.csv"
# Filter on 200s, convery from s to ms, and sort # Filter on 200s, convery from s to ms, and sort
awk -F, '$7 == 200 {print ($1 * 1000)}' <"$results_directory/$payload.csv" | awk -F, '$7 == 200 {print ($1 * 1000)}' < "$results_directory/$payload.csv" \
sort -g >"$results_directory/$payload-response.csv" | sort -g > "$results_directory/$payload-response.csv"
# Get Number of 200s # Get Number of 200s
oks=$(wc -l <"$results_directory/$payload-response.csv") oks=$(wc -l < "$results_directory/$payload-response.csv")
((oks == 0)) && continue # If all errors, skip line ((oks == 0)) && continue # If all errors, skip line
# Get Latest Timestamp # Get Latest Timestamp
# throughput=$(echo "$oks/$duration" | bc) # throughput=$(echo "$oks/$duration" | bc)
# printf "%s,%f\n" "$payload" "$throughput" >>"$results_directory/throughput.csv" # printf "%s,%f\n" "$payload" "$throughput" >>"$results_directory/throughput.csv"
# Generate Latency Data for csv # Generate Latency Data for csv
awk ' awk '
BEGIN { BEGIN {
sum = 0 sum = 0
p50 = int('"$oks"' * 0.5) p50 = int('"$oks"' * 0.5)
@ -87,16 +87,16 @@ for ((i = 1; i < 2; i++)); do
NR==p90 {printf "%1.4f,", $0} NR==p90 {printf "%1.4f,", $0}
NR==p99 {printf "%1.4f,", $0} NR==p99 {printf "%1.4f,", $0}
NR==p100 {printf "%1.4f\n", $0} NR==p100 {printf "%1.4f\n", $0}
' <"$results_directory/$payload-response.csv" >>"$results_directory/latency.csv" ' < "$results_directory/$payload-response.csv" >> "$results_directory/latency.csv"
# Delete scratch file used for sorting/counting # Delete scratch file used for sorting/counting
# rm -rf "$results_directory/$payload-response.csv" # rm -rf "$results_directory/$payload-response.csv"
done done
# Transform csvs to dat files for gnuplot # Transform csvs to dat files for gnuplot
# Convert each metric csv into a gnuplot-friendly .dat table
# (no throughput here: that csv is not produced by this script):
# a leading "#" marker, commas replaced by spaces, columns aligned.
for file in success latency; do
	{
		printf "#"
		tr ',' ' ' < "$results_directory/$file.csv" | column -t
	} > "$results_directory/$file.dat"
done
# Generate gnuplots. Commented out because we don't have *.gnuplots defined # Generate gnuplots. Commented out because we don't have *.gnuplots defined

@ -14,7 +14,7 @@ results_directory="$experiment_directory/res/$timestamp"
log=log.txt log=log.txt
mkdir -p "$results_directory" mkdir -p "$results_directory"
log_environment >>"$results_directory/$log" log_environment >> "$results_directory/$log"
inputs=(40 10) inputs=(40 10)
duration_sec=30 duration_sec=30
@ -23,7 +23,7 @@ offset=5
# Execute workloads long enough for runtime to learn excepted execution time # Execute workloads long enough for runtime to learn excepted execution time
echo -n "Running Samples: " echo -n "Running Samples: "
for input in ${inputs[*]}; do for input in ${inputs[*]}; do
hey -n 16 -c 4 -t 0 -o csv -m GET -d "$input\n" http://${host}:$((10000 + input)) hey -n 16 -c 4 -t 0 -o csv -m GET -d "$input\n" http://${host}:$((10000 + input))
done done
echo "[DONE]" echo "[DONE]"
sleep 5 sleep 5
@ -31,54 +31,54 @@ sleep 5
echo "Running Experiments" echo "Running Experiments"
# Run lower priority first, then higher priority. The lower priority has offsets to ensure it runs the entire time the high priority is trying to run # Run lower priority first, then higher priority. The lower priority has offsets to ensure it runs the entire time the high priority is trying to run
hey -z $((duration_sec + 2 * offset))s -cpus 3 -c 200 -t 0 -o csv -m GET -d "40\n" http://${host}:10040 >"$results_directory/fib40-con.csv" & hey -z $((duration_sec + 2 * offset))s -cpus 3 -c 200 -t 0 -o csv -m GET -d "40\n" http://${host}:10040 > "$results_directory/fib40-con.csv" &
sleep $offset sleep $offset
hey -z ${duration_sec}s -cpus 3 -c 200 -t 0 -o csv -m GET -d "10\n" http://${host}:10010 >"$results_directory/fib10-con.csv" & hey -z ${duration_sec}s -cpus 3 -c 200 -t 0 -o csv -m GET -d "10\n" http://${host}:10010 > "$results_directory/fib10-con.csv" &
sleep $((duration_sec + offset + 15)) sleep $((duration_sec + offset + 15))
sleep 30 sleep 30
# Generate *.csv and *.dat results # Generate *.csv and *.dat results
echo -n "Parsing Results: " echo -n "Parsing Results: "
printf "Payload,Success_Rate\n" >>"$results_directory/success.csv" printf "Payload,Success_Rate\n" >> "$results_directory/success.csv"
printf "Payload,Throughput\n" >>"$results_directory/throughput.csv" printf "Payload,Throughput\n" >> "$results_directory/throughput.csv"
printf "Payload,p50,p90,p99,p100\n" >>"$results_directory/latency.csv" printf "Payload,p50,p90,p99,p100\n" >> "$results_directory/latency.csv"
deadlines_ms=(20 20000) deadlines_ms=(20 20000)
payloads=(fib10-con fib40-con) payloads=(fib10-con fib40-con)
durations_s=(30 40) durations_s=(30 40)
for ((i = 0; i < 2; i++)); do for ((i = 0; i < 2; i++)); do
payload=${payloads[$i]} payload=${payloads[$i]}
deadline=${deadlines_ms[$i]} deadline=${deadlines_ms[$i]}
duration=${durations_s[$i]} duration=${durations_s[$i]}
# Get Number of Requests # Get Number of Requests
requests=$(($(wc -l <"$results_directory/$payload.csv") - 1)) requests=$(($(wc -l < "$results_directory/$payload.csv") - 1))
((requests == 0)) && continue ((requests == 0)) && continue
# Calculate Success Rate for csv # Calculate Success Rate for csv
awk -F, ' awk -F, '
$7 == 200 {denom++} $7 == 200 {denom++}
$7 == 200 && ($1 * 1000) <= '"$deadline"' {ok++} $7 == 200 && ($1 * 1000) <= '"$deadline"' {ok++}
END{printf "'"$payload"',%3.5f%\n", (ok / denom * 100)} END{printf "'"$payload"',%3.5f%\n", (ok / denom * 100)}
' <"$results_directory/$payload.csv" >>"$results_directory/success.csv" ' < "$results_directory/$payload.csv" >> "$results_directory/success.csv"
# Filter on 200s, convery from s to ms, and sort # Filter on 200s, convery from s to ms, and sort
awk -F, '$7 == 200 {print ($1 * 1000)}' <"$results_directory/$payload.csv" | awk -F, '$7 == 200 {print ($1 * 1000)}' < "$results_directory/$payload.csv" \
sort -g >"$results_directory/$payload-response.csv" | sort -g > "$results_directory/$payload-response.csv"
# Get Number of 200s # Get Number of 200s
oks=$(wc -l <"$results_directory/$payload-response.csv") oks=$(wc -l < "$results_directory/$payload-response.csv")
((oks == 0)) && continue # If all errors, skip line ((oks == 0)) && continue # If all errors, skip line
# Get Latest Timestamp # Get Latest Timestamp
duration=$(tail -n1 "$results_directory/$payload.csv" | cut -d, -f8) duration=$(tail -n1 "$results_directory/$payload.csv" | cut -d, -f8)
throughput=$(echo "$oks/$duration" | bc) throughput=$(echo "$oks/$duration" | bc)
printf "%s,%f\n" "$payload" "$throughput" >>"$results_directory/throughput.csv" printf "%s,%f\n" "$payload" "$throughput" >> "$results_directory/throughput.csv"
# Generate Latency Data for csv # Generate Latency Data for csv
awk ' awk '
BEGIN { BEGIN {
sum = 0 sum = 0
p50 = int('"$oks"' * 0.5) p50 = int('"$oks"' * 0.5)
@ -91,16 +91,16 @@ for ((i = 0; i < 2; i++)); do
NR==p90 {printf "%1.4f,", $0} NR==p90 {printf "%1.4f,", $0}
NR==p99 {printf "%1.4f,", $0} NR==p99 {printf "%1.4f,", $0}
NR==p100 {printf "%1.4f\n", $0} NR==p100 {printf "%1.4f\n", $0}
' <"$results_directory/$payload-response.csv" >>"$results_directory/latency.csv" ' < "$results_directory/$payload-response.csv" >> "$results_directory/latency.csv"
# Delete scratch file used for sorting/counting # Delete scratch file used for sorting/counting
# rm -rf "$results_directory/$payload-response.csv" # rm -rf "$results_directory/$payload-response.csv"
done done
# Transform csvs to dat files for gnuplot # Transform csvs to dat files for gnuplot
# Convert each metric csv into a gnuplot-friendly .dat table:
# a leading "#" marker, commas replaced by spaces, columns aligned.
for file in success latency throughput; do
	{
		printf "#"
		tr ',' ' ' < "$results_directory/$file.csv" | column -t
	} > "$results_directory/$file.dat"
done
# Generate gnuplots. Commented out because we don't have *.gnuplots defined # Generate gnuplots. Commented out because we don't have *.gnuplots defined

@ -14,7 +14,7 @@ results_directory="$experiment_directory/res/$timestamp"
log=log.txt log=log.txt
mkdir -p "$results_directory" mkdir -p "$results_directory"
log_environment >>"$results_directory/$log" log_environment >> "$results_directory/$log"
inputs=(10) inputs=(10)
duration_sec=30 duration_sec=30
@ -31,52 +31,52 @@ echo "Running Experiments"
# Run lower priority first, then higher priority. The lower priority has offsets to ensure it runs the entire time the high priority is trying to run # Run lower priority first, then higher priority. The lower priority has offsets to ensure it runs the entire time the high priority is trying to run
# hey -z $((duration_sec + 2 * offset))s -cpus 3 -c 200 -t 0 -o csv -m GET -d "40\n" http://${host}:10040 >"$results_directory/fib40-con.csv" & # hey -z $((duration_sec + 2 * offset))s -cpus 3 -c 200 -t 0 -o csv -m GET -d "40\n" http://${host}:10040 >"$results_directory/fib40-con.csv" &
# sleep $offset # sleep $offset
hey -z ${duration_sec}s -cpus 6 -c 400 -t 0 -o csv -m GET -d "10\n" http://${host}:10010 >"$results_directory/fib10-con.csv" hey -z ${duration_sec}s -cpus 6 -c 400 -t 0 -o csv -m GET -d "10\n" http://${host}:10010 > "$results_directory/fib10-con.csv"
# sleep $((duration_sec + offset + 15)) # sleep $((duration_sec + offset + 15))
# sleep 30 # sleep 30
# Generate *.csv and *.dat results # Generate *.csv and *.dat results
echo -n "Parsing Results: " echo -n "Parsing Results: "
printf "Payload,Success_Rate\n" >>"$results_directory/success.csv" printf "Payload,Success_Rate\n" >> "$results_directory/success.csv"
printf "Payload,Throughput\n" >>"$results_directory/throughput.csv" printf "Payload,Throughput\n" >> "$results_directory/throughput.csv"
printf "Payload,p50,p90,p99,p100\n" >>"$results_directory/latency.csv" printf "Payload,p50,p90,p99,p100\n" >> "$results_directory/latency.csv"
deadlines_ms=(20 20000) deadlines_ms=(20 20000)
payloads=(fib10-con fib40-con) payloads=(fib10-con fib40-con)
durations_s=(30 40) durations_s=(30 40)
for ((i = 0; i < 1; i++)); do for ((i = 0; i < 1; i++)); do
payload=${payloads[$i]} payload=${payloads[$i]}
deadline=${deadlines_ms[$i]} deadline=${deadlines_ms[$i]}
duration=${durations_s[$i]} duration=${durations_s[$i]}
# Get Number of Requests # Get Number of Requests
requests=$(($(wc -l <"$results_directory/$payload.csv") - 1)) requests=$(($(wc -l < "$results_directory/$payload.csv") - 1))
((requests == 0)) && continue ((requests == 0)) && continue
# Calculate Success Rate for csv # Calculate Success Rate for csv
awk -F, ' awk -F, '
$7 == 200 {denom++} $7 == 200 {denom++}
$7 == 200 && ($1 * 1000) <= '"$deadline"' {ok++} $7 == 200 && ($1 * 1000) <= '"$deadline"' {ok++}
END{printf "'"$payload"',%3.5f%\n", (ok / denom * 100)} END{printf "'"$payload"',%3.5f%\n", (ok / denom * 100)}
' <"$results_directory/$payload.csv" >>"$results_directory/success.csv" ' < "$results_directory/$payload.csv" >> "$results_directory/success.csv"
# Filter on 200s, convery from s to ms, and sort # Filter on 200s, convery from s to ms, and sort
awk -F, '$7 == 200 {print ($1 * 1000)}' <"$results_directory/$payload.csv" | awk -F, '$7 == 200 {print ($1 * 1000)}' < "$results_directory/$payload.csv" \
sort -g >"$results_directory/$payload-response.csv" | sort -g > "$results_directory/$payload-response.csv"
# Get Number of 200s # Get Number of 200s
oks=$(wc -l <"$results_directory/$payload-response.csv") oks=$(wc -l < "$results_directory/$payload-response.csv")
((oks == 0)) && continue # If all errors, skip line ((oks == 0)) && continue # If all errors, skip line
# Get Latest Timestamp # Get Latest Timestamp
duration=$(tail -n1 "$results_directory/$payload.csv" | cut -d, -f8) duration=$(tail -n1 "$results_directory/$payload.csv" | cut -d, -f8)
throughput=$(echo "$oks/$duration" | bc) throughput=$(echo "$oks/$duration" | bc)
printf "%s,%f\n" "$payload" "$throughput" >>"$results_directory/throughput.csv" printf "%s,%f\n" "$payload" "$throughput" >> "$results_directory/throughput.csv"
# Generate Latency Data for csv # Generate Latency Data for csv
awk ' awk '
BEGIN { BEGIN {
sum = 0 sum = 0
p50 = int('"$oks"' * 0.5) p50 = int('"$oks"' * 0.5)
@ -89,16 +89,16 @@ for ((i = 0; i < 1; i++)); do
NR==p90 {printf "%1.4f,", $0} NR==p90 {printf "%1.4f,", $0}
NR==p99 {printf "%1.4f,", $0} NR==p99 {printf "%1.4f,", $0}
NR==p100 {printf "%1.4f\n", $0} NR==p100 {printf "%1.4f\n", $0}
' <"$results_directory/$payload-response.csv" >>"$results_directory/latency.csv" ' < "$results_directory/$payload-response.csv" >> "$results_directory/latency.csv"
# Delete scratch file used for sorting/counting # Delete scratch file used for sorting/counting
# rm -rf "$results_directory/$payload-response.csv" # rm -rf "$results_directory/$payload-response.csv"
done done
# Transform csvs to dat files for gnuplot # Transform csvs to dat files for gnuplot
# Convert each metric csv into a gnuplot-friendly .dat table:
# a leading "#" marker, commas replaced by spaces, columns aligned.
for file in success latency throughput; do
	{
		printf "#"
		tr ',' ' ' < "$results_directory/$file.csv" | column -t
	} > "$results_directory/$file.dat"
done
# Generate gnuplots. Commented out because we don't have *.gnuplots defined # Generate gnuplots. Commented out because we don't have *.gnuplots defined

@ -12,8 +12,8 @@ export LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH"
export PATH="$binary_directory:$PATH" export PATH="$binary_directory:$PATH"
gdb --eval-command="handle SIGUSR1 nostop" \ gdb --eval-command="handle SIGUSR1 nostop" \
--eval-command="handle SIGPIPE nostop" \ --eval-command="handle SIGPIPE nostop" \
--eval-command="set pagination off" \ --eval-command="set pagination off" \
--eval-command="set substitute-path /sledge/runtime $project_directory" \ --eval-command="set substitute-path /sledge/runtime $project_directory" \
--eval-command="run $experiment_directory/spec.json" \ --eval-command="run $experiment_directory/spec.json" \
sledgert sledgert

@ -10,42 +10,42 @@ results_directory="$experiment_directory/res/1606615320-fifo-adm"
# Generate *.csv and *.dat results # Generate *.csv and *.dat results
echo -n "Parsing Results: " echo -n "Parsing Results: "
printf "Payload,Success_Rate\n" >>"$results_directory/success.csv" printf "Payload,Success_Rate\n" >> "$results_directory/success.csv"
printf "Payload,Throughput\n" >>"$results_directory/throughput.csv" printf "Payload,Throughput\n" >> "$results_directory/throughput.csv"
printf "Payload,p50,p90,p99,p100\n" >>"$results_directory/latency.csv" printf "Payload,p50,p90,p99,p100\n" >> "$results_directory/latency.csv"
deadlines_ms=(20 20000) deadlines_ms=(20 20000)
payloads=(fib10-con fib40-con) payloads=(fib10-con fib40-con)
for ((i = 0; i < 2; i++)); do for ((i = 0; i < 2; i++)); do
payload=${payloads[$i]} payload=${payloads[$i]}
deadline=${deadlines_ms[$i]} deadline=${deadlines_ms[$i]}
# Get Number of Requests # Get Number of Requests
requests=$(($(wc -l <"$results_directory/$payload.csv") - 1)) requests=$(($(wc -l < "$results_directory/$payload.csv") - 1))
((requests == 0)) && continue ((requests == 0)) && continue
# Calculate Success Rate for csv # Calculate Success Rate for csv
awk -F, ' awk -F, '
$7 == 200 && ($1 * 1000) <= '"$deadline"' {ok++} $7 == 200 && ($1 * 1000) <= '"$deadline"' {ok++}
END{printf "'"$payload"',%3.5f%\n", (ok / (NR - 1) * 100)} END{printf "'"$payload"',%3.5f%\n", (ok / (NR - 1) * 100)}
' <"$results_directory/$payload.csv" >>"$results_directory/success.csv" ' < "$results_directory/$payload.csv" >> "$results_directory/success.csv"
# Filter on 200s, convery from s to ms, and sort # Filter on 200s, convery from s to ms, and sort
awk -F, '$7 == 200 {print ($1 * 1000)}' <"$results_directory/$payload.csv" | awk -F, '$7 == 200 {print ($1 * 1000)}' < "$results_directory/$payload.csv" \
sort -g >"$results_directory/$payload-response.csv" | sort -g > "$results_directory/$payload-response.csv"
# Get Number of 200s # Get Number of 200s
oks=$(wc -l <"$results_directory/$payload-response.csv") oks=$(wc -l < "$results_directory/$payload-response.csv")
((oks == 0)) && continue # If all errors, skip line ((oks == 0)) && continue # If all errors, skip line
# Get Latest Timestamp # Get Latest Timestamp
duration=$(tail -n1 "$results_directory/$payload.csv" | cut -d, -f8) duration=$(tail -n1 "$results_directory/$payload.csv" | cut -d, -f8)
throughput=$(echo "$oks/$duration" | bc) throughput=$(echo "$oks/$duration" | bc)
printf "%s,%f\n" "$payload" "$throughput" >>"$results_directory/throughput.csv" printf "%s,%f\n" "$payload" "$throughput" >> "$results_directory/throughput.csv"
# Generate Latency Data for csv # Generate Latency Data for csv
awk ' awk '
BEGIN { BEGIN {
sum = 0 sum = 0
p50 = int('"$oks"' * 0.5) p50 = int('"$oks"' * 0.5)
@ -58,16 +58,16 @@ for ((i = 0; i < 2; i++)); do
NR==p90 {printf "%1.4f,", $0} NR==p90 {printf "%1.4f,", $0}
NR==p99 {printf "%1.4f,", $0} NR==p99 {printf "%1.4f,", $0}
NR==p100 {printf "%1.4f\n", $0} NR==p100 {printf "%1.4f\n", $0}
' <"$results_directory/$payload-response.csv" >>"$results_directory/latency.csv" ' < "$results_directory/$payload-response.csv" >> "$results_directory/latency.csv"
# Delete scratch file used for sorting/counting # Delete scratch file used for sorting/counting
# rm -rf "$results_directory/$payload-response.csv" # rm -rf "$results_directory/$payload-response.csv"
done done
# Transform csvs to dat files for gnuplot # Transform csvs to dat files for gnuplot
# Convert each metric csv into a gnuplot-friendly .dat table:
# a leading "#" marker, commas replaced by spaces, columns aligned.
for file in success latency throughput; do
	{
		printf "#"
		tr ',' ' ' < "$results_directory/$file.csv" | column -t
	} > "$results_directory/$file.dat"
done
# Generate gnuplots. Commented out because we don't have *.gnuplots defined # Generate gnuplots. Commented out because we don't have *.gnuplots defined

@ -15,7 +15,7 @@ results_directory="$experiment_directory/res/$timestamp"
log=log.txt log=log.txt
mkdir -p "$results_directory" mkdir -p "$results_directory"
log_environment >>"$results_directory/$log" log_environment >> "$results_directory/$log"
inputs=(40 10) inputs=(40 10)
duration_sec=60 duration_sec=60
@ -41,45 +41,45 @@ offset=5
# Generate *.csv and *.dat results # Generate *.csv and *.dat results
echo -n "Parsing Results: " echo -n "Parsing Results: "
printf "Payload,Success_Rate\n" >>"$results_directory/success.csv" printf "Payload,Success_Rate\n" >> "$results_directory/success.csv"
printf "Payload,Throughput\n" >>"$results_directory/throughput.csv" printf "Payload,Throughput\n" >> "$results_directory/throughput.csv"
printf "Payload,p50,p90,p99,p100\n" >>"$results_directory/latency.csv" printf "Payload,p50,p90,p99,p100\n" >> "$results_directory/latency.csv"
deadlines_ms=(20 20000) deadlines_ms=(20 20000)
payloads=(fib10-con fib40-con) payloads=(fib10-con fib40-con)
durations_s=(60 70) durations_s=(60 70)
for ((i = 0; i < 2; i++)); do for ((i = 0; i < 2; i++)); do
payload=${payloads[$i]} payload=${payloads[$i]}
deadline=${deadlines_ms[$i]} deadline=${deadlines_ms[$i]}
duration=${durations_s[$i]} duration=${durations_s[$i]}
# Get Number of Requests # Get Number of Requests
requests=$(($(wc -l <"$results_directory/$payload.csv") - 1)) requests=$(($(wc -l < "$results_directory/$payload.csv") - 1))
((requests == 0)) && continue ((requests == 0)) && continue
# Calculate Success Rate for csv # Calculate Success Rate for csv
awk -F, ' awk -F, '
$7 == 200 {denom++} $7 == 200 {denom++}
$7 == 200 && ($1 * 1000) <= '"$deadline"' {ok++} $7 == 200 && ($1 * 1000) <= '"$deadline"' {ok++}
END{printf "'"$payload"',%3.5f%\n", (ok / denom * 100)} END{printf "'"$payload"',%3.5f%\n", (ok / denom * 100)}
' <"$results_directory/$payload.csv" >>"$results_directory/success.csv" ' < "$results_directory/$payload.csv" >> "$results_directory/success.csv"
# Filter on 200s, convery from s to ms, and sort # Filter on 200s, convery from s to ms, and sort
awk -F, '$7 == 200 {print ($1 * 1000)}' <"$results_directory/$payload.csv" | awk -F, '$7 == 200 {print ($1 * 1000)}' < "$results_directory/$payload.csv" \
sort -g >"$results_directory/$payload-response.csv" | sort -g > "$results_directory/$payload-response.csv"
# Get Number of 200s # Get Number of 200s
oks=$(wc -l <"$results_directory/$payload-response.csv") oks=$(wc -l < "$results_directory/$payload-response.csv")
((oks == 0)) && continue # If all errors, skip line ((oks == 0)) && continue # If all errors, skip line
# Get Latest Timestamp # Get Latest Timestamp
# duration=$(tail -n1 "$results_directory/$payload.csv" | cut -d, -f8) # duration=$(tail -n1 "$results_directory/$payload.csv" | cut -d, -f8)
throughput=$(echo "$oks/$duration" | bc) throughput=$(echo "$oks/$duration" | bc)
printf "%s,%f\n" "$payload" "$throughput" >>"$results_directory/throughput.csv" printf "%s,%f\n" "$payload" "$throughput" >> "$results_directory/throughput.csv"
# Generate Latency Data for csv # Generate Latency Data for csv
awk ' awk '
BEGIN { BEGIN {
sum = 0 sum = 0
p50 = int('"$oks"' * 0.5) p50 = int('"$oks"' * 0.5)
@ -92,16 +92,16 @@ for ((i = 0; i < 2; i++)); do
NR==p90 {printf "%1.4f,", $0} NR==p90 {printf "%1.4f,", $0}
NR==p99 {printf "%1.4f,", $0} NR==p99 {printf "%1.4f,", $0}
NR==p100 {printf "%1.4f\n", $0} NR==p100 {printf "%1.4f\n", $0}
' <"$results_directory/$payload-response.csv" >>"$results_directory/latency.csv" ' < "$results_directory/$payload-response.csv" >> "$results_directory/latency.csv"
# Delete scratch file used for sorting/counting # Delete scratch file used for sorting/counting
# rm -rf "$results_directory/$payload-response.csv" # rm -rf "$results_directory/$payload-response.csv"
done done
# Transform csvs to dat files for gnuplot # Transform csvs to dat files for gnuplot
for file in success latency throughput; do for file in success latency throughput; do
echo -n "#" >"$results_directory/$file.dat" echo -n "#" > "$results_directory/$file.dat"
tr ',' ' ' <"$results_directory/$file.csv" | column -t >>"$results_directory/$file.dat" tr ',' ' ' < "$results_directory/$file.csv" | column -t >> "$results_directory/$file.dat"
done done
# Generate gnuplots. Commented out because we don't have *.gnuplots defined # Generate gnuplots. Commented out because we don't have *.gnuplots defined

@ -11,87 +11,87 @@ binary_directory=$(cd ../../bin && pwd)
schedulers=(EDF FIFO) schedulers=(EDF FIFO)
for scheduler in ${schedulers[*]}; do for scheduler in ${schedulers[*]}; do
results_directory="$experiment_directory/res/$timestamp/$scheduler" results_directory="$experiment_directory/res/$timestamp/$scheduler"
log=log.txt log=log.txt
mkdir -p "$results_directory" mkdir -p "$results_directory"
log_environment >>"$results_directory/$log" log_environment >> "$results_directory/$log"
# Start the runtime # Start the runtime
if [ "$1" != "-d" ]; then if [ "$1" != "-d" ]; then
SLEDGE_NWORKERS=5 SLEDGE_SCHEDULER=$scheduler PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" >>"$results_directory/$log" 2>>"$results_directory/$log" & SLEDGE_NWORKERS=5 SLEDGE_SCHEDULER=$scheduler PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" >> "$results_directory/$log" 2>> "$results_directory/$log" &
sleep 1 sleep 1
else else
echo "Running under gdb" echo "Running under gdb"
echo "Running under gdb" >>"$results_directory/$log" echo "Running under gdb" >> "$results_directory/$log"
fi fi
inputs=(40 10) inputs=(40 10)
duration_sec=15 duration_sec=15
offset=5 offset=5
# Execute workloads long enough for runtime to learn excepted execution time # Execute workloads long enough for runtime to learn excepted execution time
echo -n "Running Samples: " echo -n "Running Samples: "
for input in ${inputs[*]}; do for input in ${inputs[*]}; do
hey -z ${duration_sec}s -cpus 3 -t 0 -o csv -m GET -d "$input\n" http://localhost:$((10000 + input)) hey -z ${duration_sec}s -cpus 3 -t 0 -o csv -m GET -d "$input\n" http://localhost:$((10000 + input))
done done
echo "[DONE]" echo "[DONE]"
sleep 5 sleep 5
echo "Running Experiments" echo "Running Experiments"
# Run each separately # Run each separately
hey -z ${duration_sec}s -cpus 4 -c 100 -t 0 -o csv -m GET -d "40\n" http://localhost:10040 >"$results_directory/fib40.csv" hey -z ${duration_sec}s -cpus 4 -c 100 -t 0 -o csv -m GET -d "40\n" http://localhost:10040 > "$results_directory/fib40.csv"
hey -z ${duration_sec}s -cpus 4 -c 100 -t 0 -o csv -m GET -d "10\n" http://localhost:10010 >"$results_directory/fib10.csv" hey -z ${duration_sec}s -cpus 4 -c 100 -t 0 -o csv -m GET -d "10\n" http://localhost:10010 > "$results_directory/fib10.csv"
# Run lower priority first, then higher priority. The lower priority has offsets to ensure it runs the entire time the high priority is trying to run # Run lower priority first, then higher priority. The lower priority has offsets to ensure it runs the entire time the high priority is trying to run
hey -z $((duration_sec + 2 * offset))s -cpus 2 -c 100 -t 0 -o csv -m GET -d "40\n" http://localhost:10040 >"$results_directory/fib40-con.csv" & hey -z $((duration_sec + 2 * offset))s -cpus 2 -c 100 -t 0 -o csv -m GET -d "40\n" http://localhost:10040 > "$results_directory/fib40-con.csv" &
sleep $offset sleep $offset
hey -z ${duration_sec}s -cpus 2 -c 100 -t 0 -o csv -m GET -d "10\n" http://localhost:10010 >"$results_directory/fib10-con.csv" & hey -z ${duration_sec}s -cpus 2 -c 100 -t 0 -o csv -m GET -d "10\n" http://localhost:10010 > "$results_directory/fib10-con.csv" &
sleep $((duration_sec + offset + 15)) sleep $((duration_sec + offset + 15))
# Stop the runtime if not in debug mode # Stop the runtime if not in debug mode
[ "$1" != "-d" ] && kill_runtime [ "$1" != "-d" ] && kill_runtime
# Generate *.csv and *.dat results # Generate *.csv and *.dat results
echo -n "Parsing Results: " echo -n "Parsing Results: "
printf "Payload,Success_Rate\n" >>"$results_directory/success.csv" printf "Payload,Success_Rate\n" >> "$results_directory/success.csv"
printf "Payload,Throughput\n" >>"$results_directory/throughput.csv" printf "Payload,Throughput\n" >> "$results_directory/throughput.csv"
printf "Payload,p50,p90,p99,p100\n" >>"$results_directory/latency.csv" printf "Payload,p50,p90,p99,p100\n" >> "$results_directory/latency.csv"
deadlines_ms=(2 2 3000 3000) deadlines_ms=(2 2 3000 3000)
payloads=(fib10 fib10-con fib40 fib40-con) payloads=(fib10 fib10-con fib40 fib40-con)
for ((i = 0; i < 4; i++)); do for ((i = 0; i < 4; i++)); do
# for payload in ${payloads[*]}; do # for payload in ${payloads[*]}; do
payload=${payloads[$i]} payload=${payloads[$i]}
deadline=${deadlines_ms[$i]} deadline=${deadlines_ms[$i]}
# Get Number of Requests # Get Number of Requests
requests=$(($(wc -l <"$results_directory/$payload.csv") - 1)) requests=$(($(wc -l < "$results_directory/$payload.csv") - 1))
((requests == 0)) && continue ((requests == 0)) && continue
# Calculate Success Rate for csv # Calculate Success Rate for csv
awk -F, ' awk -F, '
$7 == 200 && ($1 * 1000) <= '"$deadline"' {ok++} $7 == 200 && ($1 * 1000) <= '"$deadline"' {ok++}
END{printf "'"$payload"',%3.5f%\n", (ok / (NR - 1) * 100)} END{printf "'"$payload"',%3.5f%\n", (ok / (NR - 1) * 100)}
' <"$results_directory/$payload.csv" >>"$results_directory/success.csv" ' < "$results_directory/$payload.csv" >> "$results_directory/success.csv"
# Filter on 200s, convery from s to ms, and sort # Filter on 200s, convery from s to ms, and sort
awk -F, '$7 == 200 {print ($1 * 1000)}' <"$results_directory/$payload.csv" | awk -F, '$7 == 200 {print ($1 * 1000)}' < "$results_directory/$payload.csv" \
sort -g >"$results_directory/$payload-response.csv" | sort -g > "$results_directory/$payload-response.csv"
# Get Number of 200s # Get Number of 200s
oks=$(wc -l <"$results_directory/$payload-response.csv") oks=$(wc -l < "$results_directory/$payload-response.csv")
((oks == 0)) && continue # If all errors, skip line ((oks == 0)) && continue # If all errors, skip line
# Get Latest Timestamp # Get Latest Timestamp
duration=$(tail -n1 "$results_directory/$payload.csv" | cut -d, -f8) duration=$(tail -n1 "$results_directory/$payload.csv" | cut -d, -f8)
throughput=$(echo "$oks/$duration" | bc) throughput=$(echo "$oks/$duration" | bc)
printf "%s,%f\n" "$payload" "$throughput" >>"$results_directory/throughput.csv" printf "%s,%f\n" "$payload" "$throughput" >> "$results_directory/throughput.csv"
# Generate Latency Data for csv # Generate Latency Data for csv
awk ' awk '
BEGIN { BEGIN {
sum = 0 sum = 0
p50 = int('"$oks"' * 0.5) p50 = int('"$oks"' * 0.5)
@ -104,21 +104,21 @@ for scheduler in ${schedulers[*]}; do
NR==p90 {printf "%1.4f,", $0} NR==p90 {printf "%1.4f,", $0}
NR==p99 {printf "%1.4f,", $0} NR==p99 {printf "%1.4f,", $0}
NR==p100 {printf "%1.4f\n", $0} NR==p100 {printf "%1.4f\n", $0}
' <"$results_directory/$payload-response.csv" >>"$results_directory/latency.csv" ' < "$results_directory/$payload-response.csv" >> "$results_directory/latency.csv"
# Delete scratch file used for sorting/counting # Delete scratch file used for sorting/counting
# rm -rf "$results_directory/$payload-response.csv" # rm -rf "$results_directory/$payload-response.csv"
done done
# Transform csvs to dat files for gnuplot # Transform csvs to dat files for gnuplot
for file in success latency throughput; do for file in success latency throughput; do
echo -n "#" >"$results_directory/$file.dat" echo -n "#" > "$results_directory/$file.dat"
tr ',' ' ' <"$results_directory/$file.csv" | column -t >>"$results_directory/$file.dat" tr ',' ' ' < "$results_directory/$file.csv" | column -t >> "$results_directory/$file.dat"
done done
# Generate gnuplots. Commented out because we don't have *.gnuplots defined # Generate gnuplots. Commented out because we don't have *.gnuplots defined
# generate_gnuplots # generate_gnuplots
# Cleanup, if requires # Cleanup, if requires
echo "[DONE]" echo "[DONE]"
done done

@ -11,87 +11,87 @@ binary_directory=$(cd ../../bin && pwd)
schedulers=(EDF FIFO) schedulers=(EDF FIFO)
for scheduler in ${schedulers[*]}; do for scheduler in ${schedulers[*]}; do
results_directory="$experiment_directory/res/$timestamp/$scheduler" results_directory="$experiment_directory/res/$timestamp/$scheduler"
log=log.txt log=log.txt
mkdir -p "$results_directory" mkdir -p "$results_directory"
log_environment >>"$results_directory/$log" log_environment >> "$results_directory/$log"
# Start the runtime # Start the runtime
if [ "$1" != "-d" ]; then if [ "$1" != "-d" ]; then
SLEDGE_NWORKERS=5 SLEDGE_SCHEDULER=$scheduler PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" >>"$results_directory/$log" 2>>"$results_directory/$log" & SLEDGE_NWORKERS=5 SLEDGE_SCHEDULER=$scheduler PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" >> "$results_directory/$log" 2>> "$results_directory/$log" &
sleep 1 sleep 1
else else
echo "Running under gdb" echo "Running under gdb"
echo "Running under gdb" >>"$results_directory/$log" echo "Running under gdb" >> "$results_directory/$log"
fi fi
inputs=(40 10) inputs=(40 10)
duration_sec=15 duration_sec=15
offset=5 offset=5
# Execute workloads long enough for runtime to learn excepted execution time # Execute workloads long enough for runtime to learn excepted execution time
echo -n "Running Samples: " echo -n "Running Samples: "
for input in ${inputs[*]}; do for input in ${inputs[*]}; do
hey -z ${duration_sec}s -cpus 3 -t 0 -o csv -m GET -d "$input\n" http://localhost:$((10000 + input)) hey -z ${duration_sec}s -cpus 3 -t 0 -o csv -m GET -d "$input\n" http://localhost:$((10000 + input))
done done
echo "[DONE]" echo "[DONE]"
sleep 5 sleep 5
echo "Running Experiments" echo "Running Experiments"
# Run each separately # Run each separately
hey -z ${duration_sec}s -cpus 4 -c 100 -t 0 -o csv -m GET -d "40\n" http://localhost:10040 >"$results_directory/fib40.csv" hey -z ${duration_sec}s -cpus 4 -c 100 -t 0 -o csv -m GET -d "40\n" http://localhost:10040 > "$results_directory/fib40.csv"
hey -z ${duration_sec}s -cpus 4 -c 100 -t 0 -o csv -m GET -d "10\n" http://localhost:10010 >"$results_directory/fib10.csv" hey -z ${duration_sec}s -cpus 4 -c 100 -t 0 -o csv -m GET -d "10\n" http://localhost:10010 > "$results_directory/fib10.csv"
# Run lower priority first, then higher priority. The lower priority has offsets to ensure it runs the entire time the high priority is trying to run # Run lower priority first, then higher priority. The lower priority has offsets to ensure it runs the entire time the high priority is trying to run
hey -z $((duration_sec + 2 * offset))s -cpus 2 -c 100 -t 0 -o csv -m GET -d "40\n" http://localhost:10040 >"$results_directory/fib40-con.csv" & hey -z $((duration_sec + 2 * offset))s -cpus 2 -c 100 -t 0 -o csv -m GET -d "40\n" http://localhost:10040 > "$results_directory/fib40-con.csv" &
sleep $offset sleep $offset
hey -z ${duration_sec}s -cpus 2 -c 100 -t 0 -o csv -m GET -d "10\n" http://localhost:10010 >"$results_directory/fib10-con.csv" & hey -z ${duration_sec}s -cpus 2 -c 100 -t 0 -o csv -m GET -d "10\n" http://localhost:10010 > "$results_directory/fib10-con.csv" &
sleep $((duration_sec + offset + 15)) sleep $((duration_sec + offset + 15))
# Stop the runtime if not in debug mode # Stop the runtime if not in debug mode
[ "$1" != "-d" ] && kill_runtime [ "$1" != "-d" ] && kill_runtime
# Generate *.csv and *.dat results # Generate *.csv and *.dat results
echo -n "Parsing Results: " echo -n "Parsing Results: "
printf "Payload,Success_Rate\n" >>"$results_directory/success.csv" printf "Payload,Success_Rate\n" >> "$results_directory/success.csv"
printf "Payload,Throughput\n" >>"$results_directory/throughput.csv" printf "Payload,Throughput\n" >> "$results_directory/throughput.csv"
printf "Payload,p50,p90,p99,p100\n" >>"$results_directory/latency.csv" printf "Payload,p50,p90,p99,p100\n" >> "$results_directory/latency.csv"
deadlines_ms=(2 2 3000 3000) deadlines_ms=(2 2 3000 3000)
payloads=(fib10 fib10-con fib40 fib40-con) payloads=(fib10 fib10-con fib40 fib40-con)
for ((i = 0; i < 4; i++)); do for ((i = 0; i < 4; i++)); do
# for payload in ${payloads[*]}; do # for payload in ${payloads[*]}; do
payload=${payloads[$i]} payload=${payloads[$i]}
deadline=${deadlines_ms[$i]} deadline=${deadlines_ms[$i]}
# Get Number of Requests # Get Number of Requests
requests=$(($(wc -l <"$results_directory/$payload.csv") - 1)) requests=$(($(wc -l < "$results_directory/$payload.csv") - 1))
((requests == 0)) && continue ((requests == 0)) && continue
# Calculate Success Rate for csv # Calculate Success Rate for csv
awk -F, ' awk -F, '
$7 == 200 && ($1 * 1000) <= '"$deadline"' {ok++} $7 == 200 && ($1 * 1000) <= '"$deadline"' {ok++}
END{printf "'"$payload"',%3.5f%\n", (ok / (NR - 1) * 100)} END{printf "'"$payload"',%3.5f%\n", (ok / (NR - 1) * 100)}
' <"$results_directory/$payload.csv" >>"$results_directory/success.csv" ' < "$results_directory/$payload.csv" >> "$results_directory/success.csv"
# Filter on 200s, convery from s to ms, and sort # Filter on 200s, convery from s to ms, and sort
awk -F, '$7 == 200 {print ($1 * 1000)}' <"$results_directory/$payload.csv" | awk -F, '$7 == 200 {print ($1 * 1000)}' < "$results_directory/$payload.csv" \
sort -g >"$results_directory/$payload-response.csv" | sort -g > "$results_directory/$payload-response.csv"
# Get Number of 200s # Get Number of 200s
oks=$(wc -l <"$results_directory/$payload-response.csv") oks=$(wc -l < "$results_directory/$payload-response.csv")
((oks == 0)) && continue # If all errors, skip line ((oks == 0)) && continue # If all errors, skip line
# Get Latest Timestamp # Get Latest Timestamp
duration=$(tail -n1 "$results_directory/$payload.csv" | cut -d, -f8) duration=$(tail -n1 "$results_directory/$payload.csv" | cut -d, -f8)
throughput=$(echo "$oks/$duration" | bc) throughput=$(echo "$oks/$duration" | bc)
printf "%s,%f\n" "$payload" "$throughput" >>"$results_directory/throughput.csv" printf "%s,%f\n" "$payload" "$throughput" >> "$results_directory/throughput.csv"
# Generate Latency Data for csv # Generate Latency Data for csv
awk ' awk '
BEGIN { BEGIN {
sum = 0 sum = 0
p50 = int('"$oks"' * 0.5) p50 = int('"$oks"' * 0.5)
@ -104,21 +104,21 @@ for scheduler in ${schedulers[*]}; do
NR==p90 {printf "%1.4f%,", $0 / '"$deadline"' * 100} NR==p90 {printf "%1.4f%,", $0 / '"$deadline"' * 100}
NR==p99 {printf "%1.4f%,", $0 / '"$deadline"' * 100} NR==p99 {printf "%1.4f%,", $0 / '"$deadline"' * 100}
NR==p100 {printf "%1.4f%\n", $0 / '"$deadline"' * 100} NR==p100 {printf "%1.4f%\n", $0 / '"$deadline"' * 100}
' <"$results_directory/$payload-response.csv" >>"$results_directory/latency.csv" ' < "$results_directory/$payload-response.csv" >> "$results_directory/latency.csv"
# Delete scratch file used for sorting/counting # Delete scratch file used for sorting/counting
# rm -rf "$results_directory/$payload-response.csv" # rm -rf "$results_directory/$payload-response.csv"
done done
# Transform csvs to dat files for gnuplot # Transform csvs to dat files for gnuplot
for file in success latency throughput; do for file in success latency throughput; do
echo -n "#" >"$results_directory/$file.dat" echo -n "#" > "$results_directory/$file.dat"
tr ',' ' ' <"$results_directory/$file.csv" | column -t >>"$results_directory/$file.dat" tr ',' ' ' < "$results_directory/$file.csv" | column -t >> "$results_directory/$file.dat"
done done
# Generate gnuplots. Commented out because we don't have *.gnuplots defined # Generate gnuplots. Commented out because we don't have *.gnuplots defined
# generate_gnuplots # generate_gnuplots
# Cleanup, if requires # Cleanup, if requires
echo "[DONE]" echo "[DONE]"
done done

@ -1,12 +1,12 @@
#!/bin/bash #!/bin/bash
# Generates payloads of 1KB, 10KB, 100KB, 1MB # Generates payloads of 1KB, 10KB, 100KB, 1MB
for size in 1024 $((1024 * 10)) $((1024 * 100)) $((1024 * 1024)); do for size in 1024 $((1024 * 10)) $((1024 * 100)) $((1024 * 1024)); do
rm -rf $size.txt rm -rf $size.txt
i=0 i=0
echo -n "Generating $size:" echo -n "Generating $size:"
while ((i < size)); do while ((i < size)); do
printf 'a' >>$size.txt printf 'a' >> $size.txt
((i++)) ((i++))
done done
echo "[DONE]" echo "[DONE]"
done done

@ -12,8 +12,8 @@ export LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH"
export PATH="$binary_directory:$PATH" export PATH="$binary_directory:$PATH"
gdb --eval-command="handle SIGUSR1 nostop" \ gdb --eval-command="handle SIGUSR1 nostop" \
--eval-command="handle SIGPIPE nostop" \ --eval-command="handle SIGPIPE nostop" \
--eval-command="set pagination off" \ --eval-command="set pagination off" \
--eval-command="set substitute-path /sledge/runtime $project_directory" \ --eval-command="set substitute-path /sledge/runtime $project_directory" \
--eval-command="run $experiment_directory/spec.json" \ --eval-command="run $experiment_directory/spec.json" \
sledgert sledgert

@ -12,15 +12,15 @@ log=log.txt
mkdir -p "$results_directory" mkdir -p "$results_directory"
log_environment >>"$results_directory/$log" log_environment >> "$results_directory/$log"
# Start the runtime # Start the runtime
if [ "$1" != "-d" ]; then if [ "$1" != "-d" ]; then
PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" >>"$results_directory/$log" 2>>"$results_directory/$log" & PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" >> "$results_directory/$log" 2>> "$results_directory/$log" &
sleep 1 sleep 1
else else
echo "Running under gdb" echo "Running under gdb"
echo "Running under gdb" >>"$results_directory/$log" echo "Running under gdb" >> "$results_directory/$log"
fi fi
payloads=(1024 10240 102400 1048576) payloads=(1024 10240 102400 1048576)
@ -29,15 +29,15 @@ iterations=10000
# If the one of the expected body files doesn't exist, trigger the generation script. # If the one of the expected body files doesn't exist, trigger the generation script.
for payload in ${payloads[*]}; do for payload in ${payloads[*]}; do
if test -f "$experiment_directory/body/$payload.txt"; then if test -f "$experiment_directory/body/$payload.txt"; then
continue continue
else else
echo "Generating Payloads: " echo "Generating Payloads: "
{ {
cd "$experiment_directory/body" && ./generate.sh cd "$experiment_directory/body" && ./generate.sh
} }
break break
fi fi
done done
# Execute workloads long enough for runtime to learn excepted execution time # Execute workloads long enough for runtime to learn excepted execution time
@ -52,46 +52,46 @@ echo "[DONE]"
# Execute the experiments # Execute the experiments
echo "Running Experiments" echo "Running Experiments"
for i in {0..3}; do for i in {0..3}; do
printf "\t%d Payload: " "${payloads[$i]}" printf "\t%d Payload: " "${payloads[$i]}"
hey -n "$iterations" -c 1 -cpus 2 -o csv -m GET -D "$experiment_directory/body/${payloads[$i]}.txt" http://localhost:"${ports[$i]}" >"$results_directory/${payloads[$i]}.csv" hey -n "$iterations" -c 1 -cpus 2 -o csv -m GET -D "$experiment_directory/body/${payloads[$i]}.txt" http://localhost:"${ports[$i]}" > "$results_directory/${payloads[$i]}.csv"
echo "[DONE]" echo "[DONE]"
done done
# Stop the runtime # Stop the runtime
if [ "$1" != "-d" ]; then if [ "$1" != "-d" ]; then
sleep 5 sleep 5
kill_runtime kill_runtime
fi fi
# Generate *.csv and *.dat results # Generate *.csv and *.dat results
echo -n "Parsing Results: " echo -n "Parsing Results: "
printf "Payload,Success_Rate\n" >>"$results_directory/success.csv" printf "Payload,Success_Rate\n" >> "$results_directory/success.csv"
printf "Payload,Throughput\n" >>"$results_directory/throughput.csv" printf "Payload,Throughput\n" >> "$results_directory/throughput.csv"
printf "Payload,p50,p90,p99,p100\n" >>"$results_directory/latency.csv" printf "Payload,p50,p90,p99,p100\n" >> "$results_directory/latency.csv"
for payload in ${payloads[*]}; do for payload in ${payloads[*]}; do
# Calculate Success Rate for csv # Calculate Success Rate for csv
awk -F, ' awk -F, '
$7 == 200 {ok++} $7 == 200 {ok++}
END{printf "'"$payload"',%3.5f\n", (ok / '"$iterations"' * 100)} END{printf "'"$payload"',%3.5f\n", (ok / '"$iterations"' * 100)}
' <"$results_directory/$payload.csv" >>"$results_directory/success.csv" ' < "$results_directory/$payload.csv" >> "$results_directory/success.csv"
# Filter on 200s, convery from s to ms, and sort # Filter on 200s, convery from s to ms, and sort
awk -F, '$7 == 200 {print ($1 * 1000)}' <"$results_directory/$payload.csv" | awk -F, '$7 == 200 {print ($1 * 1000)}' < "$results_directory/$payload.csv" \
sort -g >"$results_directory/$payload-response.csv" | sort -g > "$results_directory/$payload-response.csv"
# Get Number of 200s # Get Number of 200s
oks=$(wc -l <"$results_directory/$payload-response.csv") oks=$(wc -l < "$results_directory/$payload-response.csv")
((oks == 0)) && continue # If all errors, skip line ((oks == 0)) && continue # If all errors, skip line
# Get Latest Timestamp # Get Latest Timestamp
duration=$(tail -n1 "$results_directory/$payload.csv" | cut -d, -f8) duration=$(tail -n1 "$results_directory/$payload.csv" | cut -d, -f8)
throughput=$(echo "$oks/$duration" | bc) throughput=$(echo "$oks/$duration" | bc)
printf "%d,%f\n" "$payload" "$throughput" >>"$results_directory/throughput.csv" printf "%d,%f\n" "$payload" "$throughput" >> "$results_directory/throughput.csv"
# Generate Latency Data for csv # Generate Latency Data for csv
awk ' awk '
BEGIN { BEGIN {
sum = 0 sum = 0
p50 = int('"$oks"' * 0.5) p50 = int('"$oks"' * 0.5)
@ -104,16 +104,16 @@ for payload in ${payloads[*]}; do
NR==p90 {printf "%1.4f,", $0} NR==p90 {printf "%1.4f,", $0}
NR==p99 {printf "%1.4f,", $0} NR==p99 {printf "%1.4f,", $0}
NR==p100 {printf "%1.4f\n", $0} NR==p100 {printf "%1.4f\n", $0}
' <"$results_directory/$payload-response.csv" >>"$results_directory/latency.csv" ' < "$results_directory/$payload-response.csv" >> "$results_directory/latency.csv"
# Delete scratch file used for sorting/counting # Delete scratch file used for sorting/counting
rm -rf "$results_directory/$payload-response.csv" rm -rf "$results_directory/$payload-response.csv"
done done
# Transform csvs to dat files for gnuplot # Transform csvs to dat files for gnuplot
for file in success latency throughput; do for file in success latency throughput; do
echo -n "#" >"$results_directory/$file.dat" echo -n "#" > "$results_directory/$file.dat"
tr ',' ' ' <"$results_directory/$file.csv" | column -t >>"$results_directory/$file.dat" tr ',' ' ' < "$results_directory/$file.csv" | column -t >> "$results_directory/$file.dat"
done done
# Generate gnuplots # Generate gnuplots

@ -12,7 +12,7 @@ results_directory="$experiment_directory/res/$timestamp/$scheduler"
log=log.txt log=log.txt
mkdir -p "$results_directory" mkdir -p "$results_directory"
log_environment >>"$results_directory/$log" log_environment >> "$results_directory/$log"
# Start the runtime # Start the runtime
PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" | tee -a "$results_directory/$log" PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" | tee -a "$results_directory/$log"

@ -20,66 +20,66 @@ offset=5
# Execute workloads long enough for runtime to learn excepted execution time # Execute workloads long enough for runtime to learn excepted execution time
echo -n "Running Samples: " echo -n "Running Samples: "
for input in ${inputs[*]}; do for input in ${inputs[*]}; do
hey -n 45 -c 4 -t 0 -o csv -m GET -d "$input\n" http://"$host":$((10000 + input)) hey -n 45 -c 4 -t 0 -o csv -m GET -d "$input\n" http://"$host":$((10000 + input))
done done
echo "[DONE]" echo "[DONE]"
sleep 30 sleep 30
echo "Running Experiments" echo "Running Experiments"
# Run each separately # Run each separately
hey -z ${duration_sec}s -cpus 6 -c 100 -t 0 -o csv -m GET -d "10\n" "http://$host:10010" >"$results_directory/fib10.csv" hey -z ${duration_sec}s -cpus 6 -c 100 -t 0 -o csv -m GET -d "10\n" "http://$host:10010" > "$results_directory/fib10.csv"
echo "fib(10) Complete" echo "fib(10) Complete"
sleep 60 sleep 60
hey -z ${duration_sec}s -cpus 6 -c 100 -t 0 -o csv -m GET -d "40\n" "http://$host:10040" >"$results_directory/fib40.csv" hey -z ${duration_sec}s -cpus 6 -c 100 -t 0 -o csv -m GET -d "40\n" "http://$host:10040" > "$results_directory/fib40.csv"
echo "fib(40) Complete" echo "fib(40) Complete"
sleep 120 sleep 120
# Run lower priority first, then higher priority. The lower priority has offsets to ensure it runs the entire time the high priority is trying to run # Run lower priority first, then higher priority. The lower priority has offsets to ensure it runs the entire time the high priority is trying to run
hey -z $((duration_sec + 2 * offset))s -cpus 3 -c 100 -t 0 -o csv -m GET -d "40\n" "http://$host:10040" >"$results_directory/fib40-con.csv" & hey -z $((duration_sec + 2 * offset))s -cpus 3 -c 100 -t 0 -o csv -m GET -d "40\n" "http://$host:10040" > "$results_directory/fib40-con.csv" &
sleep $offset sleep $offset
hey -z ${duration_sec}s -cpus 3 -c 100 -t 0 -o csv -m GET -d "10\n" "http://$host:10010" >"$results_directory/fib10-con.csv" & hey -z ${duration_sec}s -cpus 3 -c 100 -t 0 -o csv -m GET -d "10\n" "http://$host:10010" > "$results_directory/fib10-con.csv" &
sleep $((duration_sec + offset + 15)) sleep $((duration_sec + offset + 15))
echo "fib(10) & fib(40) Complete" echo "fib(10) & fib(40) Complete"
# Generate *.csv and *.dat results # Generate *.csv and *.dat results
echo -n "Parsing Results: " echo -n "Parsing Results: "
printf "Payload,Success_Rate\n" >>"$results_directory/success.csv" printf "Payload,Success_Rate\n" >> "$results_directory/success.csv"
printf "Payload,Throughput\n" >>"$results_directory/throughput.csv" printf "Payload,Throughput\n" >> "$results_directory/throughput.csv"
printf "Payload,p50,p90,p99,p100\n" >>"$results_directory/latency.csv" printf "Payload,p50,p90,p99,p100\n" >> "$results_directory/latency.csv"
durations_s=(15 15 15 25) durations_s=(15 15 15 25)
payloads=(fib10 fib10-con fib40 fib40-con) payloads=(fib10 fib10-con fib40 fib40-con)
for payload in ${payloads[*]}; do for payload in ${payloads[*]}; do
# Get Number of Requests # Get Number of Requests
requests=$(($(wc -l <"$results_directory/$payload.csv") - 1)) requests=$(($(wc -l < "$results_directory/$payload.csv") - 1))
((requests == 0)) && continue ((requests == 0)) && continue
duration=${durations_s[$i]} duration=${durations_s[$i]}
# Calculate Success Rate for csv # Calculate Success Rate for csv
awk -F, ' awk -F, '
$7 == 200 {ok++} $7 == 200 {ok++}
END{printf "'"$payload"',%3.5f%\n", (ok / (NR - 1) * 100)} END{printf "'"$payload"',%3.5f%\n", (ok / (NR - 1) * 100)}
' <"$results_directory/$payload.csv" >>"$results_directory/success.csv" ' < "$results_directory/$payload.csv" >> "$results_directory/success.csv"
# Filter on 200s, convery from s to ms, and sort # Filter on 200s, convery from s to ms, and sort
awk -F, '$7 == 200 {print ($1 * 1000)}' <"$results_directory/$payload.csv" | awk -F, '$7 == 200 {print ($1 * 1000)}' < "$results_directory/$payload.csv" \
sort -g >"$results_directory/$payload-response.csv" | sort -g > "$results_directory/$payload-response.csv"
# Get Number of 200s # Get Number of 200s
oks=$(wc -l <"$results_directory/$payload-response.csv") oks=$(wc -l < "$results_directory/$payload-response.csv")
((oks == 0)) && continue # If all errors, skip line ((oks == 0)) && continue # If all errors, skip line
# Get Latest Timestamp # Get Latest Timestamp
# duration=$(tail -n1 "$results_directory/$payload.csv" | cut -d, -f8) # duration=$(tail -n1 "$results_directory/$payload.csv" | cut -d, -f8)
throughput=$(echo "$oks/$duration" | bc) throughput=$(echo "$oks/$duration" | bc)
printf "%s,%f\n" "$payload" "$throughput" >>"$results_directory/throughput.csv" printf "%s,%f\n" "$payload" "$throughput" >> "$results_directory/throughput.csv"
# Generate Latency Data for csv # Generate Latency Data for csv
awk ' awk '
BEGIN { BEGIN {
sum = 0 sum = 0
p50 = int('"$oks"' * 0.5) p50 = int('"$oks"' * 0.5)
@ -92,16 +92,16 @@ for payload in ${payloads[*]}; do
NR==p90 {printf "%1.4f,", $0} NR==p90 {printf "%1.4f,", $0}
NR==p99 {printf "%1.4f,", $0} NR==p99 {printf "%1.4f,", $0}
NR==p100 {printf "%1.4f\n", $0} NR==p100 {printf "%1.4f\n", $0}
' <"$results_directory/$payload-response.csv" >>"$results_directory/latency.csv" ' < "$results_directory/$payload-response.csv" >> "$results_directory/latency.csv"
# Delete scratch file used for sorting/counting # Delete scratch file used for sorting/counting
# rm -rf "$results_directory/$payload-response.csv" # rm -rf "$results_directory/$payload-response.csv"
done done
# Transform csvs to dat files for gnuplot # Transform csvs to dat files for gnuplot
for file in success latency throughput; do for file in success latency throughput; do
echo -n "#" >"$results_directory/$file.dat" echo -n "#" > "$results_directory/$file.dat"
tr ',' ' ' <"$results_directory/$file.csv" | column -t >>"$results_directory/$file.dat" tr ',' ' ' < "$results_directory/$file.csv" | column -t >> "$results_directory/$file.dat"
done done
# Generate gnuplots. Commented out because we don't have *.gnuplots defined # Generate gnuplots. Commented out because we don't have *.gnuplots defined

@ -13,8 +13,8 @@ export PATH="$binary_directory:$PATH"
export SLEDGE_SCHEDULER="EDF" export SLEDGE_SCHEDULER="EDF"
gdb --eval-command="handle SIGUSR1 nostop" \ gdb --eval-command="handle SIGUSR1 nostop" \
--eval-command="handle SIGPIPE nostop" \ --eval-command="handle SIGPIPE nostop" \
--eval-command="set pagination off" \ --eval-command="set pagination off" \
--eval-command="set substitute-path /sledge/runtime $project_directory" \ --eval-command="set substitute-path /sledge/runtime $project_directory" \
--eval-command="run $experiment_directory/spec.json" \ --eval-command="run $experiment_directory/spec.json" \
sledgert sledgert

@ -11,41 +11,41 @@ results_directory="$experiment_directory/res/$timestamp"
# Generate *.csv and *.dat results # Generate *.csv and *.dat results
echo -n "Parsing Results: " echo -n "Parsing Results: "
printf "Payload,Success_Rate\n" >>"$results_directory/success.csv" printf "Payload,Success_Rate\n" >> "$results_directory/success.csv"
printf "Payload,Throughput\n" >>"$results_directory/throughput.csv" printf "Payload,Throughput\n" >> "$results_directory/throughput.csv"
printf "Payload,p50,p90,p99,p998,p999,p100\n" >>"$results_directory/latency.csv" printf "Payload,p50,p90,p99,p998,p999,p100\n" >> "$results_directory/latency.csv"
durations_s=(15 15 15 25) durations_s=(15 15 15 25)
payloads=(fib10 fib10-con fib40 fib40-con) payloads=(fib10 fib10-con fib40 fib40-con)
for payload in ${payloads[*]}; do for payload in ${payloads[*]}; do
# Get Number of Requests # Get Number of Requests
requests=$(($(wc -l <"$results_directory/$payload.csv") - 1)) requests=$(($(wc -l < "$results_directory/$payload.csv") - 1))
((requests == 0)) && continue ((requests == 0)) && continue
duration=${durations_s[$i]} duration=${durations_s[$i]}
# Calculate Success Rate for csv # Calculate Success Rate for csv
awk -F, ' awk -F, '
$7 == 200 {ok++} $7 == 200 {ok++}
END{printf "'"$payload"',%3.5f%\n", (ok / (NR - 1) * 100)} END{printf "'"$payload"',%3.5f%\n", (ok / (NR - 1) * 100)}
' <"$results_directory/$payload.csv" >>"$results_directory/success.csv" ' < "$results_directory/$payload.csv" >> "$results_directory/success.csv"
# Filter on 200s, convery from s to ms, and sort # Filter on 200s, convery from s to ms, and sort
awk -F, '$7 == 200 {print ($1 * 1000)}' <"$results_directory/$payload.csv" | awk -F, '$7 == 200 {print ($1 * 1000)}' < "$results_directory/$payload.csv" \
sort -g >"$results_directory/$payload-response.csv" | sort -g > "$results_directory/$payload-response.csv"
# Get Number of 200s # Get Number of 200s
oks=$(wc -l <"$results_directory/$payload-response.csv") oks=$(wc -l < "$results_directory/$payload-response.csv")
((oks == 0)) && continue # If all errors, skip line ((oks == 0)) && continue # If all errors, skip line
# Get Latest Timestamp # Get Latest Timestamp
# duration=$(tail -n1 "$results_directory/$payload.csv" | cut -d, -f8) # duration=$(tail -n1 "$results_directory/$payload.csv" | cut -d, -f8)
throughput=$(echo "$oks/$duration" | bc) throughput=$(echo "$oks/$duration" | bc)
printf "%s,%f\n" "$payload" "$throughput" >>"$results_directory/throughput.csv" printf "%s,%f\n" "$payload" "$throughput" >> "$results_directory/throughput.csv"
# Generate Latency Data for csv # Generate Latency Data for csv
awk ' awk '
BEGIN { BEGIN {
sum = 0 sum = 0
p50 = int('"$oks"' * 0.5) p50 = int('"$oks"' * 0.5)
@ -62,16 +62,16 @@ for payload in ${payloads[*]}; do
NR==p998 {printf "%1.4f,", $0} NR==p998 {printf "%1.4f,", $0}
NR==p999 {printf "%1.4f,", $0} NR==p999 {printf "%1.4f,", $0}
NR==p100 {printf "%1.4f\n", $0} NR==p100 {printf "%1.4f\n", $0}
' <"$results_directory/$payload-response.csv" >>"$results_directory/latency.csv" ' < "$results_directory/$payload-response.csv" >> "$results_directory/latency.csv"
# Delete scratch file used for sorting/counting # Delete scratch file used for sorting/counting
# rm -rf "$results_directory/$payload-response.csv" # rm -rf "$results_directory/$payload-response.csv"
done done
# Transform csvs to dat files for gnuplot # Transform csvs to dat files for gnuplot
for file in success latency throughput; do for file in success latency throughput; do
echo -n "#" >"$results_directory/$file.dat" echo -n "#" > "$results_directory/$file.dat"
tr ',' ' ' <"$results_directory/$file.csv" | column -t >>"$results_directory/$file.dat" tr ',' ' ' < "$results_directory/$file.csv" | column -t >> "$results_directory/$file.dat"
done done
# Generate gnuplots. Commented out because we don't have *.gnuplots defined # Generate gnuplots. Commented out because we don't have *.gnuplots defined

@ -11,87 +11,87 @@ binary_directory=$(cd ../../bin && pwd)
schedulers=(EDF FIFO) schedulers=(EDF FIFO)
for scheduler in ${schedulers[*]}; do for scheduler in ${schedulers[*]}; do
results_directory="$experiment_directory/res/$timestamp/$scheduler" results_directory="$experiment_directory/res/$timestamp/$scheduler"
log=log.txt log=log.txt
mkdir -p "$results_directory" mkdir -p "$results_directory"
log_environment >>"$results_directory/$log" log_environment >> "$results_directory/$log"
# Start the runtime # Start the runtime
if [ "$1" != "-d" ]; then if [ "$1" != "-d" ]; then
SLEDGE_NWORKERS=5 SLEDGE_SCHEDULER=$scheduler PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" >>"$results_directory/$log" 2>>"$results_directory/$log" & SLEDGE_NWORKERS=5 SLEDGE_SCHEDULER=$scheduler PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" >> "$results_directory/$log" 2>> "$results_directory/$log" &
sleep 1 sleep 1
else else
echo "Running under gdb" echo "Running under gdb"
echo "Running under gdb" >>"$results_directory/$log" echo "Running under gdb" >> "$results_directory/$log"
fi fi
inputs=(40 10) inputs=(40 10)
duration_sec=15 duration_sec=15
offset=5 offset=5
# Execute workloads long enough for runtime to learn excepted execution time # Execute workloads long enough for runtime to learn excepted execution time
echo -n "Running Samples: " echo -n "Running Samples: "
for input in ${inputs[*]}; do for input in ${inputs[*]}; do
hey -z ${duration_sec}s -cpus 3 -t 0 -o csv -m GET -d "$input\n" http://localhost:$((10000 + input)) hey -z ${duration_sec}s -cpus 3 -t 0 -o csv -m GET -d "$input\n" http://localhost:$((10000 + input))
done done
echo "[DONE]" echo "[DONE]"
sleep 5 sleep 5
echo "Running Experiments" echo "Running Experiments"
# Run each separately # Run each separately
hey -z ${duration_sec}s -cpus 4 -c 100 -t 0 -o csv -m GET -d "40\n" http://localhost:10040 >"$results_directory/fib40.csv" hey -z ${duration_sec}s -cpus 4 -c 100 -t 0 -o csv -m GET -d "40\n" http://localhost:10040 > "$results_directory/fib40.csv"
hey -z ${duration_sec}s -cpus 4 -c 100 -t 0 -o csv -m GET -d "10\n" http://localhost:10010 >"$results_directory/fib10.csv" hey -z ${duration_sec}s -cpus 4 -c 100 -t 0 -o csv -m GET -d "10\n" http://localhost:10010 > "$results_directory/fib10.csv"
# Run lower priority first, then higher priority. The lower priority has offsets to ensure it runs the entire time the high priority is trying to run # Run lower priority first, then higher priority. The lower priority has offsets to ensure it runs the entire time the high priority is trying to run
hey -z $((duration_sec + 2 * offset))s -cpus 2 -c 100 -t 0 -o csv -m GET -d "40\n" http://localhost:10040 >"$results_directory/fib40-con.csv" & hey -z $((duration_sec + 2 * offset))s -cpus 2 -c 100 -t 0 -o csv -m GET -d "40\n" http://localhost:10040 > "$results_directory/fib40-con.csv" &
sleep $offset sleep $offset
hey -z ${duration_sec}s -cpus 2 -c 100 -t 0 -o csv -m GET -d "10\n" http://localhost:10010 >"$results_directory/fib10-con.csv" & hey -z ${duration_sec}s -cpus 2 -c 100 -t 0 -o csv -m GET -d "10\n" http://localhost:10010 > "$results_directory/fib10-con.csv" &
sleep $((duration_sec + offset + 15)) sleep $((duration_sec + offset + 15))
# Stop the runtime if not in debug mode # Stop the runtime if not in debug mode
[ "$1" != "-d" ] && kill_runtime [ "$1" != "-d" ] && kill_runtime
# Generate *.csv and *.dat results # Generate *.csv and *.dat results
echo -n "Parsing Results: " echo -n "Parsing Results: "
printf "Payload,Success_Rate\n" >>"$results_directory/success.csv" printf "Payload,Success_Rate\n" >> "$results_directory/success.csv"
printf "Payload,Throughput\n" >>"$results_directory/throughput.csv" printf "Payload,Throughput\n" >> "$results_directory/throughput.csv"
printf "Payload,p50,p90,p99,p100\n" >>"$results_directory/latency.csv" printf "Payload,p50,p90,p99,p100\n" >> "$results_directory/latency.csv"
deadlines_ms=(2 2 3000 3000) deadlines_ms=(2 2 3000 3000)
payloads=(fib10 fib10-con fib40 fib40-con) payloads=(fib10 fib10-con fib40 fib40-con)
for ((i = 0; i < 4; i++)); do for ((i = 0; i < 4; i++)); do
# for payload in ${payloads[*]}; do # for payload in ${payloads[*]}; do
payload=${payloads[$i]} payload=${payloads[$i]}
deadline=${deadlines_ms[$i]} deadline=${deadlines_ms[$i]}
# Get Number of Requests # Get Number of Requests
requests=$(($(wc -l <"$results_directory/$payload.csv") - 1)) requests=$(($(wc -l < "$results_directory/$payload.csv") - 1))
((requests == 0)) && continue ((requests == 0)) && continue
# Calculate Success Rate for csv # Calculate Success Rate for csv
awk -F, ' awk -F, '
$7 == 200 && ($1 * 1000) <= '"$deadline"' {ok++} $7 == 200 && ($1 * 1000) <= '"$deadline"' {ok++}
END{printf "'"$payload"',%3.5f%\n", (ok / (NR - 1) * 100)} END{printf "'"$payload"',%3.5f%\n", (ok / (NR - 1) * 100)}
' <"$results_directory/$payload.csv" >>"$results_directory/success.csv" ' < "$results_directory/$payload.csv" >> "$results_directory/success.csv"
# Filter on 200s, convery from s to ms, and sort # Filter on 200s, convery from s to ms, and sort
awk -F, '$7 == 200 {print ($1 * 1000)}' <"$results_directory/$payload.csv" | awk -F, '$7 == 200 {print ($1 * 1000)}' < "$results_directory/$payload.csv" \
sort -g >"$results_directory/$payload-response.csv" | sort -g > "$results_directory/$payload-response.csv"
# Get Number of 200s # Get Number of 200s
oks=$(wc -l <"$results_directory/$payload-response.csv") oks=$(wc -l < "$results_directory/$payload-response.csv")
((oks == 0)) && continue # If all errors, skip line ((oks == 0)) && continue # If all errors, skip line
# Get Latest Timestamp # Get Latest Timestamp
duration=$(tail -n1 "$results_directory/$payload.csv" | cut -d, -f8) duration=$(tail -n1 "$results_directory/$payload.csv" | cut -d, -f8)
throughput=$(echo "$oks/$duration" | bc) throughput=$(echo "$oks/$duration" | bc)
printf "%s,%f\n" "$payload" "$throughput" >>"$results_directory/throughput.csv" printf "%s,%f\n" "$payload" "$throughput" >> "$results_directory/throughput.csv"
# Generate Latency Data for csv # Generate Latency Data for csv
awk ' awk '
BEGIN { BEGIN {
sum = 0 sum = 0
p50 = int('"$oks"' * 0.5) p50 = int('"$oks"' * 0.5)
@ -104,21 +104,21 @@ for scheduler in ${schedulers[*]}; do
NR==p90 {printf "%1.4f,", $0} NR==p90 {printf "%1.4f,", $0}
NR==p99 {printf "%1.4f,", $0} NR==p99 {printf "%1.4f,", $0}
NR==p100 {printf "%1.4f\n", $0} NR==p100 {printf "%1.4f\n", $0}
' <"$results_directory/$payload-response.csv" >>"$results_directory/latency.csv" ' < "$results_directory/$payload-response.csv" >> "$results_directory/latency.csv"
# Delete scratch file used for sorting/counting # Delete scratch file used for sorting/counting
# rm -rf "$results_directory/$payload-response.csv" # rm -rf "$results_directory/$payload-response.csv"
done done
# Transform csvs to dat files for gnuplot # Transform csvs to dat files for gnuplot
for file in success latency throughput; do for file in success latency throughput; do
echo -n "#" >"$results_directory/$file.dat" echo -n "#" > "$results_directory/$file.dat"
tr ',' ' ' <"$results_directory/$file.csv" | column -t >>"$results_directory/$file.dat" tr ',' ' ' < "$results_directory/$file.csv" | column -t >> "$results_directory/$file.dat"
done done
# Generate gnuplots. Commented out because we don't have *.gnuplots defined # Generate gnuplots. Commented out because we don't have *.gnuplots defined
# generate_gnuplots # generate_gnuplots
# Cleanup, if requires # Cleanup, if requires
echo "[DONE]" echo "[DONE]"
done done

@ -11,87 +11,87 @@ binary_directory=$(cd ../../bin && pwd)
schedulers=(EDF FIFO) schedulers=(EDF FIFO)
for scheduler in ${schedulers[*]}; do for scheduler in ${schedulers[*]}; do
results_directory="$experiment_directory/res/$timestamp/$scheduler" results_directory="$experiment_directory/res/$timestamp/$scheduler"
log=log.txt log=log.txt
mkdir -p "$results_directory" mkdir -p "$results_directory"
log_environment >>"$results_directory/$log" log_environment >> "$results_directory/$log"
# Start the runtime # Start the runtime
if [ "$1" != "-d" ]; then if [ "$1" != "-d" ]; then
SLEDGE_NWORKERS=5 SLEDGE_SCHEDULER=$scheduler PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" >>"$results_directory/$log" 2>>"$results_directory/$log" & SLEDGE_NWORKERS=5 SLEDGE_SCHEDULER=$scheduler PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" >> "$results_directory/$log" 2>> "$results_directory/$log" &
sleep 1 sleep 1
else else
echo "Running under gdb" echo "Running under gdb"
echo "Running under gdb" >>"$results_directory/$log" echo "Running under gdb" >> "$results_directory/$log"
fi fi
inputs=(40 10) inputs=(40 10)
duration_sec=15 duration_sec=15
offset=5 offset=5
# Execute workloads long enough for runtime to learn excepted execution time # Execute workloads long enough for runtime to learn excepted execution time
echo -n "Running Samples: " echo -n "Running Samples: "
for input in ${inputs[*]}; do for input in ${inputs[*]}; do
hey -z ${duration_sec}s -cpus 3 -t 0 -o csv -m GET -d "$input\n" http://localhost:$((10000 + input)) hey -z ${duration_sec}s -cpus 3 -t 0 -o csv -m GET -d "$input\n" http://localhost:$((10000 + input))
done done
echo "[DONE]" echo "[DONE]"
sleep 5 sleep 5
echo "Running Experiments" echo "Running Experiments"
# Run each separately # Run each separately
hey -z ${duration_sec}s -cpus 4 -c 100 -t 0 -o csv -m GET -d "40\n" http://localhost:10040 >"$results_directory/fib40.csv" hey -z ${duration_sec}s -cpus 4 -c 100 -t 0 -o csv -m GET -d "40\n" http://localhost:10040 > "$results_directory/fib40.csv"
hey -z ${duration_sec}s -cpus 4 -c 100 -t 0 -o csv -m GET -d "10\n" http://localhost:10010 >"$results_directory/fib10.csv" hey -z ${duration_sec}s -cpus 4 -c 100 -t 0 -o csv -m GET -d "10\n" http://localhost:10010 > "$results_directory/fib10.csv"
# Run lower priority first, then higher priority. The lower priority has offsets to ensure it runs the entire time the high priority is trying to run # Run lower priority first, then higher priority. The lower priority has offsets to ensure it runs the entire time the high priority is trying to run
hey -z $((duration_sec + 2 * offset))s -cpus 2 -c 100 -t 0 -o csv -m GET -d "40\n" http://localhost:10040 >"$results_directory/fib40-con.csv" & hey -z $((duration_sec + 2 * offset))s -cpus 2 -c 100 -t 0 -o csv -m GET -d "40\n" http://localhost:10040 > "$results_directory/fib40-con.csv" &
sleep $offset sleep $offset
hey -z ${duration_sec}s -cpus 2 -c 100 -t 0 -o csv -m GET -d "10\n" http://localhost:10010 >"$results_directory/fib10-con.csv" & hey -z ${duration_sec}s -cpus 2 -c 100 -t 0 -o csv -m GET -d "10\n" http://localhost:10010 > "$results_directory/fib10-con.csv" &
sleep $((duration_sec + offset + 15)) sleep $((duration_sec + offset + 15))
# Stop the runtime if not in debug mode # Stop the runtime if not in debug mode
[ "$1" != "-d" ] && kill_runtime [ "$1" != "-d" ] && kill_runtime
# Generate *.csv and *.dat results # Generate *.csv and *.dat results
echo -n "Parsing Results: " echo -n "Parsing Results: "
printf "Payload,Success_Rate\n" >>"$results_directory/success.csv" printf "Payload,Success_Rate\n" >> "$results_directory/success.csv"
printf "Payload,Throughput\n" >>"$results_directory/throughput.csv" printf "Payload,Throughput\n" >> "$results_directory/throughput.csv"
printf "Payload,p50,p90,p99,p100\n" >>"$results_directory/latency.csv" printf "Payload,p50,p90,p99,p100\n" >> "$results_directory/latency.csv"
deadlines_ms=(2 2 3000 3000) deadlines_ms=(2 2 3000 3000)
payloads=(fib10 fib10-con fib40 fib40-con) payloads=(fib10 fib10-con fib40 fib40-con)
for ((i = 0; i < 4; i++)); do for ((i = 0; i < 4; i++)); do
# for payload in ${payloads[*]}; do # for payload in ${payloads[*]}; do
payload=${payloads[$i]} payload=${payloads[$i]}
deadline=${deadlines_ms[$i]} deadline=${deadlines_ms[$i]}
# Get Number of Requests # Get Number of Requests
requests=$(($(wc -l <"$results_directory/$payload.csv") - 1)) requests=$(($(wc -l < "$results_directory/$payload.csv") - 1))
((requests == 0)) && continue ((requests == 0)) && continue
# Calculate Success Rate for csv # Calculate Success Rate for csv
awk -F, ' awk -F, '
$7 == 200 && ($1 * 1000) <= '"$deadline"' {ok++} $7 == 200 && ($1 * 1000) <= '"$deadline"' {ok++}
END{printf "'"$payload"',%3.5f%\n", (ok / (NR - 1) * 100)} END{printf "'"$payload"',%3.5f%\n", (ok / (NR - 1) * 100)}
' <"$results_directory/$payload.csv" >>"$results_directory/success.csv" ' < "$results_directory/$payload.csv" >> "$results_directory/success.csv"
# Filter on 200s, convery from s to ms, and sort # Filter on 200s, convery from s to ms, and sort
awk -F, '$7 == 200 {print ($1 * 1000)}' <"$results_directory/$payload.csv" | awk -F, '$7 == 200 {print ($1 * 1000)}' < "$results_directory/$payload.csv" \
sort -g >"$results_directory/$payload-response.csv" | sort -g > "$results_directory/$payload-response.csv"
# Get Number of 200s # Get Number of 200s
oks=$(wc -l <"$results_directory/$payload-response.csv") oks=$(wc -l < "$results_directory/$payload-response.csv")
((oks == 0)) && continue # If all errors, skip line ((oks == 0)) && continue # If all errors, skip line
# Get Latest Timestamp # Get Latest Timestamp
duration=$(tail -n1 "$results_directory/$payload.csv" | cut -d, -f8) duration=$(tail -n1 "$results_directory/$payload.csv" | cut -d, -f8)
throughput=$(echo "$oks/$duration" | bc) throughput=$(echo "$oks/$duration" | bc)
printf "%s,%f\n" "$payload" "$throughput" >>"$results_directory/throughput.csv" printf "%s,%f\n" "$payload" "$throughput" >> "$results_directory/throughput.csv"
# Generate Latency Data for csv # Generate Latency Data for csv
awk ' awk '
BEGIN { BEGIN {
sum = 0 sum = 0
p50 = int('"$oks"' * 0.5) p50 = int('"$oks"' * 0.5)
@ -104,21 +104,21 @@ for scheduler in ${schedulers[*]}; do
NR==p90 {printf "%1.4f%,", $0 / '"$deadline"' * 100} NR==p90 {printf "%1.4f%,", $0 / '"$deadline"' * 100}
NR==p99 {printf "%1.4f%,", $0 / '"$deadline"' * 100} NR==p99 {printf "%1.4f%,", $0 / '"$deadline"' * 100}
NR==p100 {printf "%1.4f%\n", $0 / '"$deadline"' * 100} NR==p100 {printf "%1.4f%\n", $0 / '"$deadline"' * 100}
' <"$results_directory/$payload-response.csv" >>"$results_directory/latency.csv" ' < "$results_directory/$payload-response.csv" >> "$results_directory/latency.csv"
# Delete scratch file used for sorting/counting # Delete scratch file used for sorting/counting
# rm -rf "$results_directory/$payload-response.csv" # rm -rf "$results_directory/$payload-response.csv"
done done
# Transform csvs to dat files for gnuplot # Transform csvs to dat files for gnuplot
for file in success latency throughput; do for file in success latency throughput; do
echo -n "#" >"$results_directory/$file.dat" echo -n "#" > "$results_directory/$file.dat"
tr ',' ' ' <"$results_directory/$file.csv" | column -t >>"$results_directory/$file.dat" tr ',' ' ' < "$results_directory/$file.csv" | column -t >> "$results_directory/$file.dat"
done done
# Generate gnuplots. Commented out because we don't have *.gnuplots defined # Generate gnuplots. Commented out because we don't have *.gnuplots defined
# generate_gnuplots # generate_gnuplots
# Cleanup, if requires # Cleanup, if requires
echo "[DONE]" echo "[DONE]"
done done

@ -5,15 +5,14 @@ ITERS=$3
# before running this benchmark, # before running this benchmark,
# copy fibonacci to fibonacci_native.out # copy fibonacci to fibonacci_native.out
testeach() testeach() {
{
tmp_cnt=${ITERS} tmp_cnt=${ITERS}
exe_relpath=$1 exe_relpath=$1
echo "${exe_relpath} ($2) for ${tmp_cnt}" echo "${exe_relpath} ($2) for ${tmp_cnt}"
while [ ${tmp_cnt} -gt 0 ]; do while [ ${tmp_cnt} -gt 0 ]; do
bench=$(echo $2 | $exe_relpath 2>/dev/null) bench=$(echo $2 | $exe_relpath 2> /dev/null)
tmp_cnt=$((tmp_cnt - 1)) tmp_cnt=$((tmp_cnt - 1))
echo "$bench" echo "$bench"
done done
@ -25,7 +24,7 @@ MAXNUM=$2
tmp1_cnt=${MAXNUM} tmp1_cnt=${MAXNUM}
while [ ${tmp1_cnt} -gt 28 ]; do while [ ${tmp1_cnt} -gt 28 ]; do
testeach ./fibonacci_$1.out ${tmp1_cnt} testeach ./fibonacci_$1.out ${tmp1_cnt}
tmp1_cnt=$((tmp1_cnt - 1)) tmp1_cnt=$((tmp1_cnt - 1))
done done

@ -5,15 +5,15 @@
# Also disables pagination and stopping on SIGUSR1 # Also disables pagination and stopping on SIGUSR1
declare project_path="$( declare project_path="$(
cd "$(dirname "$1")/../.." cd "$(dirname "$1")/../.."
pwd pwd
)" )"
echo $project_path echo $project_path
cd ../../bin cd ../../bin
export LD_LIBRARY_PATH="$(pwd):$LD_LIBRARY_PATH" export LD_LIBRARY_PATH="$(pwd):$LD_LIBRARY_PATH"
gdb --eval-command="handle SIGUSR1 nostop" \ gdb --eval-command="handle SIGUSR1 nostop" \
--eval-command="set pagination off" \ --eval-command="set pagination off" \
--eval-command="set substitute-path /sledge/runtime $project_path" \ --eval-command="set substitute-path /sledge/runtime $project_path" \
--eval-command="run ../tests/preemption/test_fibonacci_multiple.json" \ --eval-command="run ../tests/preemption/test_fibonacci_multiple.json" \
./sledgert ./sledgert
cd ../../tests cd ../../tests

@ -2,8 +2,8 @@
# Test Driver Script # Test Driver Script
if [[ $0 != "./test.sh" ]]; then if [[ $0 != "./test.sh" ]]; then
echo "Must run in same directory as ./test.sh" echo "Must run in same directory as ./test.sh"
exit 1 exit 1
fi fi
base_dir=$(pwd) base_dir=$(pwd)

Loading…
Cancel
Save