diff --git a/.vscode/settings.json b/.vscode/settings.json
index 013f581..45fd335 100644
--- a/.vscode/settings.json
+++ b/.vscode/settings.json
@@ -69,5 +69,7 @@
"C_Cpp.files.exclude": {
"awsm/wasmception": true,
"**/.vscode": true
- }
+ },
+
+ "shellformat.flag": "-ln=bash -i 0 -bn -ci -sr -kp"
}
diff --git a/devenv.sh b/devenv.sh
index 299a30e..1b23e08 100755
--- a/devenv.sh
+++ b/devenv.sh
@@ -29,91 +29,91 @@ SYS_BUILD_TIMEOUT=0
# Provides help to user on how to use this script
usage() {
- echo "usage $0 "
- echo " setup Build a sledge runtime container and sledge-dev, a build container with toolchain needed to compile your own functions"
- echo " run Start the sledge Docker image as an interactive container with this repository mounted"
- echo " stop Stop and remove the sledge Docker container after use"
- echo " rm Remove the sledge runtime container and image, but leaves the sledge-dev container in place"
- echo " rma Removes all the sledge and sledge-dev containers and images"
+ echo "usage $0 "
+ echo " setup Build a sledge runtime container and sledge-dev, a build container with toolchain needed to compile your own functions"
+ echo " run Start the sledge Docker image as an interactive container with this repository mounted"
+ echo " stop Stop and remove the sledge Docker container after use"
+ echo " rm Remove the sledge runtime container and image, but leaves the sledge-dev container in place"
+ echo " rma Removes all the sledge and sledge-dev containers and images"
}
# Given a number of seconds, initiates a countdown sequence
countdown() {
- tmp_cnt=$1
- while [ "${tmp_cnt}" -gt 0 ]; do
- printf "%d." "${tmp_cnt}"
- sleep 1
- tmp_cnt=$((tmp_cnt - 1))
- done
- echo
+ tmp_cnt=$1
+ while [ "${tmp_cnt}" -gt 0 ]; do
+ printf "%d." "${tmp_cnt}"
+ sleep 1
+ tmp_cnt=$((tmp_cnt - 1))
+ done
+ echo
}
# Build and runs the build container sledge-dev and then executes make install on the project
# Finally "forks" the sledge-dev build container into the sledge execution container
envsetup() {
- # I want to create this container before the Makefile executes so that my user owns it
- # This allows me to execute the sledgert binary from my local host
- mkdir -p "$HOST_ROOT/runtime/bin"
-
- # Check to see if the sledge:latest image exists, exiting if it does
- # Because sledge:latest is "forked" after completing envsetup, this suggests that envsetup was already run
- if docker image inspect ${SYS_DOC_NAMETAG} 1>/dev/null 2>/dev/null; then
- echo "${SYS_DOC_NAMETAG} image exists, which means that 'devenv.sh setup' already ran to completion!"
- echo "If you are explicitly trying to rebuild SLEdge, run the following:"
- echo "devenv.sh rma | Removes the images sledge:latest AND sledge-dev:latest"
- exit 1
- fi
-
- echo "Setting up ${SYS_NAME}"
-
- echo "Updating git submodules"
- git submodule update --init --recursive 2>/dev/null || :d
-
- echo "Using Dockerfile.$(uname -m)"
- rm -f Dockerfile
- ln -s Dockerfile.$(uname -m) Dockerfile
-
- # As a user nicety, warn the user if sledge-dev is detected
- # This UX differs from detecting sledge, which immediately exits
- # This is disabled because it doesn't seem useful
- if
- docker image inspect "${SYS_DOC_DEVNAMETAG}" 1>/dev/null 2>/dev/null && [ $SYS_BUILD_TIMEOUT -gt 0 ]
- then
- echo "${SYS_DOC_DEVNAME} image exists, rebuilding it"
- echo "(you have ${SYS_BUILD_TIMEOUT}secs to stop the rebuild)"
- countdown ${SYS_BUILD_TIMEOUT}
- fi
-
- # Build the image sledge-dev:latest
- echo "Building ${SYS_DOC_DEVNAMETAG}"
- docker build --tag "${SYS_DOC_DEVNAMETAG}" .
-
- # Run the sledge-dev:latest image as a background container named sledge-dev with the project directly mounted at /sledge
- echo "Creating the build container ${SYS_DOC_NAMETAG} from the image ${SYS_DOC_DEVNAMETAG}"
- docker run \
- --privileged \
- --name=${SYS_DOC_DEVNAME} \
- --detach \
- --mount type=bind,src="$(cd "$(dirname "${0}")" && pwd -P || exit 1),target=/${SYS_NAME}" \
- "${SYS_DOC_DEVNAMETAG}" /bin/sleep 99999999 >/dev/null
-
- # Execute the make install command on the sledge-dev image to build the project
- echo "Building ${SYS_NAME}"
- docker exec \
- --tty \
- --workdir "${HOST_SYS_MOUNT}" \
- ${SYS_DOC_DEVNAME} make install
-
- # Create the image sledge:latest from the current state of docker-dev
- echo "Tagging the new image"
- docker container commit ${SYS_DOC_DEVNAME} ${SYS_DOC_NAMETAG}
-
- # Kill and remove the running sledge-dev container
- echo "Cleaning up ${SYS_DOC_DEVNAME}"
- docker kill ${SYS_DOC_DEVNAME}
- docker rm ${SYS_DOC_DEVNAME}
-
- echo "Done!"
+ # I want to create this container before the Makefile executes so that my user owns it
+ # This allows me to execute the sledgert binary from my local host
+ mkdir -p "$HOST_ROOT/runtime/bin"
+
+ # Check to see if the sledge:latest image exists, exiting if it does
+ # Because sledge:latest is "forked" after completing envsetup, this suggests that envsetup was already run
+ if docker image inspect ${SYS_DOC_NAMETAG} 1> /dev/null 2> /dev/null; then
+ echo "${SYS_DOC_NAMETAG} image exists, which means that 'devenv.sh setup' already ran to completion!"
+ echo "If you are explicitly trying to rebuild SLEdge, run the following:"
+ echo "devenv.sh rma | Removes the images sledge:latest AND sledge-dev:latest"
+ exit 1
+ fi
+
+ echo "Setting up ${SYS_NAME}"
+
+ echo "Updating git submodules"
+ git submodule update --init --recursive 2> /dev/null || :
+
+ echo "Using Dockerfile.$(uname -m)"
+ rm -f Dockerfile
+ ln -s Dockerfile.$(uname -m) Dockerfile
+
+ # As a user nicety, warn the user if sledge-dev is detected
+ # This UX differs from detecting sledge, which immediately exits
+ # This is disabled because it doesn't seem useful
+ if
+ docker image inspect "${SYS_DOC_DEVNAMETAG}" 1> /dev/null 2> /dev/null && [ $SYS_BUILD_TIMEOUT -gt 0 ]
+ then
+ echo "${SYS_DOC_DEVNAME} image exists, rebuilding it"
+ echo "(you have ${SYS_BUILD_TIMEOUT}secs to stop the rebuild)"
+ countdown ${SYS_BUILD_TIMEOUT}
+ fi
+
+ # Build the image sledge-dev:latest
+ echo "Building ${SYS_DOC_DEVNAMETAG}"
+ docker build --tag "${SYS_DOC_DEVNAMETAG}" .
+
+ # Run the sledge-dev:latest image as a background container named sledge-dev with the project directly mounted at /sledge
+ echo "Creating the build container ${SYS_DOC_NAMETAG} from the image ${SYS_DOC_DEVNAMETAG}"
+ docker run \
+ --privileged \
+ --name=${SYS_DOC_DEVNAME} \
+ --detach \
+ --mount type=bind,src="$(cd "$(dirname "${0}")" && pwd -P || exit 1),target=/${SYS_NAME}" \
+ "${SYS_DOC_DEVNAMETAG}" /bin/sleep 99999999 > /dev/null
+
+ # Execute the make install command on the sledge-dev image to build the project
+ echo "Building ${SYS_NAME}"
+ docker exec \
+ --tty \
+ --workdir "${HOST_SYS_MOUNT}" \
+ ${SYS_DOC_DEVNAME} make install
+
+ # Create the image sledge:latest from the current state of docker-dev
+ echo "Tagging the new image"
+ docker container commit ${SYS_DOC_DEVNAME} ${SYS_DOC_NAMETAG}
+
+ # Kill and remove the running sledge-dev container
+ echo "Cleaning up ${SYS_DOC_DEVNAME}"
+ docker kill ${SYS_DOC_DEVNAME}
+ docker rm ${SYS_DOC_DEVNAME}
+
+ echo "Done!"
}
# Executes an interactive BASH shell in the sledge container with /sledge as the working directory
@@ -121,75 +121,75 @@ envsetup() {
# If the image sledge:latest does not exist, automatically runs envsetup to build sledge and create it
# If the a container names sledge is not running, starts it from sledge:latest, mounting the SLEdge project directory to /sledge
envrun() {
- if ! docker image inspect ${SYS_DOC_NAMETAG} >/dev/null; then
- envsetup
- fi
-
- if docker ps -f name=${SYS_DOC_NAME} --format '{{.Names}}' | grep -q "^${SYS_DOC_NAME}"; then
- echo "Container is running" >&2
- else
-
- echo "Starting ${SYS_DOC_NAME}"
- docker run \
- --privileged \
- --security-opt seccomp:unconfined \
- --name=${SYS_DOC_NAME} \
- --detach \
- --mount type=bind,src="$(cd "$(dirname "${0}")" && pwd -P || exit 1),target=/${SYS_NAME}" \
- ${SYS_DOC_NAMETAG} /bin/sleep 99999999 >/dev/null
- fi
-
- echo "Running shell"
- docker exec --tty --interactive --workdir "${HOST_SYS_MOUNT}" ${SYS_DOC_NAME} /bin/bash
+ if ! docker image inspect ${SYS_DOC_NAMETAG} > /dev/null; then
+ envsetup
+ fi
+
+ if docker ps -f name=${SYS_DOC_NAME} --format '{{.Names}}' | grep -q "^${SYS_DOC_NAME}"; then
+ echo "Container is running" >&2
+ else
+
+ echo "Starting ${SYS_DOC_NAME}"
+ docker run \
+ --privileged \
+ --security-opt seccomp:unconfined \
+ --name=${SYS_DOC_NAME} \
+ --detach \
+ --mount type=bind,src="$(cd "$(dirname "${0}")" && pwd -P || exit 1),target=/${SYS_NAME}" \
+ ${SYS_DOC_NAMETAG} /bin/sleep 99999999 > /dev/null
+ fi
+
+ echo "Running shell"
+ docker exec --tty --interactive --workdir "${HOST_SYS_MOUNT}" ${SYS_DOC_NAME} /bin/bash
}
# Stops and removes the sledge "runtime" container
envstop() {
- echo "Stopping container"
- docker stop ${SYS_DOC_NAME}
- echo "Removing container"
- docker rm ${SYS_DOC_NAME}
+ echo "Stopping container"
+ docker stop ${SYS_DOC_NAME}
+ echo "Removing container"
+ docker rm ${SYS_DOC_NAME}
}
# Stops and removes the sledge "runtime" container and then removes the sledge "runtime" image
envrm() {
- envstop
- docker rmi ${SYS_DOC_NAME}
+ envstop
+ docker rmi ${SYS_DOC_NAME}
}
# Stops and removes the sledge "runtime" container and image and then removes the sledge-dev "build image" image
envrma() {
- envrm
- docker rmi ${SYS_DOC_DEVNAME}
+ envrm
+ docker rmi ${SYS_DOC_DEVNAME}
}
if [ $# -ne 1 ]; then
- echo "incorrect number of arguments: $*"
- usage "$0"
- exit 1
+ echo "incorrect number of arguments: $*"
+ usage "$0"
+ exit 1
fi
case $1 in
- run)
- envrun
- ;;
- stop)
- envstop
- ;;
- setup)
- envsetup
- ;;
- rm)
- envrm
- ;;
- rma)
- envrma
- ;;
- *)
- echo "invalid option: $1"
- usage "$0"
- exit 1
- ;;
+ run)
+ envrun
+ ;;
+ stop)
+ envstop
+ ;;
+ setup)
+ envsetup
+ ;;
+ rm)
+ envrm
+ ;;
+ rma)
+ envrma
+ ;;
+ *)
+ echo "invalid option: $1"
+ usage "$0"
+ exit 1
+ ;;
esac
echo
echo "done!"
diff --git a/format.sh b/format.sh
index 4141e49..197abd7 100755
--- a/format.sh
+++ b/format.sh
@@ -2,7 +2,7 @@
validate() {
utility="clang-format"
- utility_version="$("$utility" --version 2>/dev/null)" || {
+ utility_version="$("$utility" --version 2> /dev/null)" || {
echo "$utility not found in path!"
exit 1
}
@@ -44,19 +44,19 @@ help() {
dry_run() {
find runtime \
\( -path "runtime/thirdparty" -o -path "runtime/tests/gocr" -o -path "runtime/tests/TinyEKF" -o -path "runtime/tests/CMSIS_5_NN" -o -path "runtime/tests/sod" -o -path "runtime/tests/**/thirdparty" \) -prune -false -o \
- -type f \( -iname \*.h -o -iname \*.c -o -iname \*.s \) -print |
- xargs clang-format -Werror -n -ferror-limit=0
+ -type f \( -iname \*.h -o -iname \*.c -o -iname \*.s \) -print \
+ | xargs clang-format -Werror -n -ferror-limit=0
}
format() {
find runtime \
\( -path "runtime/thirdparty" -o -path "runtime/tests/gocr" -o -path "runtime/tests/TinyEKF" -o -path "runtime/tests/CMSIS_5_NN" -o -path "runtime/tests/sod" -o -path "runtime/tests/**/thirdparty" \) -prune -false -o \
- -type f \( -iname \*.h -o -iname \*.c -o -iname \*.s \) -print |
- xargs clang-format -i
+ -type f \( -iname \*.h -o -iname \*.c -o -iname \*.s \) -print \
+ | xargs clang-format -i
}
case $1 in
-"-h" | "--help") help ;;
-"-d" | "--dry-run") validate && dry_run ;;
-"") validate && format ;;
+ "-h" | "--help") help ;;
+ "-d" | "--dry-run") validate && dry_run ;;
+ "") validate && format ;;
esac
diff --git a/install.sh b/install.sh
index 60df1d6..ce9989f 100755
--- a/install.sh
+++ b/install.sh
@@ -26,29 +26,29 @@ echo "Setting up toolchain environment"
for last_arg in "$@"; do :; done
if [[ $last_arg == "-d" ]] || [[ $last_arg == "--dry-run" ]]; then
- DRY_RUN=true
+ DRY_RUN=true
else
- DRY_RUN=false
+ DRY_RUN=false
fi
if $DRY_RUN; then
- DRY_RUN_PREFIX=echo
+ DRY_RUN_PREFIX=echo
else
- DRY_RUN_PREFIX=
+ DRY_RUN_PREFIX=
fi
# Get the absolute path of the topmost project directly
# The use of dirname is particular. It seems unneccesary how this script is run
SYS_SRC_PREFIX=${SYS_SRC_PREFIX:-"$(
- cd "$(dirname "$(dirname "${0}")")" || exit 1
- pwd -P
+ cd "$(dirname "$(dirname "${0}")")" || exit 1
+ pwd -P
)"}
$DRY_RUN && echo SYS_SRC_PREFIX: "$SYS_SRC_PREFIX"
# And check for the presence of this script to make sure we got it right
if [ ! -x "${SYS_SRC_PREFIX}/install.sh" ]; then
- echo "Unable to find the install script" >&2
- exit 1
+ echo "Unable to find the install script" >&2
+ exit 1
fi
SYS_NAME='sledge'
@@ -79,21 +79,21 @@ $DRY_RUN && echo SYS_LIB_DIR: "$SYS_LIB_DIR"
# The default is wasmception
# Currently, WASI is not actually supported by the runtime.
if [ $# -eq 0 ] || [ "$1" = "wasmception" ]; then
- echo "Setting up for wasmception"
- WASM_PREFIX=${WASM_PREFIX:-"${SYS_SRC_PREFIX}/${COMPILER}/wasmception"}
- WASM_BIN=${WASM_BIN:-"${WASM_PREFIX}/dist/bin"}
- WASM_SYSROOT=${WASM_SYSROOT:-"${WASM_PREFIX}/sysroot"}
- WASM_TARGET=${WASM_TARGET:-"wasm32-unknown-unknown-wasm"}
- WASM_BIN_PREFIX=${WASM_BIN_PREFIX:-"$WASM_TARGET"}
- WASM_TOOLS=(ar)
+ echo "Setting up for wasmception"
+ WASM_PREFIX=${WASM_PREFIX:-"${SYS_SRC_PREFIX}/${COMPILER}/wasmception"}
+ WASM_BIN=${WASM_BIN:-"${WASM_PREFIX}/dist/bin"}
+ WASM_SYSROOT=${WASM_SYSROOT:-"${WASM_PREFIX}/sysroot"}
+ WASM_TARGET=${WASM_TARGET:-"wasm32-unknown-unknown-wasm"}
+ WASM_BIN_PREFIX=${WASM_BIN_PREFIX:-"$WASM_TARGET"}
+ WASM_TOOLS=(ar)
elif [ "$1" = "wasi" ]; then
- echo "Setting up for wasi-sdk"
- WASM_PREFIX=${WASM_PREFIX:-${WASM_SDK:-"/opt/wasi-sdk"}}
- WASM_BIN=${WASM_BIN:-"${WASM_PREFIX}/bin"}
- WASM_SYSROOT=${WASM_SYSROOT:-"${WASM_PREFIX}/share/sysroot"}
- WASM_TARGET=${WASM_TARGET:-"wasm32-wasi"}
- WASM_BIN_PREFIX=${WASM_BIN_PREFIX:-"$WASM_TARGET"}
- WASM_TOOLS=(ar dwarfdump nm ranlib size)
+ echo "Setting up for wasi-sdk"
+ WASM_PREFIX=${WASM_PREFIX:-${WASM_SDK:-"/opt/wasi-sdk"}}
+ WASM_BIN=${WASM_BIN:-"${WASM_PREFIX}/bin"}
+ WASM_SYSROOT=${WASM_SYSROOT:-"${WASM_PREFIX}/share/sysroot"}
+ WASM_TARGET=${WASM_TARGET:-"wasm32-wasi"}
+ WASM_BIN_PREFIX=${WASM_BIN_PREFIX:-"$WASM_TARGET"}
+ WASM_TOOLS=(ar dwarfdump nm ranlib size)
fi
$DRY_RUN && echo WASM_PREFIX: "$WASM_PREFIX"
$DRY_RUN && echo WASM_BIN: "$WASM_BIN"
@@ -117,27 +117,27 @@ $DRY_RUN_PREFIX ln -sfv "${SYS_COMPILER_REL_DIR}/${COMPILER_EXECUTABLE}" "${SYS_
# For example, when wasmception is set, calling `wasm32-unknown-unknown-wasm-clang` results in
# `exec "/sledge/awsm/wasmception/dist/bin/clang" --target="wasm32-unknown-unknown-wasm" --sysroot="/sledge/awsm/wasmception/sysroot" "$@"`
for file in clang clang++; do
- wrapper_file="$(mktemp)"
- cat >"$wrapper_file" <<EOT
+ wrapper_file="$(mktemp)"
+ cat > "$wrapper_file" << EOT
#! /bin/sh
exec "${WASM_BIN}/${file}" --target="$WASM_TARGET" --sysroot="$WASM_SYSROOT" "\$@"
EOT
- cat "$wrapper_file"
- $DRY_RUN_PREFIX install -p -v "$wrapper_file" "${SYS_BIN_DIR}/${WASM_BIN_PREFIX}-${file}"
- $DRY_RUN && echo rm -f "$wrapper_file"
- rm -f "$wrapper_file"
+ cat "$wrapper_file"
+ $DRY_RUN_PREFIX install -p -v "$wrapper_file" "${SYS_BIN_DIR}/${WASM_BIN_PREFIX}-${file}"
+ $DRY_RUN && echo rm -f "$wrapper_file"
+ rm -f "$wrapper_file"
done
# Link the LLVM Tools with the proper prefix
for file in "${WASM_TOOLS[@]}"; do
- $DRY_RUN_PREFIX ln -sfv "${WASM_BIN}/llvm-${file}" "${SYS_BIN_DIR}/${WASM_BIN_PREFIX}-${file}"
+ $DRY_RUN_PREFIX ln -sfv "${WASM_BIN}/llvm-${file}" "${SYS_BIN_DIR}/${WASM_BIN_PREFIX}-${file}"
done
# Link any other tools with the proper prefix
OTHER_TOOLS=(ld)
for file in "${OTHER_TOOLS[@]}"; do
- $DRY_RUN_PREFIX ln -sfv "${WASM_BIN}/wasm-${file}" "${SYS_BIN_DIR}/${WASM_BIN_PREFIX}-${file}"
+ $DRY_RUN_PREFIX ln -sfv "${WASM_BIN}/wasm-${file}" "${SYS_BIN_DIR}/${WASM_BIN_PREFIX}-${file}"
done
# Link clang as gcc if needed
diff --git a/runtime/experiments/applications/debug.sh b/runtime/experiments/applications/debug.sh
index 62c64e3..f40fa45 100755
--- a/runtime/experiments/applications/debug.sh
+++ b/runtime/experiments/applications/debug.sh
@@ -12,8 +12,8 @@ export LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH"
export PATH="$binary_directory:$PATH"
gdb --eval-command="handle SIGUSR1 nostop" \
- --eval-command="handle SIGPIPE nostop" \
- --eval-command="set pagination off" \
- --eval-command="set substitute-path /sledge/runtime $project_directory" \
- --eval-command="run $experiment_directory/spec.json" \
- sledgert
+ --eval-command="handle SIGPIPE nostop" \
+ --eval-command="set pagination off" \
+ --eval-command="set substitute-path /sledge/runtime $project_directory" \
+ --eval-command="run $experiment_directory/spec.json" \
+ sledgert
diff --git a/runtime/experiments/applications/ekf/by_iteration/debug.sh b/runtime/experiments/applications/ekf/by_iteration/debug.sh
index 6ab3666..879d485 100755
--- a/runtime/experiments/applications/ekf/by_iteration/debug.sh
+++ b/runtime/experiments/applications/ekf/by_iteration/debug.sh
@@ -12,8 +12,8 @@ export LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH"
export PATH="$binary_directory:$PATH"
gdb --eval-command="handle SIGUSR1 nostop" \
- --eval-command="handle SIGPIPE nostop" \
- --eval-command="set pagination off" \
- --eval-command="set substitute-path /sledge/runtime $project_directory" \
- --eval-command="run $experiment_directory/spec.json" \
- sledgert
+ --eval-command="handle SIGPIPE nostop" \
+ --eval-command="set pagination off" \
+ --eval-command="set substitute-path /sledge/runtime $project_directory" \
+ --eval-command="run $experiment_directory/spec.json" \
+ sledgert
diff --git a/runtime/experiments/applications/ekf/by_iteration/run.sh b/runtime/experiments/applications/ekf/by_iteration/run.sh
index e129276..7f4f84c 100755
--- a/runtime/experiments/applications/ekf/by_iteration/run.sh
+++ b/runtime/experiments/applications/ekf/by_iteration/run.sh
@@ -14,46 +14,46 @@ did_pass=true
# Copy data if not here
if [[ ! -f "./initial_state.dat" ]]; then
- cp $runtime_directory/tests/TinyEKF/extras/c/ekf_raw.dat ./initial_state.dat
+ cp $runtime_directory/tests/TinyEKF/extras/c/ekf_raw.dat ./initial_state.dat
fi
if [ "$1" != "-d" ]; then
- SLEDGE_SANDBOX_PERF_LOG=$log PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" >rt.log 2>&1 &
- sleep 2
+ SLEDGE_SANDBOX_PERF_LOG=$log PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" > rt.log 2>&1 &
+ sleep 2
else
- echo "Running under gdb"
+ echo "Running under gdb"
fi
success_count=0
total_count=50
for ((i = 0; i < total_count; i++)); do
- echo "$i"
- curl -H 'Expect:' -H "Content-Type: application/octet-stream" --data-binary "@initial_state.dat" localhost:10000 2>/dev/null >./one_iteration_res.dat
- curl -H 'Expect:' -H "Content-Type: application/octet-stream" --data-binary "@one_iteration_res.dat" localhost:10001 2>/dev/null >./two_iterations_res.dat
- curl -H 'Expect:' -H "Content-Type: application/octet-stream" --data-binary "@two_iterations_res.dat" localhost:10002 2>/dev/null >./three_iterations_res.dat
- if diff -s one_iteration_res.dat one_iteration.dat && diff -s two_iterations_res.dat two_iterations.dat && diff -s three_iterations_res.dat three_iterations.dat; then
- success_count=$((success_count + 1))
- rm *_res.dat
- else
- echo "FAIL"
- did_pass=false
- rm *_res.dat
- break
- fi
+ echo "$i"
+ curl -H 'Expect:' -H "Content-Type: application/octet-stream" --data-binary "@initial_state.dat" localhost:10000 2> /dev/null > ./one_iteration_res.dat
+ curl -H 'Expect:' -H "Content-Type: application/octet-stream" --data-binary "@one_iteration_res.dat" localhost:10001 2> /dev/null > ./two_iterations_res.dat
+ curl -H 'Expect:' -H "Content-Type: application/octet-stream" --data-binary "@two_iterations_res.dat" localhost:10002 2> /dev/null > ./three_iterations_res.dat
+ if diff -s one_iteration_res.dat one_iteration.dat && diff -s two_iterations_res.dat two_iterations.dat && diff -s three_iterations_res.dat three_iterations.dat; then
+ success_count=$((success_count + 1))
+ rm *_res.dat
+ else
+ echo "FAIL"
+ did_pass=false
+ rm *_res.dat
+ break
+ fi
done
echo "$success_count / $total_count"
if [ "$1" != "-d" ]; then
- sleep 5
- echo -n "Running Cleanup: "
- pkill sledgert >/dev/null 2>/dev/null
- echo "[DONE]"
+ sleep 5
+ echo -n "Running Cleanup: "
+ pkill sledgert > /dev/null 2> /dev/null
+ echo "[DONE]"
fi
if $did_pass; then
- exit 0
+ exit 0
else
- exit 1
+ exit 1
fi
diff --git a/runtime/experiments/applications/ekf/one_iteration/debug.sh b/runtime/experiments/applications/ekf/one_iteration/debug.sh
index 6ab3666..879d485 100755
--- a/runtime/experiments/applications/ekf/one_iteration/debug.sh
+++ b/runtime/experiments/applications/ekf/one_iteration/debug.sh
@@ -12,8 +12,8 @@ export LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH"
export PATH="$binary_directory:$PATH"
gdb --eval-command="handle SIGUSR1 nostop" \
- --eval-command="handle SIGPIPE nostop" \
- --eval-command="set pagination off" \
- --eval-command="set substitute-path /sledge/runtime $project_directory" \
- --eval-command="run $experiment_directory/spec.json" \
- sledgert
+ --eval-command="handle SIGPIPE nostop" \
+ --eval-command="set pagination off" \
+ --eval-command="set substitute-path /sledge/runtime $project_directory" \
+ --eval-command="run $experiment_directory/spec.json" \
+ sledgert
diff --git a/runtime/experiments/applications/ekf/one_iteration/run.sh b/runtime/experiments/applications/ekf/one_iteration/run.sh
index 3c9f57d..a06dc17 100755
--- a/runtime/experiments/applications/ekf/one_iteration/run.sh
+++ b/runtime/experiments/applications/ekf/one_iteration/run.sh
@@ -12,44 +12,44 @@ did_pass=true
# Copy data if not here
if [[ ! -f "./ekf_raw.dat" ]]; then
- cp ../../../tests/TinyEKF/extras/c/ekf_raw.dat ./ekf_raw.dat
+ cp ../../../tests/TinyEKF/extras/c/ekf_raw.dat ./ekf_raw.dat
fi
if [ "$1" != "-d" ]; then
- PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" &
- sleep 1
+ PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" &
+ sleep 1
else
- echo "Running under gdb"
+ echo "Running under gdb"
fi
-expected_result="$(tr -d '\0' <./expected_result.dat)"
+expected_result="$(tr -d '\0' < ./expected_result.dat)"
success_count=0
total_count=50
for ((i = 0; i < total_count; i++)); do
- echo "$i"
- result="$(curl -H 'Expect:' -H "Content-Type: application/octet-stream" --data-binary "@ekf_raw.dat" localhost:10000 2>/dev/null | tr -d '\0')"
- if [[ "$expected_result" == "$result" ]]; then
- success_count=$((success_count + 1))
- else
- echo "FAIL"
- did_pass=false
- break
- fi
+ echo "$i"
+ result="$(curl -H 'Expect:' -H "Content-Type: application/octet-stream" --data-binary "@ekf_raw.dat" localhost:10000 2> /dev/null | tr -d '\0')"
+ if [[ "$expected_result" == "$result" ]]; then
+ success_count=$((success_count + 1))
+ else
+ echo "FAIL"
+ did_pass=false
+ break
+ fi
done
echo "$success_count / $total_count"
if [ "$1" != "-d" ]; then
- sleep 5
- echo -n "Running Cleanup: "
- pkill sledgert >/dev/null 2>/dev/null
- echo "[DONE]"
+ sleep 5
+ echo -n "Running Cleanup: "
+ pkill sledgert > /dev/null 2> /dev/null
+ echo "[DONE]"
fi
if $did_pass; then
- exit 0
+ exit 0
else
- exit 1
+ exit 1
fi
diff --git a/runtime/experiments/applications/imageclassification/debug.sh b/runtime/experiments/applications/imageclassification/debug.sh
index a561392..80784d0 100755
--- a/runtime/experiments/applications/imageclassification/debug.sh
+++ b/runtime/experiments/applications/imageclassification/debug.sh
@@ -12,8 +12,8 @@ export LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH"
export PATH="$binary_directory:$PATH"
gdb --eval-command="handle SIGUSR1 nostop" \
- --eval-command="handle SIGPIPE nostop" \
- --eval-command="set pagination off" \
- --eval-command="set substitute-path /sledge/runtime $project_directory" \
- --eval-command="run $experiment_directory/spec.json" \
- sledgert
+ --eval-command="handle SIGPIPE nostop" \
+ --eval-command="set pagination off" \
+ --eval-command="set substitute-path /sledge/runtime $project_directory" \
+ --eval-command="run $experiment_directory/spec.json" \
+ sledgert
diff --git a/runtime/experiments/applications/imageclassification/run.sh b/runtime/experiments/applications/imageclassification/run.sh
index 32cb13b..4c095cb 100755
--- a/runtime/experiments/applications/imageclassification/run.sh
+++ b/runtime/experiments/applications/imageclassification/run.sh
@@ -15,10 +15,10 @@ binary_directory=$(cd "$project_directory"/bin && pwd)
# fi
if [ "$1" != "-d" ]; then
- PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" &
- sleep 1
+ PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" &
+ sleep 1
else
- echo "Running under gdb"
+ echo "Running under gdb"
fi
# expected_size="$(find expected_result.jpg -printf "%s")"
@@ -31,39 +31,39 @@ file_type=bmp
# file_type=png
for class in airplane automobile bird cat deer dog frog horse ship truck; do
- for instance in 1 2 3 4 5 6 7 8 9 10; do
- echo "Classifying $class$instance.$file_type"
- curl -H 'Expect:' -H "Content-Type: Image/$file_type" --data-binary "@images/$file_type/$class$instance.$file_type" localhost:10000 2>/dev/null
- done
+ for instance in 1 2 3 4 5 6 7 8 9 10; do
+ echo "Classifying $class$instance.$file_type"
+ curl -H 'Expect:' -H "Content-Type: Image/$file_type" --data-binary "@images/$file_type/$class$instance.$file_type" localhost:10000 2> /dev/null
+ done
done
exit
for ((i = 0; i < total_count; i++)); do
- echo "$i"
- ext="$RANDOM"
- curl -H 'Expect:' -H "Content-Type: image/jpg" --data-binary "@plate.jpg" --output "result_$ext.jpg" localhost:10000 2>/dev/null
- actual_size="$(find result_"$ext".jpg -printf "%s")"
+ echo "$i"
+ ext="$RANDOM"
+ curl -H 'Expect:' -H "Content-Type: image/jpg" --data-binary "@plate.jpg" --output "result_$ext.jpg" localhost:10000 2> /dev/null
+ actual_size="$(find result_"$ext".jpg -printf "%s")"
- # echo "$result"
- if [[ "$expected_size" == "$actual_size" ]]; then
- echo "SUCCESS $success_count"
- else
- echo "FAIL"
- echo "Expected Size:"
- echo "$expected_size"
- echo "==============================================="
- echo "Actual Size:"
- echo "$actual_size"
- fi
+ # echo "$result"
+ if [[ "$expected_size" == "$actual_size" ]]; then
+ echo "SUCCESS $success_count"
+ else
+ echo "FAIL"
+ echo "Expected Size:"
+ echo "$expected_size"
+ echo "==============================================="
+ echo "Actual Size:"
+ echo "$actual_size"
+ fi
done
echo "$success_count / $total_count"
if [ "$1" != "-d" ]; then
- sleep 5
- echo -n "Running Cleanup: "
- rm result_*.jpg
- pkill sledgert >/dev/null 2>/dev/null
- echo "[DONE]"
+ sleep 5
+ echo -n "Running Cleanup: "
+ rm result_*.jpg
+ pkill sledgert > /dev/null 2> /dev/null
+ echo "[DONE]"
fi
diff --git a/runtime/experiments/applications/imageresize/by_resolution/debug.sh b/runtime/experiments/applications/imageresize/by_resolution/debug.sh
index 6ab3666..879d485 100755
--- a/runtime/experiments/applications/imageresize/by_resolution/debug.sh
+++ b/runtime/experiments/applications/imageresize/by_resolution/debug.sh
@@ -12,8 +12,8 @@ export LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH"
export PATH="$binary_directory:$PATH"
gdb --eval-command="handle SIGUSR1 nostop" \
- --eval-command="handle SIGPIPE nostop" \
- --eval-command="set pagination off" \
- --eval-command="set substitute-path /sledge/runtime $project_directory" \
- --eval-command="run $experiment_directory/spec.json" \
- sledgert
+ --eval-command="handle SIGPIPE nostop" \
+ --eval-command="set pagination off" \
+ --eval-command="set substitute-path /sledge/runtime $project_directory" \
+ --eval-command="run $experiment_directory/spec.json" \
+ sledgert
diff --git a/runtime/experiments/applications/imageresize/by_resolution/install.sh b/runtime/experiments/applications/imageresize/by_resolution/install.sh
index f1b4de5..3db7a6e 100755
--- a/runtime/experiments/applications/imageresize/by_resolution/install.sh
+++ b/runtime/experiments/applications/imageresize/by_resolution/install.sh
@@ -2,7 +2,7 @@
# Installs the deps needed for run.sh
if [ "$(whoami)" == "root" ]; then
- apt-get install imagemagick
+ apt-get install imagemagick
else
- sudo apt-get install imagemagick
+ sudo apt-get install imagemagick
fi
diff --git a/runtime/experiments/applications/imageresize/by_resolution/run.sh b/runtime/experiments/applications/imageresize/by_resolution/run.sh
index 3d44144..b5a3862 100755
--- a/runtime/experiments/applications/imageresize/by_resolution/run.sh
+++ b/runtime/experiments/applications/imageresize/by_resolution/run.sh
@@ -11,54 +11,54 @@ binary_directory=$(cd "$project_directory"/bin && pwd)
log="$experiment_directory/log.csv"
if [ "$1" != "-d" ]; then
- SLEDGE_SANDBOX_PERF_LOG=$log PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" &
- sleep 1
+ SLEDGE_SANDBOX_PERF_LOG=$log PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" &
+ sleep 1
else
- echo "Running under gdb"
+ echo "Running under gdb"
fi
success_count=0
total_count=100
for ((i = 0; i < total_count; i++)); do
- echo "$i"
- ext="$RANDOM"
+ echo "$i"
+ ext="$RANDOM"
- curl -H 'Expect:' -H "Content-Type: image/jpg" --data-binary "@shrinking_man_small.jpg" --output "result_${ext}_small.png" localhost:10000 2>/dev/null 1>/dev/null
- pixel_differences="$(compare -identify -metric AE "result_${ext}_small.png" expected_result_small.png null: 2>&1 >/dev/null)"
- if [[ "$pixel_differences" != "0" ]]; then
- echo "Small FAIL"
- echo "$pixel_differences pixel differences detected"
- exit 1
- fi
+ curl -H 'Expect:' -H "Content-Type: image/jpg" --data-binary "@shrinking_man_small.jpg" --output "result_${ext}_small.png" localhost:10000 2> /dev/null 1> /dev/null
+ pixel_differences="$(compare -identify -metric AE "result_${ext}_small.png" expected_result_small.png null: 2>&1 > /dev/null)"
+ if [[ "$pixel_differences" != "0" ]]; then
+ echo "Small FAIL"
+ echo "$pixel_differences pixel differences detected"
+ exit 1
+ fi
- curl -H 'Expect:' -H "Content-Type: image/jpg" --data-binary "@shrinking_man_medium.jpg" --output "result_${ext}_medium.png" localhost:10001 2>/dev/null 1>/dev/null
- pixel_differences="$(compare -identify -metric AE "result_${ext}_medium.png" expected_result_medium.png null: 2>&1 >/dev/null)"
- if [[ "$pixel_differences" != "0" ]]; then
- echo "Small FAIL"
- echo "$pixel_differences pixel differences detected"
- exit 1
- fi
+ curl -H 'Expect:' -H "Content-Type: image/jpg" --data-binary "@shrinking_man_medium.jpg" --output "result_${ext}_medium.png" localhost:10001 2> /dev/null 1> /dev/null
+ pixel_differences="$(compare -identify -metric AE "result_${ext}_medium.png" expected_result_medium.png null: 2>&1 > /dev/null)"
+ if [[ "$pixel_differences" != "0" ]]; then
+ echo "Small FAIL"
+ echo "$pixel_differences pixel differences detected"
+ exit 1
+ fi
- curl -H 'Expect:' -H "Content-Type: image/jpg" --data-binary "@shrinking_man_large.jpg" --output "result_${ext}_large.png" localhost:10002 2>/dev/null 1>/dev/null
- pixel_differences="$(compare -identify -metric AE "result_${ext}_large.png" expected_result_large.png null: 2>&1 >/dev/null)"
- if [[ "$pixel_differences" != "0" ]]; then
- echo "Small FAIL"
- echo "$pixel_differences pixel differences detected"
- exit 1
- fi
+ curl -H 'Expect:' -H "Content-Type: image/jpg" --data-binary "@shrinking_man_large.jpg" --output "result_${ext}_large.png" localhost:10002 2> /dev/null 1> /dev/null
+ pixel_differences="$(compare -identify -metric AE "result_${ext}_large.png" expected_result_large.png null: 2>&1 > /dev/null)"
+ if [[ "$pixel_differences" != "0" ]]; then
+ echo "Small FAIL"
+ echo "$pixel_differences pixel differences detected"
+ exit 1
+ fi
- success_count=$((success_count + 1))
+ success_count=$((success_count + 1))
done
echo "$success_count / $total_count"
rm -f result_*.png
if [ "$1" != "-d" ]; then
- sleep 5
- echo -n "Running Cleanup: "
- pkill sledgert >/dev/null 2>/dev/null
- echo "[DONE]"
+ sleep 5
+ echo -n "Running Cleanup: "
+ pkill sledgert > /dev/null 2> /dev/null
+ echo "[DONE]"
fi
exit 0
diff --git a/runtime/experiments/applications/imageresize/test/debug.sh b/runtime/experiments/applications/imageresize/test/debug.sh
index 6ab3666..879d485 100755
--- a/runtime/experiments/applications/imageresize/test/debug.sh
+++ b/runtime/experiments/applications/imageresize/test/debug.sh
@@ -12,8 +12,8 @@ export LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH"
export PATH="$binary_directory:$PATH"
gdb --eval-command="handle SIGUSR1 nostop" \
- --eval-command="handle SIGPIPE nostop" \
- --eval-command="set pagination off" \
- --eval-command="set substitute-path /sledge/runtime $project_directory" \
- --eval-command="run $experiment_directory/spec.json" \
- sledgert
+ --eval-command="handle SIGPIPE nostop" \
+ --eval-command="set pagination off" \
+ --eval-command="set substitute-path /sledge/runtime $project_directory" \
+ --eval-command="run $experiment_directory/spec.json" \
+ sledgert
diff --git a/runtime/experiments/applications/imageresize/test/install.sh b/runtime/experiments/applications/imageresize/test/install.sh
index f1b4de5..3db7a6e 100755
--- a/runtime/experiments/applications/imageresize/test/install.sh
+++ b/runtime/experiments/applications/imageresize/test/install.sh
@@ -2,7 +2,7 @@
# Installs the deps needed for run.sh
if [ "$(whoami)" == "root" ]; then
- apt-get install imagemagick
+ apt-get install imagemagick
else
- sudo apt-get install imagemagick
+ sudo apt-get install imagemagick
fi
diff --git a/runtime/experiments/applications/imageresize/test/run.sh b/runtime/experiments/applications/imageresize/test/run.sh
index 1cd8d77..501f344 100755
--- a/runtime/experiments/applications/imageresize/test/run.sh
+++ b/runtime/experiments/applications/imageresize/test/run.sh
@@ -11,43 +11,43 @@ binary_directory=$(cd "$project_directory"/bin && pwd)
# Copy Flower Image if not here
if [[ ! -f "./flower.jpg" ]]; then
- cp ../../../../tests/sod/bin/flower.jpg ./flower.jpg
+ cp ../../../../tests/sod/bin/flower.jpg ./flower.jpg
fi
if [ "$1" != "-d" ]; then
- PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" &
- sleep 1
+ PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" &
+ sleep 1
else
- echo "Running under gdb"
+ echo "Running under gdb"
fi
success_count=0
total_count=10
for ((i = 0; i < total_count; i++)); do
- echo "$i"
- ext="$RANDOM"
- curl -H 'Expect:' -H "Content-Type: image/jpg" --data-binary "@flower.jpg" --output "result_$ext.png" localhost:10000 2>/dev/null 1>/dev/null || exit 1
-
- pixel_differences="$(compare -identify -metric AE "result_$ext.png" expected_result.png null: 2>&1 >/dev/null)"
-
- if [[ "$pixel_differences" == "0" ]]; then
- success_count=$((success_count + 1))
- else
- echo "FAIL"
- echo "$pixel_differences pixel differences detected"
- exit 1
- fi
+ echo "$i"
+ ext="$RANDOM"
+ curl -H 'Expect:' -H "Content-Type: image/jpg" --data-binary "@flower.jpg" --output "result_$ext.png" localhost:10000 2> /dev/null 1> /dev/null || exit 1
+
+ pixel_differences="$(compare -identify -metric AE "result_$ext.png" expected_result.png null: 2>&1 > /dev/null)"
+
+ if [[ "$pixel_differences" == "0" ]]; then
+ success_count=$((success_count + 1))
+ else
+ echo "FAIL"
+ echo "$pixel_differences pixel differences detected"
+ exit 1
+ fi
done
echo "$success_count / $total_count"
rm result_*.png
if [ "$1" != "-d" ]; then
- sleep 5
- echo -n "Running Cleanup: "
- pkill sledgert >/dev/null 2>/dev/null
- echo "[DONE]"
+ sleep 5
+ echo -n "Running Cleanup: "
+ pkill sledgert > /dev/null 2> /dev/null
+ echo "[DONE]"
fi
exit 0
diff --git a/runtime/experiments/applications/licenseplate/by_plate_count/debug.sh b/runtime/experiments/applications/licenseplate/by_plate_count/debug.sh
index 6ab3666..879d485 100755
--- a/runtime/experiments/applications/licenseplate/by_plate_count/debug.sh
+++ b/runtime/experiments/applications/licenseplate/by_plate_count/debug.sh
@@ -12,8 +12,8 @@ export LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH"
export PATH="$binary_directory:$PATH"
gdb --eval-command="handle SIGUSR1 nostop" \
- --eval-command="handle SIGPIPE nostop" \
- --eval-command="set pagination off" \
- --eval-command="set substitute-path /sledge/runtime $project_directory" \
- --eval-command="run $experiment_directory/spec.json" \
- sledgert
+ --eval-command="handle SIGPIPE nostop" \
+ --eval-command="set pagination off" \
+ --eval-command="set substitute-path /sledge/runtime $project_directory" \
+ --eval-command="run $experiment_directory/spec.json" \
+ sledgert
diff --git a/runtime/experiments/applications/licenseplate/by_plate_count/run.sh b/runtime/experiments/applications/licenseplate/by_plate_count/run.sh
index 06420c7..cb576d7 100755
--- a/runtime/experiments/applications/licenseplate/by_plate_count/run.sh
+++ b/runtime/experiments/applications/licenseplate/by_plate_count/run.sh
@@ -11,10 +11,10 @@ binary_directory=$(cd "$project_directory"/bin && pwd)
log="$experiment_directory/log.csv"
if [ "$1" != "-d" ]; then
- SLEDGE_SANDBOX_PERF_LOG=$log PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" &
- sleep 1
+ SLEDGE_SANDBOX_PERF_LOG=$log PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" &
+ sleep 1
else
- echo "Running under gdb"
+ echo "Running under gdb"
fi
one_plate=(Cars0 Cars1 Cars2 Cars3 Cars4)
@@ -22,21 +22,21 @@ two_plates=(Cars71 Cars87 Cars143 Cars295 Cars316)
four_plates=(Cars106 Cars146 Cars249 Cars277 Cars330)
for image in ${one_plate[*]}; do
- echo "@./1/${image}.png"
- curl --data-binary "@./1/${image}.png" --output - localhost:10000
+ echo "@./1/${image}.png"
+ curl --data-binary "@./1/${image}.png" --output - localhost:10000
done
for image in ${two_plates[*]}; do
- echo "@./2/${image}.png"
- curl --data-binary "@./2/${image}.png" --output - localhost:10001
+ echo "@./2/${image}.png"
+ curl --data-binary "@./2/${image}.png" --output - localhost:10001
done
for image in ${four_plates[*]}; do
- echo "@./4/${image}.png"
- curl --data-binary "@./4/${image}.png" --output - localhost:10002
+ echo "@./4/${image}.png"
+ curl --data-binary "@./4/${image}.png" --output - localhost:10002
done
if [ "$1" != "-d" ]; then
- sleep 5
- echo -n "Running Cleanup: "
- pkill sledgert >/dev/null 2>/dev/null
- echo "[DONE]"
+ sleep 5
+ echo -n "Running Cleanup: "
+ pkill sledgert > /dev/null 2> /dev/null
+ echo "[DONE]"
fi
diff --git a/runtime/experiments/applications/ocr/by_dpi/debug.sh b/runtime/experiments/applications/ocr/by_dpi/debug.sh
index 6ab3666..879d485 100755
--- a/runtime/experiments/applications/ocr/by_dpi/debug.sh
+++ b/runtime/experiments/applications/ocr/by_dpi/debug.sh
@@ -12,8 +12,8 @@ export LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH"
export PATH="$binary_directory:$PATH"
gdb --eval-command="handle SIGUSR1 nostop" \
- --eval-command="handle SIGPIPE nostop" \
- --eval-command="set pagination off" \
- --eval-command="set substitute-path /sledge/runtime $project_directory" \
- --eval-command="run $experiment_directory/spec.json" \
- sledgert
+ --eval-command="handle SIGPIPE nostop" \
+ --eval-command="set pagination off" \
+ --eval-command="set substitute-path /sledge/runtime $project_directory" \
+ --eval-command="run $experiment_directory/spec.json" \
+ sledgert
diff --git a/runtime/experiments/applications/ocr/by_dpi/install.sh b/runtime/experiments/applications/ocr/by_dpi/install.sh
index 43f930b..6d4d700 100755
--- a/runtime/experiments/applications/ocr/by_dpi/install.sh
+++ b/runtime/experiments/applications/ocr/by_dpi/install.sh
@@ -2,7 +2,7 @@
# Installs the deps needed for run.sh
if [ "$(whoami)" == "root" ]; then
- apt-get install netpbm pango1.0-tools wamerican
+ apt-get install netpbm pango1.0-tools wamerican
else
- sudo apt-get install netpbm pango1.0-tools wamerican
+ sudo apt-get install netpbm pango1.0-tools wamerican
fi
diff --git a/runtime/experiments/applications/ocr/by_dpi/run.sh b/runtime/experiments/applications/ocr/by_dpi/run.sh
index 6b5d8f5..0d25b80 100755
--- a/runtime/experiments/applications/ocr/by_dpi/run.sh
+++ b/runtime/experiments/applications/ocr/by_dpi/run.sh
@@ -11,10 +11,10 @@ binary_directory=$(cd "$project_directory"/bin && pwd)
log="$experiment_directory/log.csv"
if [ "$1" != "-d" ]; then
- SLEDGE_SANDBOX_PERF_LOG=$log PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" >rt.log 2>&1 &
- sleep 2
+ SLEDGE_SANDBOX_PERF_LOG=$log PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" > rt.log 2>&1 &
+ sleep 2
else
- echo "Running under gdb"
+ echo "Running under gdb"
fi
word_count=100
@@ -28,28 +28,28 @@ dpi_to_port[144]=10002
total_count=100
for ((i = 0; i < total_count; i++)); do
- echo "$i"
- words="$(shuf -n"$word_count" /usr/share/dict/american-english)"
+ echo "$i"
+ words="$(shuf -n"$word_count" /usr/share/dict/american-english)"
- for dpi in "${dpis[@]}"; do
- echo "${dpi}"_dpi.pnm
- pango-view --dpi=$dpi --font=mono -qo "${dpi}"_dpi.png -t "$words"
- pngtopnm "${dpi}"_dpi.png >"${dpi}"_dpi.pnm
+ for dpi in "${dpis[@]}"; do
+ echo "${dpi}"_dpi.pnm
+ pango-view --dpi=$dpi --font=mono -qo "${dpi}"_dpi.png -t "$words"
+ pngtopnm "${dpi}"_dpi.png > "${dpi}"_dpi.pnm
- result=$(curl -H 'Expect:' -H "Content-Type: text/plain" --data-binary @"${dpi}"_dpi.pnm localhost:${dpi_to_port[$dpi]} 2>/dev/null)
+ result=$(curl -H 'Expect:' -H "Content-Type: text/plain" --data-binary @"${dpi}"_dpi.pnm localhost:${dpi_to_port[$dpi]} 2> /dev/null)
- diff -ywBZE --suppress-common-lines <(echo "$words") <(echo "$result")
- echo "==============================================="
- done
+ diff -ywBZE --suppress-common-lines <(echo "$words") <(echo "$result")
+ echo "==============================================="
+ done
done
if [ "$1" != "-d" ]; then
- sleep 2
- echo -n "Running Cleanup: "
- rm ./*.png ./*.pnm
- pkill --signal sigterm sledgert >/dev/null 2>/dev/null
- sleep 2
- pkill sledgert -9 >/dev/null 2>/dev/null
- echo "[DONE]"
+ sleep 2
+ echo -n "Running Cleanup: "
+ rm ./*.png ./*.pnm
+ pkill --signal sigterm sledgert > /dev/null 2> /dev/null
+ sleep 2
+ pkill sledgert -9 > /dev/null 2> /dev/null
+ echo "[DONE]"
fi
diff --git a/runtime/experiments/applications/ocr/by_font/debug.sh b/runtime/experiments/applications/ocr/by_font/debug.sh
index 6ab3666..879d485 100755
--- a/runtime/experiments/applications/ocr/by_font/debug.sh
+++ b/runtime/experiments/applications/ocr/by_font/debug.sh
@@ -12,8 +12,8 @@ export LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH"
export PATH="$binary_directory:$PATH"
gdb --eval-command="handle SIGUSR1 nostop" \
- --eval-command="handle SIGPIPE nostop" \
- --eval-command="set pagination off" \
- --eval-command="set substitute-path /sledge/runtime $project_directory" \
- --eval-command="run $experiment_directory/spec.json" \
- sledgert
+ --eval-command="handle SIGPIPE nostop" \
+ --eval-command="set pagination off" \
+ --eval-command="set substitute-path /sledge/runtime $project_directory" \
+ --eval-command="run $experiment_directory/spec.json" \
+ sledgert
diff --git a/runtime/experiments/applications/ocr/by_font/install.sh b/runtime/experiments/applications/ocr/by_font/install.sh
index e2eff34..ada0119 100755
--- a/runtime/experiments/applications/ocr/by_font/install.sh
+++ b/runtime/experiments/applications/ocr/by_font/install.sh
@@ -2,7 +2,7 @@
# Installs the deps needed for run.sh
if [ "$(whoami)" == "root" ]; then
- apt-get install netpbm pango1.0-tools wamerican fonts-roboto fonts-cascadia-code fonts-dejavu
+ apt-get install netpbm pango1.0-tools wamerican fonts-roboto fonts-cascadia-code fonts-dejavu
else
- sudo apt-get install netpbm pango1.0-tools wamerican fonts-roboto fonts-cascadia-code fonts-dejavu
+ sudo apt-get install netpbm pango1.0-tools wamerican fonts-roboto fonts-cascadia-code fonts-dejavu
fi
diff --git a/runtime/experiments/applications/ocr/by_font/run.sh b/runtime/experiments/applications/ocr/by_font/run.sh
index a1bdf34..83acad0 100755
--- a/runtime/experiments/applications/ocr/by_font/run.sh
+++ b/runtime/experiments/applications/ocr/by_font/run.sh
@@ -11,10 +11,10 @@ binary_directory=$(cd "$project_directory"/bin && pwd)
log="$experiment_directory/log.csv"
if [ "$1" != "-d" ]; then
- SLEDGE_SANDBOX_PERF_LOG=$log PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" >rt.log 2>&1 &
- sleep 2
+ SLEDGE_SANDBOX_PERF_LOG=$log PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" > rt.log 2>&1 &
+ sleep 2
else
- echo "Running under gdb"
+ echo "Running under gdb"
fi
word_count=100
@@ -22,45 +22,45 @@ fonts=("DejaVu Sans Mono" "Roboto" "Cascadia Code")
total_count=10
for ((i = 1; i <= total_count; i++)); do
- echo "Test $i"
- words="$(shuf -n"$word_count" /usr/share/dict/american-english)"
+ echo "Test $i"
+ words="$(shuf -n"$word_count" /usr/share/dict/american-english)"
- for font in "${fonts[@]}"; do
- # For whatever reason, templating in multiple word strips was a pain, so brute forcing
- case "$font" in
- "DejaVu Sans Mono")
- echo "DejaVu Sans Mono"
- pango-view --font="DejaVu Sans Mono" -qo mono_words.png -t "$words" || exit 1
- pngtopnm mono_words.png >mono_words.pnm || exit 1
- result=$(curl -H 'Expect:' -H "Content-Type: text/plain" --data-binary @mono_words.pnm localhost:10000 2>/dev/null)
- diff -ywBZE --suppress-common-lines <(echo "$words") <(echo "$result")
- ;;
- "Roboto")
- echo "Roboto"
- pango-view --font="Roboto" -qo Roboto_words.png -t "$words" || exit 1
- pngtopnm Roboto_words.png >Roboto_words.pnm || exit 1
- result=$(curl -H 'Expect:' -H "Content-Type: text/plain" --data-binary @Roboto_words.pnm localhost:10002 2>/dev/null)
- diff -ywBZE --suppress-common-lines <(echo "$words") <(echo "$result")
- ;;
- "Cascadia Code")
- echo "Cascadia Code"
- pango-view --font="Cascadia Code" -qo Cascadia_Code_words.png -t "$words" || exit 1
- pngtopnm Cascadia_Code_words.png >Cascadia_Code_words.pnm || exit 1
- result=$(curl -H 'Expect:' -H "Content-Type: text/plain" --data-binary @Cascadia_Code_words.pnm localhost:10001 2>/dev/null)
- diff -ywBZE --suppress-common-lines <(echo "$words") <(echo "$result")
- ;;
- esac
- echo "==============================================="
- done
+ for font in "${fonts[@]}"; do
+ # For whatever reason, templating in multiple word strips was a pain, so brute forcing
+ case "$font" in
+ "DejaVu Sans Mono")
+ echo "DejaVu Sans Mono"
+ pango-view --font="DejaVu Sans Mono" -qo mono_words.png -t "$words" || exit 1
+ pngtopnm mono_words.png > mono_words.pnm || exit 1
+ result=$(curl -H 'Expect:' -H "Content-Type: text/plain" --data-binary @mono_words.pnm localhost:10000 2> /dev/null)
+ diff -ywBZE --suppress-common-lines <(echo "$words") <(echo "$result")
+ ;;
+ "Roboto")
+ echo "Roboto"
+ pango-view --font="Roboto" -qo Roboto_words.png -t "$words" || exit 1
+ pngtopnm Roboto_words.png > Roboto_words.pnm || exit 1
+ result=$(curl -H 'Expect:' -H "Content-Type: text/plain" --data-binary @Roboto_words.pnm localhost:10002 2> /dev/null)
+ diff -ywBZE --suppress-common-lines <(echo "$words") <(echo "$result")
+ ;;
+ "Cascadia Code")
+ echo "Cascadia Code"
+ pango-view --font="Cascadia Code" -qo Cascadia_Code_words.png -t "$words" || exit 1
+ pngtopnm Cascadia_Code_words.png > Cascadia_Code_words.pnm || exit 1
+ result=$(curl -H 'Expect:' -H "Content-Type: text/plain" --data-binary @Cascadia_Code_words.pnm localhost:10001 2> /dev/null)
+ diff -ywBZE --suppress-common-lines <(echo "$words") <(echo "$result")
+ ;;
+ esac
+ echo "==============================================="
+ done
done
if [ "$1" != "-d" ]; then
- sleep 2
- echo -n "Running Cleanup: "
- rm ./*.png ./*.pnm
- pkill --signal sigterm sledgert >/dev/null 2>/dev/null
- sleep 2
- pkill sledgert -9 >/dev/null 2>/dev/null
- echo "[DONE]"
+ sleep 2
+ echo -n "Running Cleanup: "
+ rm ./*.png ./*.pnm
+ pkill --signal sigterm sledgert > /dev/null 2> /dev/null
+ sleep 2
+ pkill sledgert -9 > /dev/null 2> /dev/null
+ echo "[DONE]"
fi
diff --git a/runtime/experiments/applications/ocr/by_word/debug.sh b/runtime/experiments/applications/ocr/by_word/debug.sh
index 6ab3666..879d485 100755
--- a/runtime/experiments/applications/ocr/by_word/debug.sh
+++ b/runtime/experiments/applications/ocr/by_word/debug.sh
@@ -12,8 +12,8 @@ export LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH"
export PATH="$binary_directory:$PATH"
gdb --eval-command="handle SIGUSR1 nostop" \
- --eval-command="handle SIGPIPE nostop" \
- --eval-command="set pagination off" \
- --eval-command="set substitute-path /sledge/runtime $project_directory" \
- --eval-command="run $experiment_directory/spec.json" \
- sledgert
+ --eval-command="handle SIGPIPE nostop" \
+ --eval-command="set pagination off" \
+ --eval-command="set substitute-path /sledge/runtime $project_directory" \
+ --eval-command="run $experiment_directory/spec.json" \
+ sledgert
diff --git a/runtime/experiments/applications/ocr/by_word/install.sh b/runtime/experiments/applications/ocr/by_word/install.sh
index 43f930b..6d4d700 100755
--- a/runtime/experiments/applications/ocr/by_word/install.sh
+++ b/runtime/experiments/applications/ocr/by_word/install.sh
@@ -2,7 +2,7 @@
# Installs the deps needed for run.sh
if [ "$(whoami)" == "root" ]; then
- apt-get install netpbm pango1.0-tools wamerican
+ apt-get install netpbm pango1.0-tools wamerican
else
- sudo apt-get install netpbm pango1.0-tools wamerican
+ sudo apt-get install netpbm pango1.0-tools wamerican
fi
diff --git a/runtime/experiments/applications/ocr/by_word/run.sh b/runtime/experiments/applications/ocr/by_word/run.sh
index 7ca4e2f..9276b0a 100755
--- a/runtime/experiments/applications/ocr/by_word/run.sh
+++ b/runtime/experiments/applications/ocr/by_word/run.sh
@@ -11,10 +11,10 @@ binary_directory=$(cd "$project_directory"/bin && pwd)
log="$experiment_directory/log.csv"
if [ "$1" != "-d" ]; then
- SLEDGE_SANDBOX_PERF_LOG=$log PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" >rt.log 2>&1 &
- sleep 2
+ SLEDGE_SANDBOX_PERF_LOG=$log PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" > rt.log 2>&1 &
+ sleep 2
else
- echo "Running under gdb"
+ echo "Running under gdb"
fi
word_counts=(1 10 100)
@@ -27,28 +27,28 @@ word_count_to_port["100_words.pnm"]=10002
total_count=100
for ((i = 0; i < total_count; i++)); do
- echo "$i"
+ echo "$i"
- for word_count in "${word_counts[@]}"; do
- echo "${word_count}"_words.pnm
- words="$(shuf -n"$word_count" /usr/share/dict/american-english)"
- pango-view --font=mono -qo "$word_count"_words.png -t "$words" || exit 1
- pngtopnm "$word_count"_words.png >"$word_count"_words.pnm || exit 1
+ for word_count in "${word_counts[@]}"; do
+ echo "${word_count}"_words.pnm
+ words="$(shuf -n"$word_count" /usr/share/dict/american-english)"
+ pango-view --font=mono -qo "$word_count"_words.png -t "$words" || exit 1
+ pngtopnm "$word_count"_words.png > "$word_count"_words.pnm || exit 1
- result=$(curl -H 'Expect:' -H "Content-Type: text/plain" --data-binary @"${word_count}"_words.pnm localhost:${word_count_to_port["$word_count"_words.pnm]} 2>/dev/null)
+ result=$(curl -H 'Expect:' -H "Content-Type: text/plain" --data-binary @"${word_count}"_words.pnm localhost:${word_count_to_port["$word_count"_words.pnm]} 2> /dev/null)
- diff -ywBZE --suppress-common-lines <(echo "$words") <(echo "$result")
- echo "==============================================="
- done
+ diff -ywBZE --suppress-common-lines <(echo "$words") <(echo "$result")
+ echo "==============================================="
+ done
done
if [ "$1" != "-d" ]; then
- sleep 2
- echo -n "Running Cleanup: "
- rm ./*.png ./*.pnm
- pkill --signal sigterm sledgert >/dev/null 2>/dev/null
- sleep 2
- pkill sledgert -9 >/dev/null 2>/dev/null
- echo "[DONE]"
+ sleep 2
+ echo -n "Running Cleanup: "
+ rm ./*.png ./*.pnm
+ pkill --signal sigterm sledgert > /dev/null 2> /dev/null
+ sleep 2
+ pkill sledgert -9 > /dev/null 2> /dev/null
+ echo "[DONE]"
fi
diff --git a/runtime/experiments/applications/ocr/fivebyeight/debug.sh b/runtime/experiments/applications/ocr/fivebyeight/debug.sh
index 6ab3666..879d485 100755
--- a/runtime/experiments/applications/ocr/fivebyeight/debug.sh
+++ b/runtime/experiments/applications/ocr/fivebyeight/debug.sh
@@ -12,8 +12,8 @@ export LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH"
export PATH="$binary_directory:$PATH"
gdb --eval-command="handle SIGUSR1 nostop" \
- --eval-command="handle SIGPIPE nostop" \
- --eval-command="set pagination off" \
- --eval-command="set substitute-path /sledge/runtime $project_directory" \
- --eval-command="run $experiment_directory/spec.json" \
- sledgert
+ --eval-command="handle SIGPIPE nostop" \
+ --eval-command="set pagination off" \
+ --eval-command="set substitute-path /sledge/runtime $project_directory" \
+ --eval-command="run $experiment_directory/spec.json" \
+ sledgert
diff --git a/runtime/experiments/applications/ocr/fivebyeight/run.sh b/runtime/experiments/applications/ocr/fivebyeight/run.sh
index 94e4b2c..a97be2c 100755
--- a/runtime/experiments/applications/ocr/fivebyeight/run.sh
+++ b/runtime/experiments/applications/ocr/fivebyeight/run.sh
@@ -12,10 +12,10 @@ binary_directory=$(cd "$project_directory"/bin && pwd)
did_pass=true
if [ "$1" != "-d" ]; then
- PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" &
- sleep 1
+ PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" &
+ sleep 1
else
- echo "Running under gdb"
+ echo "Running under gdb"
fi
expected_result="$(cat ./expected_result.txt)"
@@ -24,35 +24,35 @@ success_count=0
total_count=50
for ((i = 0; i < total_count; i++)); do
- echo "$i"
- result=$(curl -H 'Expect:' -H "Content-Type: text/plain" --data-binary "@5x8.pnm" localhost:10000 2>/dev/null)
- # echo "$result"
- if [[ "$result" == "$expected_result" ]]; then
- success_count=$((success_count + 1))
- else
- echo "FAIL"
- echo "Expected:"
- echo "$expected_result"
- echo "==============================================="
- echo "Was:"
- echo "$result"
-
- did_pass=false
- break
- fi
+ echo "$i"
+ result=$(curl -H 'Expect:' -H "Content-Type: text/plain" --data-binary "@5x8.pnm" localhost:10000 2> /dev/null)
+ # echo "$result"
+ if [[ "$result" == "$expected_result" ]]; then
+ success_count=$((success_count + 1))
+ else
+ echo "FAIL"
+ echo "Expected:"
+ echo "$expected_result"
+ echo "==============================================="
+ echo "Was:"
+ echo "$result"
+
+ did_pass=false
+ break
+ fi
done
echo "$success_count / $total_count"
if [ "$1" != "-d" ]; then
- sleep 5
- echo -n "Running Cleanup: "
- pkill sledgert >/dev/null 2>/dev/null
- echo "[DONE]"
+ sleep 5
+ echo -n "Running Cleanup: "
+ pkill sledgert > /dev/null 2> /dev/null
+ echo "[DONE]"
fi
if $did_pass; then
- exit 0
+ exit 0
else
- exit 1
+ exit 1
fi
diff --git a/runtime/experiments/applications/ocr/handwriting/debug.sh b/runtime/experiments/applications/ocr/handwriting/debug.sh
index 6ab3666..879d485 100755
--- a/runtime/experiments/applications/ocr/handwriting/debug.sh
+++ b/runtime/experiments/applications/ocr/handwriting/debug.sh
@@ -12,8 +12,8 @@ export LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH"
export PATH="$binary_directory:$PATH"
gdb --eval-command="handle SIGUSR1 nostop" \
- --eval-command="handle SIGPIPE nostop" \
- --eval-command="set pagination off" \
- --eval-command="set substitute-path /sledge/runtime $project_directory" \
- --eval-command="run $experiment_directory/spec.json" \
- sledgert
+ --eval-command="handle SIGPIPE nostop" \
+ --eval-command="set pagination off" \
+ --eval-command="set substitute-path /sledge/runtime $project_directory" \
+ --eval-command="run $experiment_directory/spec.json" \
+ sledgert
diff --git a/runtime/experiments/applications/ocr/handwriting/run.sh b/runtime/experiments/applications/ocr/handwriting/run.sh
index 2555e17..bc29711 100755
--- a/runtime/experiments/applications/ocr/handwriting/run.sh
+++ b/runtime/experiments/applications/ocr/handwriting/run.sh
@@ -12,10 +12,10 @@ binary_directory=$(cd "$project_directory"/bin && pwd)
did_pass=true
if [ "$1" != "-d" ]; then
- PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" &
- sleep 1
+ PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" &
+ sleep 1
else
- echo "Running under gdb"
+ echo "Running under gdb"
fi
expected_result="$(cat ./expected_result.txt)"
@@ -23,34 +23,34 @@ success_count=0
total_count=50
for ((i = 0; i < total_count; i++)); do
- echo "$i"
- result=$(curl -H 'Expect:' -H "Content-Type: text/plain" --data-binary "@handwrt1.pnm" localhost:10000 2>/dev/null)
- # echo "$result"
- if [[ "$result" == "$expected_result" ]]; then
- success_count=$((success_count + 1))
- else
- echo "FAIL"
- echo "Expected:"
- echo "$expected_result"
- echo "==============================================="
- echo "Was:"
- echo "$result"
- did_pass=false
- break
- fi
+ echo "$i"
+ result=$(curl -H 'Expect:' -H "Content-Type: text/plain" --data-binary "@handwrt1.pnm" localhost:10000 2> /dev/null)
+ # echo "$result"
+ if [[ "$result" == "$expected_result" ]]; then
+ success_count=$((success_count + 1))
+ else
+ echo "FAIL"
+ echo "Expected:"
+ echo "$expected_result"
+ echo "==============================================="
+ echo "Was:"
+ echo "$result"
+ did_pass=false
+ break
+ fi
done
echo "$success_count / $total_count"
if [ "$1" != "-d" ]; then
- sleep 5
- echo -n "Running Cleanup: "
- pkill sledgert >/dev/null 2>/dev/null
- echo "[DONE]"
+ sleep 5
+ echo -n "Running Cleanup: "
+ pkill sledgert > /dev/null 2> /dev/null
+ echo "[DONE]"
fi
if $did_pass; then
- exit 0
+ exit 0
else
- exit 1
+ exit 1
fi
diff --git a/runtime/experiments/applications/ocr/hyde/debug.sh b/runtime/experiments/applications/ocr/hyde/debug.sh
index 6ab3666..879d485 100755
--- a/runtime/experiments/applications/ocr/hyde/debug.sh
+++ b/runtime/experiments/applications/ocr/hyde/debug.sh
@@ -12,8 +12,8 @@ export LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH"
export PATH="$binary_directory:$PATH"
gdb --eval-command="handle SIGUSR1 nostop" \
- --eval-command="handle SIGPIPE nostop" \
- --eval-command="set pagination off" \
- --eval-command="set substitute-path /sledge/runtime $project_directory" \
- --eval-command="run $experiment_directory/spec.json" \
- sledgert
+ --eval-command="handle SIGPIPE nostop" \
+ --eval-command="set pagination off" \
+ --eval-command="set substitute-path /sledge/runtime $project_directory" \
+ --eval-command="run $experiment_directory/spec.json" \
+ sledgert
diff --git a/runtime/experiments/applications/ocr/hyde/run.sh b/runtime/experiments/applications/ocr/hyde/run.sh
index 875af1a..67b7faa 100755
--- a/runtime/experiments/applications/ocr/hyde/run.sh
+++ b/runtime/experiments/applications/ocr/hyde/run.sh
@@ -11,10 +11,10 @@ binary_directory=$(cd "$project_directory"/bin && pwd)
did_pass=true
if [ "$1" != "-d" ]; then
- PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" &
- sleep 1
+ PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" &
+ sleep 1
else
- echo "Running under gdb"
+ echo "Running under gdb"
fi
expected_result="$(cat ./expected_result.txt)"
@@ -22,34 +22,34 @@ success_count=0
total_count=50
for ((i = 0; i < total_count; i++)); do
- echo "$i"
- result=$(curl -H 'Expect:' -H "Content-Type: text/plain" --data-binary "@hyde.pnm" localhost:10000 2>/dev/null)
- # echo "$result"
- if [[ "$result" == "$expected_result" ]]; then
- success_count=$((success_count + 1))
- else
- echo "FAIL"
- echo "Expected:"
- echo "$expected_result"
- echo "==============================================="
- echo "Was:"
- echo "$result"
- did_pass=false
- break
- fi
+ echo "$i"
+ result=$(curl -H 'Expect:' -H "Content-Type: text/plain" --data-binary "@hyde.pnm" localhost:10000 2> /dev/null)
+ # echo "$result"
+ if [[ "$result" == "$expected_result" ]]; then
+ success_count=$((success_count + 1))
+ else
+ echo "FAIL"
+ echo "Expected:"
+ echo "$expected_result"
+ echo "==============================================="
+ echo "Was:"
+ echo "$result"
+ did_pass=false
+ break
+ fi
done
echo "$success_count / $total_count"
if [ "$1" != "-d" ]; then
- sleep 5
- echo -n "Running Cleanup: "
- pkill sledgert >/dev/null 2>/dev/null
- echo "[DONE]"
+ sleep 5
+ echo -n "Running Cleanup: "
+ pkill sledgert > /dev/null 2> /dev/null
+ echo "[DONE]"
fi
if $did_pass; then
- exit 0
+ exit 0
else
- exit 1
+ exit 1
fi
diff --git a/runtime/experiments/applications/run.sh b/runtime/experiments/applications/run.sh
index 440ccaf..6070bcd 100755
--- a/runtime/experiments/applications/run.sh
+++ b/runtime/experiments/applications/run.sh
@@ -12,15 +12,15 @@ log=log.txt
mkdir -p "$results_directory"
-log_environment >>"$results_directory/$log"
+log_environment >> "$results_directory/$log"
# Start the runtime
if [ "$1" != "-d" ]; then
- PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" >>"$results_directory/$log" 2>>"$results_directory/$log" &
- sleep 1
+ PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" >> "$results_directory/$log" 2>> "$results_directory/$log" &
+ sleep 1
else
- echo "Running under gdb"
- echo "Running under gdb" >>"$results_directory/$log"
+ echo "Running under gdb"
+ echo "Running under gdb" >> "$results_directory/$log"
fi
payloads=(fivebyeight/5x8 handwriting/handwrt1 hyde/hyde)
ports=(10000 10001 10002)
@@ -29,7 +29,7 @@ iterations=1000
# Execute workloads long enough for runtime to learn excepted execution time
echo -n "Running Samples: "
for i in {0..2}; do
- hey -n 200 -c 3 -q 200 -o csv -m GET -D "$experiment_directory/${payloads[$i]}.pnm" "http://localhost:${ports[$i]}"
+ hey -n 200 -c 3 -q 200 -o csv -m GET -D "$experiment_directory/${payloads[$i]}.pnm" "http://localhost:${ports[$i]}"
done
sleep 1
echo "[DONE]"
@@ -37,49 +37,49 @@ echo "[DONE]"
# Execute the experiments
echo "Running Experiments"
for i in {0..2}; do
- printf "\t%s Payload: " "${payloads[$i]}"
- file=$(echo "${payloads[$i]}" | awk -F/ '{print $2}').csv
- hey -n "$iterations" -c 3 -cpus 2 -o csv -m GET -D "$experiment_directory/${payloads[$i]}.pnm" "http://localhost:${ports[$i]}" >"$results_directory/$file"
- echo "[DONE]"
+ printf "\t%s Payload: " "${payloads[$i]}"
+ file=$(echo "${payloads[$i]}" | awk -F/ '{print $2}').csv
+ hey -n "$iterations" -c 3 -cpus 2 -o csv -m GET -D "$experiment_directory/${payloads[$i]}.pnm" "http://localhost:${ports[$i]}" > "$results_directory/$file"
+ echo "[DONE]"
done
# Stop the runtime
if [ "$1" != "-d" ]; then
- sleep 5
- kill_runtime
+ sleep 5
+ kill_runtime
fi
# Generate *.csv and *.dat results
echo -n "Parsing Results: "
-printf "Concurrency,Success_Rate\n" >>"$results_directory/success.csv"
-printf "Concurrency,Throughput\n" >>"$results_directory/throughput.csv"
-printf "Con,p50,p90,p99,p100\n" >>"$results_directory/latency.csv"
+printf "Concurrency,Success_Rate\n" >> "$results_directory/success.csv"
+printf "Concurrency,Throughput\n" >> "$results_directory/throughput.csv"
+printf "Con,p50,p90,p99,p100\n" >> "$results_directory/latency.csv"
for payload in ${payloads[*]}; do
- # Calculate Success Rate for csv
- file=$(echo "$payload" | awk -F/ '{print $2}')
- awk -F, '
+ # Calculate Success Rate for csv
+ file=$(echo "$payload" | awk -F/ '{print $2}')
+ awk -F, '
$7 == 200 {ok++}
END{printf "'"$file"',%3.5f\n", (ok / '"$iterations"' * 100)}
- ' <"$results_directory/$file.csv" >>"$results_directory/success.csv"
+ ' < "$results_directory/$file.csv" >> "$results_directory/success.csv"
- # Filter on 200s, convery from s to ms, and sort
- awk -F, '$7 == 200 {print ($1 * 1000)}' <"$results_directory/$file.csv" |
- sort -g >"$results_directory/$file-response.csv"
+ # Filter on 200s, convert from s to ms, and sort
+ awk -F, '$7 == 200 {print ($1 * 1000)}' < "$results_directory/$file.csv" \
+ | sort -g > "$results_directory/$file-response.csv"
- # Get Number of 200s
- oks=$(wc -l <"$results_directory/$file-response.csv")
- ((oks == 0)) && continue # If all errors, skip line
+ # Get Number of 200s
+ oks=$(wc -l < "$results_directory/$file-response.csv")
+ ((oks == 0)) && continue # If all errors, skip line
- # Get Latest Timestamp
- duration=$(tail -n1 "$results_directory/$file.csv" | cut -d, -f8)
- throughput=$(echo "$oks/$duration" | bc)
- printf "%s,%f\n" "$file" "$throughput" >>"$results_directory/throughput.csv"
+ # Get Latest Timestamp
+ duration=$(tail -n1 "$results_directory/$file.csv" | cut -d, -f8)
+ throughput=$(echo "$oks/$duration" | bc)
+ printf "%s,%f\n" "$file" "$throughput" >> "$results_directory/throughput.csv"
- # Generate Latency Data for csv
- awk '
+ # Generate Latency Data for csv
+ awk '
BEGIN {
sum = 0
p50 = int('"$oks"' * 0.5)
@@ -92,16 +92,16 @@ for payload in ${payloads[*]}; do
NR==p90 {printf "%1.4f,", $0}
NR==p99 {printf "%1.4f,", $0}
NR==p100 {printf "%1.4f\n", $0}
- ' <"$results_directory/$file-response.csv" >>"$results_directory/latency.csv"
+ ' < "$results_directory/$file-response.csv" >> "$results_directory/latency.csv"
- # Delete scratch file used for sorting/counting
- rm -rf "$results_directory/$file-response.csv"
+ # Delete scratch file used for sorting/counting
+ rm -rf "$results_directory/$file-response.csv"
done
# Transform csvs to dat files for gnuplot
for file in success latency throughput; do
- echo -n "#" >"$results_directory/$file.dat"
- tr ',' ' ' <"$results_directory/$file.csv" | column -t >>"$results_directory/$file.dat"
+ echo -n "#" > "$results_directory/$file.dat"
+ tr ',' ' ' < "$results_directory/$file.csv" | column -t >> "$results_directory/$file.dat"
done
# Generate gnuplots
diff --git a/runtime/experiments/applications/speechtotext/debug.sh b/runtime/experiments/applications/speechtotext/debug.sh
index a561392..80784d0 100755
--- a/runtime/experiments/applications/speechtotext/debug.sh
+++ b/runtime/experiments/applications/speechtotext/debug.sh
@@ -12,8 +12,8 @@ export LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH"
export PATH="$binary_directory:$PATH"
gdb --eval-command="handle SIGUSR1 nostop" \
- --eval-command="handle SIGPIPE nostop" \
- --eval-command="set pagination off" \
- --eval-command="set substitute-path /sledge/runtime $project_directory" \
- --eval-command="run $experiment_directory/spec.json" \
- sledgert
+ --eval-command="handle SIGPIPE nostop" \
+ --eval-command="set pagination off" \
+ --eval-command="set substitute-path /sledge/runtime $project_directory" \
+ --eval-command="run $experiment_directory/spec.json" \
+ sledgert
diff --git a/runtime/experiments/applications/speechtotext/run.sh b/runtime/experiments/applications/speechtotext/run.sh
index 732089e..8ec7d83 100755
--- a/runtime/experiments/applications/speechtotext/run.sh
+++ b/runtime/experiments/applications/speechtotext/run.sh
@@ -10,14 +10,14 @@ binary_directory=$(cd "$project_directory"/bin && pwd)
# Copy License Plate Image if not here
if [[ ! -f "./samples/goforward.raw" ]]; then
- cp ../../../tests/speechtotext/goforward.raw ./samples/goforward.raw
+ cp ../../../tests/speechtotext/goforward.raw ./samples/goforward.raw
fi
if [ "$1" != "-d" ]; then
- PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" &
- sleep 1
+ PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" &
+ sleep 1
else
- echo "Running under gdb"
+ echo "Running under gdb"
fi
# expected_size="$(find expected_result.jpg -printf "%s")"
@@ -25,29 +25,29 @@ success_count=0
total_count=50
for ((i = 0; i < total_count; i++)); do
- echo "$i"
- # ext="$RANDOM"
- curl -H 'Expect:' -H "Content-Type: image/jpg" --data-binary "@goforward.raw" localhost:10000 2>/dev/null
-
- # # echo "$result"
- # if [[ "$expected_size" == "$actual_size" ]]; then
- # echo "SUCCESS $success_count"
- # else
- # echo "FAIL"
- # echo "Expected Size:"
- # echo "$expected_size"
- # echo "==============================================="
- # echo "Actual Size:"
- # echo "$actual_size"
- # fi
+ echo "$i"
+ # ext="$RANDOM"
+ curl -H 'Expect:' -H "Content-Type: image/jpg" --data-binary "@goforward.raw" localhost:10000 2> /dev/null
+
+ # # echo "$result"
+ # if [[ "$expected_size" == "$actual_size" ]]; then
+ # echo "SUCCESS $success_count"
+ # else
+ # echo "FAIL"
+ # echo "Expected Size:"
+ # echo "$expected_size"
+ # echo "==============================================="
+ # echo "Actual Size:"
+ # echo "$actual_size"
+ # fi
done
echo "$success_count / $total_count"
if [ "$1" != "-d" ]; then
- sleep 5
- echo -n "Running Cleanup: "
- rm result_*.jpg
- pkill sledgert >/dev/null 2>/dev/null
- echo "[DONE]"
+ sleep 5
+ echo -n "Running Cleanup: "
+ rm result_*.jpg
+ pkill sledgert > /dev/null 2> /dev/null
+ echo "[DONE]"
fi
diff --git a/runtime/experiments/common.sh b/runtime/experiments/common.sh
index f40f4ca..28a932e 100644
--- a/runtime/experiments/common.sh
+++ b/runtime/experiments/common.sh
@@ -1,47 +1,47 @@
#!/bin/bash
log_environment() {
- echo "*******"
- echo "* Git *"
- echo "*******"
- git log | head -n 1 | cut -d' ' -f2
- git status
- echo ""
+ echo "*******"
+ echo "* Git *"
+ echo "*******"
+ git log | head -n 1 | cut -d' ' -f2
+ git status
+ echo ""
- echo "************"
- echo "* Makefile *"
- echo "************"
- cat ../../Makefile
- echo ""
+ echo "************"
+ echo "* Makefile *"
+ echo "************"
+ cat ../../Makefile
+ echo ""
- echo "**********"
- echo "* Run.sh *"
- echo "**********"
- cat run.sh
- echo ""
+ echo "**********"
+ echo "* Run.sh *"
+ echo "**********"
+ cat run.sh
+ echo ""
- echo "************"
- echo "* Hardware *"
- echo "************"
- lscpu
- echo ""
+ echo "************"
+ echo "* Hardware *"
+ echo "************"
+ lscpu
+ echo ""
- echo "*************"
- echo "* Execution *"
- echo "*************"
+ echo "*************"
+ echo "* Execution *"
+ echo "*************"
}
kill_runtime() {
- echo -n "Running Cleanup: "
- pkill sledgert >/dev/null 2>/dev/null
- pkill hey >/dev/null 2>/dev/null
- echo "[DONE]"
+ echo -n "Running Cleanup: "
+ pkill sledgert > /dev/null 2> /dev/null
+ pkill hey > /dev/null 2> /dev/null
+ echo "[DONE]"
}
generate_gnuplots() {
- cd "$results_directory" || exit
- gnuplot ../../latency.gnuplot
- gnuplot ../../success.gnuplot
- gnuplot ../../throughput.gnuplot
- cd "$experiment_directory" || exit
+ cd "$results_directory" || exit
+ gnuplot ../../latency.gnuplot
+ gnuplot ../../success.gnuplot
+ gnuplot ../../throughput.gnuplot
+ cd "$experiment_directory" || exit
}
diff --git a/runtime/experiments/concurrency/debug.sh b/runtime/experiments/concurrency/debug.sh
index 62c64e3..f40fa45 100755
--- a/runtime/experiments/concurrency/debug.sh
+++ b/runtime/experiments/concurrency/debug.sh
@@ -12,8 +12,8 @@ export LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH"
export PATH="$binary_directory:$PATH"
gdb --eval-command="handle SIGUSR1 nostop" \
- --eval-command="handle SIGPIPE nostop" \
- --eval-command="set pagination off" \
- --eval-command="set substitute-path /sledge/runtime $project_directory" \
- --eval-command="run $experiment_directory/spec.json" \
- sledgert
+ --eval-command="handle SIGPIPE nostop" \
+ --eval-command="set pagination off" \
+ --eval-command="set substitute-path /sledge/runtime $project_directory" \
+ --eval-command="run $experiment_directory/spec.json" \
+ sledgert
diff --git a/runtime/experiments/concurrency/run.sh b/runtime/experiments/concurrency/run.sh
index 65780df..c651c47 100755
--- a/runtime/experiments/concurrency/run.sh
+++ b/runtime/experiments/concurrency/run.sh
@@ -12,15 +12,15 @@ log=log.txt
mkdir -p "$results_directory"
-log_environment >>"$results_directory/$log"
+log_environment >> "$results_directory/$log"
# Start the runtime
if [ "$1" != "-d" ]; then
- PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" >>"$results_directory/$log" 2>>"$results_directory/$log" &
- sleep 1
+ PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" >> "$results_directory/$log" 2>> "$results_directory/$log" &
+ sleep 1
else
- echo "Running under gdb"
- echo "Running under gdb" >>"$results_directory/$log"
+ echo "Running under gdb"
+ echo "Running under gdb" >> "$results_directory/$log"
fi
iterations=10000
@@ -35,47 +35,47 @@ echo "[DONE]"
concurrency=(1 20 40 60 80 100)
echo "Running Experiments"
for conn in ${concurrency[*]}; do
- printf "\t%d Concurrency: " "$conn"
- hey -n "$iterations" -c "$conn" -cpus 2 -o csv -m GET http://localhost:10000 >"$results_directory/con$conn.csv"
- echo "[DONE]"
+ printf "\t%d Concurrency: " "$conn"
+ hey -n "$iterations" -c "$conn" -cpus 2 -o csv -m GET http://localhost:10000 > "$results_directory/con$conn.csv"
+ echo "[DONE]"
done
# Stop the runtime
if [ "$1" != "-d" ]; then
- sleep 5
- kill_runtime
+ sleep 5
+ kill_runtime
fi
# Generate *.csv and *.dat results
echo -n "Parsing Results: "
-printf "Concurrency,Success_Rate\n" >>"$results_directory/success.csv"
-printf "Concurrency,Throughput\n" >>"$results_directory/throughput.csv"
-printf "Con,p50,p90,p99,p100\n" >>"$results_directory/latency.csv"
+printf "Concurrency,Success_Rate\n" >> "$results_directory/success.csv"
+printf "Concurrency,Throughput\n" >> "$results_directory/throughput.csv"
+printf "Con,p50,p90,p99,p100\n" >> "$results_directory/latency.csv"
for conn in ${concurrency[*]}; do
- # Calculate Success Rate for csv
- awk -F, '
+ # Calculate Success Rate for csv
+ awk -F, '
$7 == 200 {ok++}
END{printf "'"$conn"',%3.5f\n", (ok / '"$iterations"' * 100)}
- ' <"$results_directory/con$conn.csv" >>"$results_directory/success.csv"
+ ' < "$results_directory/con$conn.csv" >> "$results_directory/success.csv"
- # Filter on 200s, convery from s to ms, and sort
- awk -F, '$7 == 200 {print ($1 * 1000)}' <"$results_directory/con$conn.csv" |
- sort -g >"$results_directory/con$conn-response.csv"
+	# Filter on 200s, convert from s to ms, and sort
+ awk -F, '$7 == 200 {print ($1 * 1000)}' < "$results_directory/con$conn.csv" \
+ | sort -g > "$results_directory/con$conn-response.csv"
- # Get Number of 200s
- oks=$(wc -l <"$results_directory/con$conn-response.csv")
- ((oks == 0)) && continue # If all errors, skip line
+ # Get Number of 200s
+ oks=$(wc -l < "$results_directory/con$conn-response.csv")
+ ((oks == 0)) && continue # If all errors, skip line
- # Get Latest Timestamp
- duration=$(tail -n1 "$results_directory/con$conn.csv" | cut -d, -f8)
- throughput=$(echo "$oks/$duration" | bc)
- printf "%d,%f\n" "$conn" "$throughput" >>"$results_directory/throughput.csv"
+ # Get Latest Timestamp
+ duration=$(tail -n1 "$results_directory/con$conn.csv" | cut -d, -f8)
+ throughput=$(echo "$oks/$duration" | bc)
+ printf "%d,%f\n" "$conn" "$throughput" >> "$results_directory/throughput.csv"
- # Generate Latency Data for csv
- awk '
+ # Generate Latency Data for csv
+ awk '
BEGIN {
sum = 0
p50 = int('"$oks"' * 0.5)
@@ -88,16 +88,16 @@ for conn in ${concurrency[*]}; do
NR==p90 {printf "%1.4f,", $0}
NR==p99 {printf "%1.4f,", $0}
NR==p100 {printf "%1.4f\n", $0}
- ' <"$results_directory/con$conn-response.csv" >>"$results_directory/latency.csv"
+ ' < "$results_directory/con$conn-response.csv" >> "$results_directory/latency.csv"
- # Delete scratch file used for sorting/counting
- rm -rf "$results_directory/con$conn-response.csv"
+ # Delete scratch file used for sorting/counting
+ rm -rf "$results_directory/con$conn-response.csv"
done
# Transform csvs to dat files for gnuplot
for file in success latency throughput; do
- echo -n "#" >"$results_directory/$file.dat"
- tr ',' ' ' <"$results_directory/$file.csv" | column -t >>"$results_directory/$file.dat"
+ echo -n "#" > "$results_directory/$file.dat"
+ tr ',' ' ' < "$results_directory/$file.csv" | column -t >> "$results_directory/$file.dat"
done
# Generate gnuplots
diff --git a/runtime/experiments/deadline/client.sh b/runtime/experiments/deadline/client.sh
index 2449ce3..4063a83 100755
--- a/runtime/experiments/deadline/client.sh
+++ b/runtime/experiments/deadline/client.sh
@@ -13,7 +13,7 @@ results_directory="$experiment_directory/res/$timestamp"
log=log.txt
mkdir -p "$results_directory"
-log_environment >>"$results_directory/$log"
+log_environment >> "$results_directory/$log"
inputs=(40 10)
duration_sec=60
@@ -22,7 +22,7 @@ offset=5
# Execute workloads long enough for runtime to learn excepted execution time
echo -n "Running Samples: "
for input in ${inputs[*]}; do
- hey -n 16 -c 4 -t 0 -o csv -m GET -d "$input\n" http://${host}:$((10000 + input))
+ hey -n 16 -c 4 -t 0 -o csv -m GET -d "$input\n" http://${host}:$((10000 + input))
done
echo "[DONE]"
sleep 5
@@ -30,7 +30,7 @@ sleep 5
echo "Running Experiments"
# Run lower priority first, then higher priority. The lower priority has offsets to ensure it runs the entire time the high priority is trying to run
-hey -n 1000 -c 1000 -cpus 6 -t 0 -o csv -m GET -d "40\n" http://${host}:10040 >"$results_directory/fib40-con.csv"
+hey -n 1000 -c 1000 -cpus 6 -t 0 -o csv -m GET -d "40\n" http://${host}:10040 > "$results_directory/fib40-con.csv"
# sleep $offset
# hey -n 25000 -c 1000000 -t 0 -o csv -m GET -d "10\n" http://${host}:10010 >"$results_directory/fib10-con.csv" &
# sleep $((duration_sec + offset + 45))
@@ -38,43 +38,43 @@ hey -n 1000 -c 1000 -cpus 6 -t 0 -o csv -m GET -d "40\n" http://${host}:10040 >"
# Generate *.csv and *.dat results
echo -n "Parsing Results: "
-printf "Payload,Success_Rate\n" >>"$results_directory/success.csv"
-printf "Payload,Throughput\n" >>"$results_directory/throughput.csv"
-printf "Payload,p50,p90,p99,p100\n" >>"$results_directory/latency.csv"
+printf "Payload,Success_Rate\n" >> "$results_directory/success.csv"
+printf "Payload,Throughput\n" >> "$results_directory/throughput.csv"
+printf "Payload,p50,p90,p99,p100\n" >> "$results_directory/latency.csv"
deadlines_ms=(20 20000)
# durations_s=(60 70)
payloads=(fib10-con fib40-con)
for ((i = 1; i < 2; i++)); do
- payload=${payloads[$i]}
- deadline=${deadlines_ms[$i]}
- # duration=${durations_s[$i]}
+ payload=${payloads[$i]}
+ deadline=${deadlines_ms[$i]}
+ # duration=${durations_s[$i]}
- # Get Number of Requests
- requests=$(($(wc -l <"$results_directory/$payload.csv") - 1))
- ((requests == 0)) && continue
+ # Get Number of Requests
+ requests=$(($(wc -l < "$results_directory/$payload.csv") - 1))
+ ((requests == 0)) && continue
- # Calculate Success Rate for csv
- awk -F, '
+ # Calculate Success Rate for csv
+ awk -F, '
$7 == 200 && ($1 * 1000) <= '"$deadline"' {ok++}
END{printf "'"$payload"',%3.5f%\n", (ok / (NR - 1) * 100)}
- ' <"$results_directory/$payload.csv" >>"$results_directory/success.csv"
+ ' < "$results_directory/$payload.csv" >> "$results_directory/success.csv"
- # Filter on 200s, convery from s to ms, and sort
- awk -F, '$7 == 200 {print ($1 * 1000)}' <"$results_directory/$payload.csv" |
- sort -g >"$results_directory/$payload-response.csv"
+	# Filter on 200s, convert from s to ms, and sort
+ awk -F, '$7 == 200 {print ($1 * 1000)}' < "$results_directory/$payload.csv" \
+ | sort -g > "$results_directory/$payload-response.csv"
- # Get Number of 200s
- oks=$(wc -l <"$results_directory/$payload-response.csv")
- ((oks == 0)) && continue # If all errors, skip line
+ # Get Number of 200s
+ oks=$(wc -l < "$results_directory/$payload-response.csv")
+ ((oks == 0)) && continue # If all errors, skip line
- # Get Latest Timestamp
- # throughput=$(echo "$oks/$duration" | bc)
- # printf "%s,%f\n" "$payload" "$throughput" >>"$results_directory/throughput.csv"
+ # Get Latest Timestamp
+ # throughput=$(echo "$oks/$duration" | bc)
+ # printf "%s,%f\n" "$payload" "$throughput" >>"$results_directory/throughput.csv"
- # Generate Latency Data for csv
- awk '
+ # Generate Latency Data for csv
+ awk '
BEGIN {
sum = 0
p50 = int('"$oks"' * 0.5)
@@ -87,16 +87,16 @@ for ((i = 1; i < 2; i++)); do
NR==p90 {printf "%1.4f,", $0}
NR==p99 {printf "%1.4f,", $0}
NR==p100 {printf "%1.4f\n", $0}
- ' <"$results_directory/$payload-response.csv" >>"$results_directory/latency.csv"
+ ' < "$results_directory/$payload-response.csv" >> "$results_directory/latency.csv"
- # Delete scratch file used for sorting/counting
- # rm -rf "$results_directory/$payload-response.csv"
+ # Delete scratch file used for sorting/counting
+ # rm -rf "$results_directory/$payload-response.csv"
done
# Transform csvs to dat files for gnuplot
for file in success latency; do
- echo -n "#" >"$results_directory/$file.dat"
- tr ',' ' ' <"$results_directory/$file.csv" | column -t >>"$results_directory/$file.dat"
+ echo -n "#" > "$results_directory/$file.dat"
+ tr ',' ' ' < "$results_directory/$file.csv" | column -t >> "$results_directory/$file.dat"
done
# Generate gnuplots. Commented out because we don't have *.gnuplots defined
diff --git a/runtime/experiments/deadline/client2.sh b/runtime/experiments/deadline/client2.sh
index e5909ea..17990e8 100755
--- a/runtime/experiments/deadline/client2.sh
+++ b/runtime/experiments/deadline/client2.sh
@@ -14,7 +14,7 @@ results_directory="$experiment_directory/res/$timestamp"
log=log.txt
mkdir -p "$results_directory"
-log_environment >>"$results_directory/$log"
+log_environment >> "$results_directory/$log"
inputs=(40 10)
duration_sec=30
@@ -23,7 +23,7 @@ offset=5
# Execute workloads long enough for runtime to learn excepted execution time
echo -n "Running Samples: "
for input in ${inputs[*]}; do
- hey -n 16 -c 4 -t 0 -o csv -m GET -d "$input\n" http://${host}:$((10000 + input))
+ hey -n 16 -c 4 -t 0 -o csv -m GET -d "$input\n" http://${host}:$((10000 + input))
done
echo "[DONE]"
sleep 5
@@ -31,54 +31,54 @@ sleep 5
echo "Running Experiments"
# Run lower priority first, then higher priority. The lower priority has offsets to ensure it runs the entire time the high priority is trying to run
-hey -z $((duration_sec + 2 * offset))s -cpus 3 -c 200 -t 0 -o csv -m GET -d "40\n" http://${host}:10040 >"$results_directory/fib40-con.csv" &
+hey -z $((duration_sec + 2 * offset))s -cpus 3 -c 200 -t 0 -o csv -m GET -d "40\n" http://${host}:10040 > "$results_directory/fib40-con.csv" &
sleep $offset
-hey -z ${duration_sec}s -cpus 3 -c 200 -t 0 -o csv -m GET -d "10\n" http://${host}:10010 >"$results_directory/fib10-con.csv" &
+hey -z ${duration_sec}s -cpus 3 -c 200 -t 0 -o csv -m GET -d "10\n" http://${host}:10010 > "$results_directory/fib10-con.csv" &
sleep $((duration_sec + offset + 15))
sleep 30
# Generate *.csv and *.dat results
echo -n "Parsing Results: "
-printf "Payload,Success_Rate\n" >>"$results_directory/success.csv"
-printf "Payload,Throughput\n" >>"$results_directory/throughput.csv"
-printf "Payload,p50,p90,p99,p100\n" >>"$results_directory/latency.csv"
+printf "Payload,Success_Rate\n" >> "$results_directory/success.csv"
+printf "Payload,Throughput\n" >> "$results_directory/throughput.csv"
+printf "Payload,p50,p90,p99,p100\n" >> "$results_directory/latency.csv"
deadlines_ms=(20 20000)
payloads=(fib10-con fib40-con)
durations_s=(30 40)
for ((i = 0; i < 2; i++)); do
- payload=${payloads[$i]}
- deadline=${deadlines_ms[$i]}
- duration=${durations_s[$i]}
+ payload=${payloads[$i]}
+ deadline=${deadlines_ms[$i]}
+ duration=${durations_s[$i]}
- # Get Number of Requests
- requests=$(($(wc -l <"$results_directory/$payload.csv") - 1))
- ((requests == 0)) && continue
+ # Get Number of Requests
+ requests=$(($(wc -l < "$results_directory/$payload.csv") - 1))
+ ((requests == 0)) && continue
- # Calculate Success Rate for csv
- awk -F, '
+ # Calculate Success Rate for csv
+ awk -F, '
$7 == 200 {denom++}
$7 == 200 && ($1 * 1000) <= '"$deadline"' {ok++}
END{printf "'"$payload"',%3.5f%\n", (ok / denom * 100)}
- ' <"$results_directory/$payload.csv" >>"$results_directory/success.csv"
+ ' < "$results_directory/$payload.csv" >> "$results_directory/success.csv"
- # Filter on 200s, convery from s to ms, and sort
- awk -F, '$7 == 200 {print ($1 * 1000)}' <"$results_directory/$payload.csv" |
- sort -g >"$results_directory/$payload-response.csv"
+	# Filter on 200s, convert from s to ms, and sort
+ awk -F, '$7 == 200 {print ($1 * 1000)}' < "$results_directory/$payload.csv" \
+ | sort -g > "$results_directory/$payload-response.csv"
- # Get Number of 200s
- oks=$(wc -l <"$results_directory/$payload-response.csv")
- ((oks == 0)) && continue # If all errors, skip line
+ # Get Number of 200s
+ oks=$(wc -l < "$results_directory/$payload-response.csv")
+ ((oks == 0)) && continue # If all errors, skip line
- # Get Latest Timestamp
- duration=$(tail -n1 "$results_directory/$payload.csv" | cut -d, -f8)
- throughput=$(echo "$oks/$duration" | bc)
- printf "%s,%f\n" "$payload" "$throughput" >>"$results_directory/throughput.csv"
+ # Get Latest Timestamp
+ duration=$(tail -n1 "$results_directory/$payload.csv" | cut -d, -f8)
+ throughput=$(echo "$oks/$duration" | bc)
+ printf "%s,%f\n" "$payload" "$throughput" >> "$results_directory/throughput.csv"
- # Generate Latency Data for csv
- awk '
+ # Generate Latency Data for csv
+ awk '
BEGIN {
sum = 0
p50 = int('"$oks"' * 0.5)
@@ -91,16 +91,16 @@ for ((i = 0; i < 2; i++)); do
NR==p90 {printf "%1.4f,", $0}
NR==p99 {printf "%1.4f,", $0}
NR==p100 {printf "%1.4f\n", $0}
- ' <"$results_directory/$payload-response.csv" >>"$results_directory/latency.csv"
+ ' < "$results_directory/$payload-response.csv" >> "$results_directory/latency.csv"
- # Delete scratch file used for sorting/counting
- # rm -rf "$results_directory/$payload-response.csv"
+ # Delete scratch file used for sorting/counting
+ # rm -rf "$results_directory/$payload-response.csv"
done
# Transform csvs to dat files for gnuplot
for file in success latency throughput; do
- echo -n "#" >"$results_directory/$file.dat"
- tr ',' ' ' <"$results_directory/$file.csv" | column -t >>"$results_directory/$file.dat"
+ echo -n "#" > "$results_directory/$file.dat"
+ tr ',' ' ' < "$results_directory/$file.csv" | column -t >> "$results_directory/$file.dat"
done
# Generate gnuplots. Commented out because we don't have *.gnuplots defined
diff --git a/runtime/experiments/deadline/client3.sh b/runtime/experiments/deadline/client3.sh
index 391c53f..8d203c7 100755
--- a/runtime/experiments/deadline/client3.sh
+++ b/runtime/experiments/deadline/client3.sh
@@ -14,7 +14,7 @@ results_directory="$experiment_directory/res/$timestamp"
log=log.txt
mkdir -p "$results_directory"
-log_environment >>"$results_directory/$log"
+log_environment >> "$results_directory/$log"
inputs=(10)
duration_sec=30
@@ -31,52 +31,52 @@ echo "Running Experiments"
# Run lower priority first, then higher priority. The lower priority has offsets to ensure it runs the entire time the high priority is trying to run
# hey -z $((duration_sec + 2 * offset))s -cpus 3 -c 200 -t 0 -o csv -m GET -d "40\n" http://${host}:10040 >"$results_directory/fib40-con.csv" &
# sleep $offset
-hey -z ${duration_sec}s -cpus 6 -c 400 -t 0 -o csv -m GET -d "10\n" http://${host}:10010 >"$results_directory/fib10-con.csv"
+hey -z ${duration_sec}s -cpus 6 -c 400 -t 0 -o csv -m GET -d "10\n" http://${host}:10010 > "$results_directory/fib10-con.csv"
# sleep $((duration_sec + offset + 15))
# sleep 30
# Generate *.csv and *.dat results
echo -n "Parsing Results: "
-printf "Payload,Success_Rate\n" >>"$results_directory/success.csv"
-printf "Payload,Throughput\n" >>"$results_directory/throughput.csv"
-printf "Payload,p50,p90,p99,p100\n" >>"$results_directory/latency.csv"
+printf "Payload,Success_Rate\n" >> "$results_directory/success.csv"
+printf "Payload,Throughput\n" >> "$results_directory/throughput.csv"
+printf "Payload,p50,p90,p99,p100\n" >> "$results_directory/latency.csv"
deadlines_ms=(20 20000)
payloads=(fib10-con fib40-con)
durations_s=(30 40)
for ((i = 0; i < 1; i++)); do
- payload=${payloads[$i]}
- deadline=${deadlines_ms[$i]}
- duration=${durations_s[$i]}
+ payload=${payloads[$i]}
+ deadline=${deadlines_ms[$i]}
+ duration=${durations_s[$i]}
- # Get Number of Requests
- requests=$(($(wc -l <"$results_directory/$payload.csv") - 1))
- ((requests == 0)) && continue
+ # Get Number of Requests
+ requests=$(($(wc -l < "$results_directory/$payload.csv") - 1))
+ ((requests == 0)) && continue
- # Calculate Success Rate for csv
- awk -F, '
+ # Calculate Success Rate for csv
+ awk -F, '
$7 == 200 {denom++}
$7 == 200 && ($1 * 1000) <= '"$deadline"' {ok++}
END{printf "'"$payload"',%3.5f%\n", (ok / denom * 100)}
- ' <"$results_directory/$payload.csv" >>"$results_directory/success.csv"
+ ' < "$results_directory/$payload.csv" >> "$results_directory/success.csv"
- # Filter on 200s, convery from s to ms, and sort
- awk -F, '$7 == 200 {print ($1 * 1000)}' <"$results_directory/$payload.csv" |
- sort -g >"$results_directory/$payload-response.csv"
+	# Filter on 200s, convert from s to ms, and sort
+ awk -F, '$7 == 200 {print ($1 * 1000)}' < "$results_directory/$payload.csv" \
+ | sort -g > "$results_directory/$payload-response.csv"
- # Get Number of 200s
- oks=$(wc -l <"$results_directory/$payload-response.csv")
- ((oks == 0)) && continue # If all errors, skip line
+ # Get Number of 200s
+ oks=$(wc -l < "$results_directory/$payload-response.csv")
+ ((oks == 0)) && continue # If all errors, skip line
- # Get Latest Timestamp
- duration=$(tail -n1 "$results_directory/$payload.csv" | cut -d, -f8)
- throughput=$(echo "$oks/$duration" | bc)
- printf "%s,%f\n" "$payload" "$throughput" >>"$results_directory/throughput.csv"
+ # Get Latest Timestamp
+ duration=$(tail -n1 "$results_directory/$payload.csv" | cut -d, -f8)
+ throughput=$(echo "$oks/$duration" | bc)
+ printf "%s,%f\n" "$payload" "$throughput" >> "$results_directory/throughput.csv"
- # Generate Latency Data for csv
- awk '
+ # Generate Latency Data for csv
+ awk '
BEGIN {
sum = 0
p50 = int('"$oks"' * 0.5)
@@ -89,16 +89,16 @@ for ((i = 0; i < 1; i++)); do
NR==p90 {printf "%1.4f,", $0}
NR==p99 {printf "%1.4f,", $0}
NR==p100 {printf "%1.4f\n", $0}
- ' <"$results_directory/$payload-response.csv" >>"$results_directory/latency.csv"
+ ' < "$results_directory/$payload-response.csv" >> "$results_directory/latency.csv"
- # Delete scratch file used for sorting/counting
- # rm -rf "$results_directory/$payload-response.csv"
+ # Delete scratch file used for sorting/counting
+ # rm -rf "$results_directory/$payload-response.csv"
done
# Transform csvs to dat files for gnuplot
for file in success latency throughput; do
- echo -n "#" >"$results_directory/$file.dat"
- tr ',' ' ' <"$results_directory/$file.csv" | column -t >>"$results_directory/$file.dat"
+ echo -n "#" > "$results_directory/$file.dat"
+ tr ',' ' ' < "$results_directory/$file.csv" | column -t >> "$results_directory/$file.dat"
done
# Generate gnuplots. Commented out because we don't have *.gnuplots defined
diff --git a/runtime/experiments/deadline/debug.sh b/runtime/experiments/deadline/debug.sh
index 62c64e3..f40fa45 100755
--- a/runtime/experiments/deadline/debug.sh
+++ b/runtime/experiments/deadline/debug.sh
@@ -12,8 +12,8 @@ export LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH"
export PATH="$binary_directory:$PATH"
gdb --eval-command="handle SIGUSR1 nostop" \
- --eval-command="handle SIGPIPE nostop" \
- --eval-command="set pagination off" \
- --eval-command="set substitute-path /sledge/runtime $project_directory" \
- --eval-command="run $experiment_directory/spec.json" \
- sledgert
+ --eval-command="handle SIGPIPE nostop" \
+ --eval-command="set pagination off" \
+ --eval-command="set substitute-path /sledge/runtime $project_directory" \
+ --eval-command="run $experiment_directory/spec.json" \
+ sledgert
diff --git a/runtime/experiments/deadline/fix_calcs.sh b/runtime/experiments/deadline/fix_calcs.sh
index cf13246..057a19f 100755
--- a/runtime/experiments/deadline/fix_calcs.sh
+++ b/runtime/experiments/deadline/fix_calcs.sh
@@ -10,42 +10,42 @@ results_directory="$experiment_directory/res/1606615320-fifo-adm"
# Generate *.csv and *.dat results
echo -n "Parsing Results: "
-printf "Payload,Success_Rate\n" >>"$results_directory/success.csv"
-printf "Payload,Throughput\n" >>"$results_directory/throughput.csv"
-printf "Payload,p50,p90,p99,p100\n" >>"$results_directory/latency.csv"
+printf "Payload,Success_Rate\n" >> "$results_directory/success.csv"
+printf "Payload,Throughput\n" >> "$results_directory/throughput.csv"
+printf "Payload,p50,p90,p99,p100\n" >> "$results_directory/latency.csv"
deadlines_ms=(20 20000)
payloads=(fib10-con fib40-con)
for ((i = 0; i < 2; i++)); do
- payload=${payloads[$i]}
- deadline=${deadlines_ms[$i]}
+ payload=${payloads[$i]}
+ deadline=${deadlines_ms[$i]}
- # Get Number of Requests
- requests=$(($(wc -l <"$results_directory/$payload.csv") - 1))
- ((requests == 0)) && continue
+ # Get Number of Requests
+ requests=$(($(wc -l < "$results_directory/$payload.csv") - 1))
+ ((requests == 0)) && continue
- # Calculate Success Rate for csv
- awk -F, '
+ # Calculate Success Rate for csv
+ awk -F, '
$7 == 200 && ($1 * 1000) <= '"$deadline"' {ok++}
END{printf "'"$payload"',%3.5f%\n", (ok / (NR - 1) * 100)}
- ' <"$results_directory/$payload.csv" >>"$results_directory/success.csv"
+ ' < "$results_directory/$payload.csv" >> "$results_directory/success.csv"
- # Filter on 200s, convery from s to ms, and sort
- awk -F, '$7 == 200 {print ($1 * 1000)}' <"$results_directory/$payload.csv" |
- sort -g >"$results_directory/$payload-response.csv"
+	# Filter on 200s, convert from s to ms, and sort
+ awk -F, '$7 == 200 {print ($1 * 1000)}' < "$results_directory/$payload.csv" \
+ | sort -g > "$results_directory/$payload-response.csv"
- # Get Number of 200s
- oks=$(wc -l <"$results_directory/$payload-response.csv")
- ((oks == 0)) && continue # If all errors, skip line
+ # Get Number of 200s
+ oks=$(wc -l < "$results_directory/$payload-response.csv")
+ ((oks == 0)) && continue # If all errors, skip line
- # Get Latest Timestamp
- duration=$(tail -n1 "$results_directory/$payload.csv" | cut -d, -f8)
- throughput=$(echo "$oks/$duration" | bc)
- printf "%s,%f\n" "$payload" "$throughput" >>"$results_directory/throughput.csv"
+ # Get Latest Timestamp
+ duration=$(tail -n1 "$results_directory/$payload.csv" | cut -d, -f8)
+ throughput=$(echo "$oks/$duration" | bc)
+ printf "%s,%f\n" "$payload" "$throughput" >> "$results_directory/throughput.csv"
- # Generate Latency Data for csv
- awk '
+ # Generate Latency Data for csv
+ awk '
BEGIN {
sum = 0
p50 = int('"$oks"' * 0.5)
@@ -58,16 +58,16 @@ for ((i = 0; i < 2; i++)); do
NR==p90 {printf "%1.4f,", $0}
NR==p99 {printf "%1.4f,", $0}
NR==p100 {printf "%1.4f\n", $0}
- ' <"$results_directory/$payload-response.csv" >>"$results_directory/latency.csv"
+ ' < "$results_directory/$payload-response.csv" >> "$results_directory/latency.csv"
- # Delete scratch file used for sorting/counting
- # rm -rf "$results_directory/$payload-response.csv"
+ # Delete scratch file used for sorting/counting
+ # rm -rf "$results_directory/$payload-response.csv"
done
# Transform csvs to dat files for gnuplot
for file in success latency throughput; do
- echo -n "#" >"$results_directory/$file.dat"
- tr ',' ' ' <"$results_directory/$file.csv" | column -t >>"$results_directory/$file.dat"
+ echo -n "#" > "$results_directory/$file.dat"
+ tr ',' ' ' < "$results_directory/$file.csv" | column -t >> "$results_directory/$file.dat"
done
# Generate gnuplots. Commented out because we don't have *.gnuplots defined
diff --git a/runtime/experiments/deadline/fix_calcs2.sh b/runtime/experiments/deadline/fix_calcs2.sh
index 86f0f13..5a1ec6e 100755
--- a/runtime/experiments/deadline/fix_calcs2.sh
+++ b/runtime/experiments/deadline/fix_calcs2.sh
@@ -15,7 +15,7 @@ results_directory="$experiment_directory/res/$timestamp"
log=log.txt
mkdir -p "$results_directory"
-log_environment >>"$results_directory/$log"
+log_environment >> "$results_directory/$log"
inputs=(40 10)
duration_sec=60
@@ -41,45 +41,45 @@ offset=5
# Generate *.csv and *.dat results
echo -n "Parsing Results: "
-printf "Payload,Success_Rate\n" >>"$results_directory/success.csv"
-printf "Payload,Throughput\n" >>"$results_directory/throughput.csv"
-printf "Payload,p50,p90,p99,p100\n" >>"$results_directory/latency.csv"
+printf "Payload,Success_Rate\n" >> "$results_directory/success.csv"
+printf "Payload,Throughput\n" >> "$results_directory/throughput.csv"
+printf "Payload,p50,p90,p99,p100\n" >> "$results_directory/latency.csv"
deadlines_ms=(20 20000)
payloads=(fib10-con fib40-con)
durations_s=(60 70)
for ((i = 0; i < 2; i++)); do
- payload=${payloads[$i]}
- deadline=${deadlines_ms[$i]}
- duration=${durations_s[$i]}
+ payload=${payloads[$i]}
+ deadline=${deadlines_ms[$i]}
+ duration=${durations_s[$i]}
- # Get Number of Requests
- requests=$(($(wc -l <"$results_directory/$payload.csv") - 1))
- ((requests == 0)) && continue
+ # Get Number of Requests
+ requests=$(($(wc -l < "$results_directory/$payload.csv") - 1))
+ ((requests == 0)) && continue
- # Calculate Success Rate for csv
- awk -F, '
+ # Calculate Success Rate for csv
+ awk -F, '
$7 == 200 {denom++}
$7 == 200 && ($1 * 1000) <= '"$deadline"' {ok++}
END{printf "'"$payload"',%3.5f%\n", (ok / denom * 100)}
- ' <"$results_directory/$payload.csv" >>"$results_directory/success.csv"
+ ' < "$results_directory/$payload.csv" >> "$results_directory/success.csv"
- # Filter on 200s, convery from s to ms, and sort
- awk -F, '$7 == 200 {print ($1 * 1000)}' <"$results_directory/$payload.csv" |
- sort -g >"$results_directory/$payload-response.csv"
+	# Filter on 200s, convert from s to ms, and sort
+ awk -F, '$7 == 200 {print ($1 * 1000)}' < "$results_directory/$payload.csv" \
+ | sort -g > "$results_directory/$payload-response.csv"
- # Get Number of 200s
- oks=$(wc -l <"$results_directory/$payload-response.csv")
- ((oks == 0)) && continue # If all errors, skip line
+ # Get Number of 200s
+ oks=$(wc -l < "$results_directory/$payload-response.csv")
+ ((oks == 0)) && continue # If all errors, skip line
- # Get Latest Timestamp
- # duration=$(tail -n1 "$results_directory/$payload.csv" | cut -d, -f8)
- throughput=$(echo "$oks/$duration" | bc)
- printf "%s,%f\n" "$payload" "$throughput" >>"$results_directory/throughput.csv"
+ # Get Latest Timestamp
+ # duration=$(tail -n1 "$results_directory/$payload.csv" | cut -d, -f8)
+ throughput=$(echo "$oks/$duration" | bc)
+ printf "%s,%f\n" "$payload" "$throughput" >> "$results_directory/throughput.csv"
- # Generate Latency Data for csv
- awk '
+ # Generate Latency Data for csv
+ awk '
BEGIN {
sum = 0
p50 = int('"$oks"' * 0.5)
@@ -92,16 +92,16 @@ for ((i = 0; i < 2; i++)); do
NR==p90 {printf "%1.4f,", $0}
NR==p99 {printf "%1.4f,", $0}
NR==p100 {printf "%1.4f\n", $0}
- ' <"$results_directory/$payload-response.csv" >>"$results_directory/latency.csv"
+ ' < "$results_directory/$payload-response.csv" >> "$results_directory/latency.csv"
- # Delete scratch file used for sorting/counting
- # rm -rf "$results_directory/$payload-response.csv"
+ # Delete scratch file used for sorting/counting
+ # rm -rf "$results_directory/$payload-response.csv"
done
# Transform csvs to dat files for gnuplot
for file in success latency throughput; do
- echo -n "#" >"$results_directory/$file.dat"
- tr ',' ' ' <"$results_directory/$file.csv" | column -t >>"$results_directory/$file.dat"
+ echo -n "#" > "$results_directory/$file.dat"
+ tr ',' ' ' < "$results_directory/$file.csv" | column -t >> "$results_directory/$file.dat"
done
# Generate gnuplots. Commented out because we don't have *.gnuplots defined
diff --git a/runtime/experiments/deadline/run.sh b/runtime/experiments/deadline/run.sh
index bf38f2a..0847d4f 100755
--- a/runtime/experiments/deadline/run.sh
+++ b/runtime/experiments/deadline/run.sh
@@ -11,87 +11,87 @@ binary_directory=$(cd ../../bin && pwd)
schedulers=(EDF FIFO)
for scheduler in ${schedulers[*]}; do
- results_directory="$experiment_directory/res/$timestamp/$scheduler"
- log=log.txt
-
- mkdir -p "$results_directory"
- log_environment >>"$results_directory/$log"
-
- # Start the runtime
- if [ "$1" != "-d" ]; then
- SLEDGE_NWORKERS=5 SLEDGE_SCHEDULER=$scheduler PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" >>"$results_directory/$log" 2>>"$results_directory/$log" &
- sleep 1
- else
- echo "Running under gdb"
- echo "Running under gdb" >>"$results_directory/$log"
- fi
-
- inputs=(40 10)
- duration_sec=15
- offset=5
-
- # Execute workloads long enough for runtime to learn excepted execution time
- echo -n "Running Samples: "
- for input in ${inputs[*]}; do
- hey -z ${duration_sec}s -cpus 3 -t 0 -o csv -m GET -d "$input\n" http://localhost:$((10000 + input))
- done
- echo "[DONE]"
- sleep 5
-
- echo "Running Experiments"
- # Run each separately
- hey -z ${duration_sec}s -cpus 4 -c 100 -t 0 -o csv -m GET -d "40\n" http://localhost:10040 >"$results_directory/fib40.csv"
- hey -z ${duration_sec}s -cpus 4 -c 100 -t 0 -o csv -m GET -d "10\n" http://localhost:10010 >"$results_directory/fib10.csv"
-
- # Run lower priority first, then higher priority. The lower priority has offsets to ensure it runs the entire time the high priority is trying to run
- hey -z $((duration_sec + 2 * offset))s -cpus 2 -c 100 -t 0 -o csv -m GET -d "40\n" http://localhost:10040 >"$results_directory/fib40-con.csv" &
- sleep $offset
- hey -z ${duration_sec}s -cpus 2 -c 100 -t 0 -o csv -m GET -d "10\n" http://localhost:10010 >"$results_directory/fib10-con.csv" &
- sleep $((duration_sec + offset + 15))
-
- # Stop the runtime if not in debug mode
- [ "$1" != "-d" ] && kill_runtime
-
- # Generate *.csv and *.dat results
- echo -n "Parsing Results: "
-
- printf "Payload,Success_Rate\n" >>"$results_directory/success.csv"
- printf "Payload,Throughput\n" >>"$results_directory/throughput.csv"
- printf "Payload,p50,p90,p99,p100\n" >>"$results_directory/latency.csv"
-
- deadlines_ms=(2 2 3000 3000)
- payloads=(fib10 fib10-con fib40 fib40-con)
-
- for ((i = 0; i < 4; i++)); do
- # for payload in ${payloads[*]}; do
- payload=${payloads[$i]}
- deadline=${deadlines_ms[$i]}
-
- # Get Number of Requests
- requests=$(($(wc -l <"$results_directory/$payload.csv") - 1))
- ((requests == 0)) && continue
-
- # Calculate Success Rate for csv
- awk -F, '
+ results_directory="$experiment_directory/res/$timestamp/$scheduler"
+ log=log.txt
+
+ mkdir -p "$results_directory"
+ log_environment >> "$results_directory/$log"
+
+ # Start the runtime
+ if [ "$1" != "-d" ]; then
+ SLEDGE_NWORKERS=5 SLEDGE_SCHEDULER=$scheduler PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" >> "$results_directory/$log" 2>> "$results_directory/$log" &
+ sleep 1
+ else
+ echo "Running under gdb"
+ echo "Running under gdb" >> "$results_directory/$log"
+ fi
+
+ inputs=(40 10)
+ duration_sec=15
+ offset=5
+
+	# Execute workloads long enough for runtime to learn expected execution time
+ echo -n "Running Samples: "
+ for input in ${inputs[*]}; do
+ hey -z ${duration_sec}s -cpus 3 -t 0 -o csv -m GET -d "$input\n" http://localhost:$((10000 + input))
+ done
+ echo "[DONE]"
+ sleep 5
+
+ echo "Running Experiments"
+ # Run each separately
+ hey -z ${duration_sec}s -cpus 4 -c 100 -t 0 -o csv -m GET -d "40\n" http://localhost:10040 > "$results_directory/fib40.csv"
+ hey -z ${duration_sec}s -cpus 4 -c 100 -t 0 -o csv -m GET -d "10\n" http://localhost:10010 > "$results_directory/fib10.csv"
+
+ # Run lower priority first, then higher priority. The lower priority has offsets to ensure it runs the entire time the high priority is trying to run
+ hey -z $((duration_sec + 2 * offset))s -cpus 2 -c 100 -t 0 -o csv -m GET -d "40\n" http://localhost:10040 > "$results_directory/fib40-con.csv" &
+ sleep $offset
+ hey -z ${duration_sec}s -cpus 2 -c 100 -t 0 -o csv -m GET -d "10\n" http://localhost:10010 > "$results_directory/fib10-con.csv" &
+ sleep $((duration_sec + offset + 15))
+
+ # Stop the runtime if not in debug mode
+ [ "$1" != "-d" ] && kill_runtime
+
+ # Generate *.csv and *.dat results
+ echo -n "Parsing Results: "
+
+ printf "Payload,Success_Rate\n" >> "$results_directory/success.csv"
+ printf "Payload,Throughput\n" >> "$results_directory/throughput.csv"
+ printf "Payload,p50,p90,p99,p100\n" >> "$results_directory/latency.csv"
+
+ deadlines_ms=(2 2 3000 3000)
+ payloads=(fib10 fib10-con fib40 fib40-con)
+
+ for ((i = 0; i < 4; i++)); do
+ # for payload in ${payloads[*]}; do
+ payload=${payloads[$i]}
+ deadline=${deadlines_ms[$i]}
+
+ # Get Number of Requests
+ requests=$(($(wc -l < "$results_directory/$payload.csv") - 1))
+ ((requests == 0)) && continue
+
+ # Calculate Success Rate for csv
+ awk -F, '
$7 == 200 && ($1 * 1000) <= '"$deadline"' {ok++}
END{printf "'"$payload"',%3.5f%\n", (ok / (NR - 1) * 100)}
- ' <"$results_directory/$payload.csv" >>"$results_directory/success.csv"
+ ' < "$results_directory/$payload.csv" >> "$results_directory/success.csv"
- # Filter on 200s, convery from s to ms, and sort
- awk -F, '$7 == 200 {print ($1 * 1000)}' <"$results_directory/$payload.csv" |
- sort -g >"$results_directory/$payload-response.csv"
+		# Filter on 200s, convert from s to ms, and sort
+ awk -F, '$7 == 200 {print ($1 * 1000)}' < "$results_directory/$payload.csv" \
+ | sort -g > "$results_directory/$payload-response.csv"
- # Get Number of 200s
- oks=$(wc -l <"$results_directory/$payload-response.csv")
- ((oks == 0)) && continue # If all errors, skip line
+ # Get Number of 200s
+ oks=$(wc -l < "$results_directory/$payload-response.csv")
+ ((oks == 0)) && continue # If all errors, skip line
- # Get Latest Timestamp
- duration=$(tail -n1 "$results_directory/$payload.csv" | cut -d, -f8)
- throughput=$(echo "$oks/$duration" | bc)
- printf "%s,%f\n" "$payload" "$throughput" >>"$results_directory/throughput.csv"
+ # Get Latest Timestamp
+ duration=$(tail -n1 "$results_directory/$payload.csv" | cut -d, -f8)
+ throughput=$(echo "$oks/$duration" | bc)
+ printf "%s,%f\n" "$payload" "$throughput" >> "$results_directory/throughput.csv"
- # Generate Latency Data for csv
- awk '
+ # Generate Latency Data for csv
+ awk '
BEGIN {
sum = 0
p50 = int('"$oks"' * 0.5)
@@ -104,21 +104,21 @@ for scheduler in ${schedulers[*]}; do
NR==p90 {printf "%1.4f,", $0}
NR==p99 {printf "%1.4f,", $0}
NR==p100 {printf "%1.4f\n", $0}
- ' <"$results_directory/$payload-response.csv" >>"$results_directory/latency.csv"
+ ' < "$results_directory/$payload-response.csv" >> "$results_directory/latency.csv"
- # Delete scratch file used for sorting/counting
- # rm -rf "$results_directory/$payload-response.csv"
- done
+ # Delete scratch file used for sorting/counting
+ # rm -rf "$results_directory/$payload-response.csv"
+ done
- # Transform csvs to dat files for gnuplot
- for file in success latency throughput; do
- echo -n "#" >"$results_directory/$file.dat"
- tr ',' ' ' <"$results_directory/$file.csv" | column -t >>"$results_directory/$file.dat"
- done
+ # Transform csvs to dat files for gnuplot
+ for file in success latency throughput; do
+ echo -n "#" > "$results_directory/$file.dat"
+ tr ',' ' ' < "$results_directory/$file.csv" | column -t >> "$results_directory/$file.dat"
+ done
- # Generate gnuplots. Commented out because we don't have *.gnuplots defined
- # generate_gnuplots
+ # Generate gnuplots. Commented out because we don't have *.gnuplots defined
+ # generate_gnuplots
- # Cleanup, if requires
- echo "[DONE]"
+	# Cleanup, if required
+ echo "[DONE]"
done
diff --git a/runtime/experiments/deadline/run_relative.sh b/runtime/experiments/deadline/run_relative.sh
index bd072e4..31d6c2a 100755
--- a/runtime/experiments/deadline/run_relative.sh
+++ b/runtime/experiments/deadline/run_relative.sh
@@ -11,87 +11,87 @@ binary_directory=$(cd ../../bin && pwd)
schedulers=(EDF FIFO)
for scheduler in ${schedulers[*]}; do
- results_directory="$experiment_directory/res/$timestamp/$scheduler"
- log=log.txt
-
- mkdir -p "$results_directory"
- log_environment >>"$results_directory/$log"
-
- # Start the runtime
- if [ "$1" != "-d" ]; then
- SLEDGE_NWORKERS=5 SLEDGE_SCHEDULER=$scheduler PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" >>"$results_directory/$log" 2>>"$results_directory/$log" &
- sleep 1
- else
- echo "Running under gdb"
- echo "Running under gdb" >>"$results_directory/$log"
- fi
-
- inputs=(40 10)
- duration_sec=15
- offset=5
-
- # Execute workloads long enough for runtime to learn excepted execution time
- echo -n "Running Samples: "
- for input in ${inputs[*]}; do
- hey -z ${duration_sec}s -cpus 3 -t 0 -o csv -m GET -d "$input\n" http://localhost:$((10000 + input))
- done
- echo "[DONE]"
- sleep 5
-
- echo "Running Experiments"
- # Run each separately
- hey -z ${duration_sec}s -cpus 4 -c 100 -t 0 -o csv -m GET -d "40\n" http://localhost:10040 >"$results_directory/fib40.csv"
- hey -z ${duration_sec}s -cpus 4 -c 100 -t 0 -o csv -m GET -d "10\n" http://localhost:10010 >"$results_directory/fib10.csv"
-
- # Run lower priority first, then higher priority. The lower priority has offsets to ensure it runs the entire time the high priority is trying to run
- hey -z $((duration_sec + 2 * offset))s -cpus 2 -c 100 -t 0 -o csv -m GET -d "40\n" http://localhost:10040 >"$results_directory/fib40-con.csv" &
- sleep $offset
- hey -z ${duration_sec}s -cpus 2 -c 100 -t 0 -o csv -m GET -d "10\n" http://localhost:10010 >"$results_directory/fib10-con.csv" &
- sleep $((duration_sec + offset + 15))
-
- # Stop the runtime if not in debug mode
- [ "$1" != "-d" ] && kill_runtime
-
- # Generate *.csv and *.dat results
- echo -n "Parsing Results: "
-
- printf "Payload,Success_Rate\n" >>"$results_directory/success.csv"
- printf "Payload,Throughput\n" >>"$results_directory/throughput.csv"
- printf "Payload,p50,p90,p99,p100\n" >>"$results_directory/latency.csv"
-
- deadlines_ms=(2 2 3000 3000)
- payloads=(fib10 fib10-con fib40 fib40-con)
-
- for ((i = 0; i < 4; i++)); do
- # for payload in ${payloads[*]}; do
- payload=${payloads[$i]}
- deadline=${deadlines_ms[$i]}
-
- # Get Number of Requests
- requests=$(($(wc -l <"$results_directory/$payload.csv") - 1))
- ((requests == 0)) && continue
-
- # Calculate Success Rate for csv
- awk -F, '
+ results_directory="$experiment_directory/res/$timestamp/$scheduler"
+ log=log.txt
+
+ mkdir -p "$results_directory"
+ log_environment >> "$results_directory/$log"
+
+ # Start the runtime
+ if [ "$1" != "-d" ]; then
+ SLEDGE_NWORKERS=5 SLEDGE_SCHEDULER=$scheduler PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" >> "$results_directory/$log" 2>> "$results_directory/$log" &
+ sleep 1
+ else
+ echo "Running under gdb"
+ echo "Running under gdb" >> "$results_directory/$log"
+ fi
+
+ inputs=(40 10)
+ duration_sec=15
+ offset=5
+
+	# Execute workloads long enough for runtime to learn expected execution time
+ echo -n "Running Samples: "
+ for input in ${inputs[*]}; do
+ hey -z ${duration_sec}s -cpus 3 -t 0 -o csv -m GET -d "$input\n" http://localhost:$((10000 + input))
+ done
+ echo "[DONE]"
+ sleep 5
+
+ echo "Running Experiments"
+ # Run each separately
+ hey -z ${duration_sec}s -cpus 4 -c 100 -t 0 -o csv -m GET -d "40\n" http://localhost:10040 > "$results_directory/fib40.csv"
+ hey -z ${duration_sec}s -cpus 4 -c 100 -t 0 -o csv -m GET -d "10\n" http://localhost:10010 > "$results_directory/fib10.csv"
+
+ # Run lower priority first, then higher priority. The lower priority has offsets to ensure it runs the entire time the high priority is trying to run
+ hey -z $((duration_sec + 2 * offset))s -cpus 2 -c 100 -t 0 -o csv -m GET -d "40\n" http://localhost:10040 > "$results_directory/fib40-con.csv" &
+ sleep $offset
+ hey -z ${duration_sec}s -cpus 2 -c 100 -t 0 -o csv -m GET -d "10\n" http://localhost:10010 > "$results_directory/fib10-con.csv" &
+ sleep $((duration_sec + offset + 15))
+
+ # Stop the runtime if not in debug mode
+ [ "$1" != "-d" ] && kill_runtime
+
+ # Generate *.csv and *.dat results
+ echo -n "Parsing Results: "
+
+ printf "Payload,Success_Rate\n" >> "$results_directory/success.csv"
+ printf "Payload,Throughput\n" >> "$results_directory/throughput.csv"
+ printf "Payload,p50,p90,p99,p100\n" >> "$results_directory/latency.csv"
+
+ deadlines_ms=(2 2 3000 3000)
+ payloads=(fib10 fib10-con fib40 fib40-con)
+
+ for ((i = 0; i < 4; i++)); do
+ # for payload in ${payloads[*]}; do
+ payload=${payloads[$i]}
+ deadline=${deadlines_ms[$i]}
+
+ # Get Number of Requests
+ requests=$(($(wc -l < "$results_directory/$payload.csv") - 1))
+ ((requests == 0)) && continue
+
+ # Calculate Success Rate for csv
+ awk -F, '
$7 == 200 && ($1 * 1000) <= '"$deadline"' {ok++}
END{printf "'"$payload"',%3.5f%\n", (ok / (NR - 1) * 100)}
- ' <"$results_directory/$payload.csv" >>"$results_directory/success.csv"
+ ' < "$results_directory/$payload.csv" >> "$results_directory/success.csv"
- # Filter on 200s, convery from s to ms, and sort
- awk -F, '$7 == 200 {print ($1 * 1000)}' <"$results_directory/$payload.csv" |
- sort -g >"$results_directory/$payload-response.csv"
+		# Filter on 200s, convert from s to ms, and sort
+ awk -F, '$7 == 200 {print ($1 * 1000)}' < "$results_directory/$payload.csv" \
+ | sort -g > "$results_directory/$payload-response.csv"
- # Get Number of 200s
- oks=$(wc -l <"$results_directory/$payload-response.csv")
- ((oks == 0)) && continue # If all errors, skip line
+ # Get Number of 200s
+ oks=$(wc -l < "$results_directory/$payload-response.csv")
+ ((oks == 0)) && continue # If all errors, skip line
- # Get Latest Timestamp
- duration=$(tail -n1 "$results_directory/$payload.csv" | cut -d, -f8)
- throughput=$(echo "$oks/$duration" | bc)
- printf "%s,%f\n" "$payload" "$throughput" >>"$results_directory/throughput.csv"
+ # Get Latest Timestamp
+ duration=$(tail -n1 "$results_directory/$payload.csv" | cut -d, -f8)
+ throughput=$(echo "$oks/$duration" | bc)
+ printf "%s,%f\n" "$payload" "$throughput" >> "$results_directory/throughput.csv"
- # Generate Latency Data for csv
- awk '
+ # Generate Latency Data for csv
+ awk '
BEGIN {
sum = 0
p50 = int('"$oks"' * 0.5)
@@ -104,21 +104,21 @@ for scheduler in ${schedulers[*]}; do
NR==p90 {printf "%1.4f%,", $0 / '"$deadline"' * 100}
NR==p99 {printf "%1.4f%,", $0 / '"$deadline"' * 100}
NR==p100 {printf "%1.4f%\n", $0 / '"$deadline"' * 100}
- ' <"$results_directory/$payload-response.csv" >>"$results_directory/latency.csv"
+ ' < "$results_directory/$payload-response.csv" >> "$results_directory/latency.csv"
- # Delete scratch file used for sorting/counting
- # rm -rf "$results_directory/$payload-response.csv"
- done
+ # Delete scratch file used for sorting/counting
+ # rm -rf "$results_directory/$payload-response.csv"
+ done
- # Transform csvs to dat files for gnuplot
- for file in success latency throughput; do
- echo -n "#" >"$results_directory/$file.dat"
- tr ',' ' ' <"$results_directory/$file.csv" | column -t >>"$results_directory/$file.dat"
- done
+ # Transform csvs to dat files for gnuplot
+ for file in success latency throughput; do
+ echo -n "#" > "$results_directory/$file.dat"
+ tr ',' ' ' < "$results_directory/$file.csv" | column -t >> "$results_directory/$file.dat"
+ done
- # Generate gnuplots. Commented out because we don't have *.gnuplots defined
- # generate_gnuplots
+ # Generate gnuplots. Commented out because we don't have *.gnuplots defined
+ # generate_gnuplots
- # Cleanup, if requires
- echo "[DONE]"
+	# Cleanup, if required
+ echo "[DONE]"
done
diff --git a/runtime/experiments/payload/body/generate.sh b/runtime/experiments/payload/body/generate.sh
index 70959c4..669e4dd 100755
--- a/runtime/experiments/payload/body/generate.sh
+++ b/runtime/experiments/payload/body/generate.sh
@@ -1,12 +1,12 @@
#!/bin/bash
# Generates payloads of 1KB, 10KB, 100KB, 1MB
for size in 1024 $((1024 * 10)) $((1024 * 100)) $((1024 * 1024)); do
- rm -rf $size.txt
- i=0
- echo -n "Generating $size:"
- while ((i < size)); do
- printf 'a' >>$size.txt
- ((i++))
- done
- echo "[DONE]"
+ rm -rf $size.txt
+ i=0
+ echo -n "Generating $size:"
+ while ((i < size)); do
+ printf 'a' >> $size.txt
+ ((i++))
+ done
+ echo "[DONE]"
done
diff --git a/runtime/experiments/payload/debug.sh b/runtime/experiments/payload/debug.sh
index 62c64e3..f40fa45 100755
--- a/runtime/experiments/payload/debug.sh
+++ b/runtime/experiments/payload/debug.sh
@@ -12,8 +12,8 @@ export LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH"
export PATH="$binary_directory:$PATH"
gdb --eval-command="handle SIGUSR1 nostop" \
- --eval-command="handle SIGPIPE nostop" \
- --eval-command="set pagination off" \
- --eval-command="set substitute-path /sledge/runtime $project_directory" \
- --eval-command="run $experiment_directory/spec.json" \
- sledgert
+ --eval-command="handle SIGPIPE nostop" \
+ --eval-command="set pagination off" \
+ --eval-command="set substitute-path /sledge/runtime $project_directory" \
+ --eval-command="run $experiment_directory/spec.json" \
+ sledgert
diff --git a/runtime/experiments/payload/run.sh b/runtime/experiments/payload/run.sh
index edde7f6..718c57e 100755
--- a/runtime/experiments/payload/run.sh
+++ b/runtime/experiments/payload/run.sh
@@ -12,15 +12,15 @@ log=log.txt
mkdir -p "$results_directory"
-log_environment >>"$results_directory/$log"
+log_environment >> "$results_directory/$log"
# Start the runtime
if [ "$1" != "-d" ]; then
- PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" >>"$results_directory/$log" 2>>"$results_directory/$log" &
- sleep 1
+ PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" >> "$results_directory/$log" 2>> "$results_directory/$log" &
+ sleep 1
else
- echo "Running under gdb"
- echo "Running under gdb" >>"$results_directory/$log"
+ echo "Running under gdb"
+ echo "Running under gdb" >> "$results_directory/$log"
fi
payloads=(1024 10240 102400 1048576)
@@ -29,15 +29,15 @@ iterations=10000
# If the one of the expected body files doesn't exist, trigger the generation script.
for payload in ${payloads[*]}; do
- if test -f "$experiment_directory/body/$payload.txt"; then
- continue
- else
- echo "Generating Payloads: "
- {
- cd "$experiment_directory/body" && ./generate.sh
- }
- break
- fi
+ if test -f "$experiment_directory/body/$payload.txt"; then
+ continue
+ else
+ echo "Generating Payloads: "
+ {
+ cd "$experiment_directory/body" && ./generate.sh
+ }
+ break
+ fi
done
# Execute workloads long enough for runtime to learn excepted execution time
@@ -52,46 +52,46 @@ echo "[DONE]"
# Execute the experiments
echo "Running Experiments"
for i in {0..3}; do
- printf "\t%d Payload: " "${payloads[$i]}"
- hey -n "$iterations" -c 1 -cpus 2 -o csv -m GET -D "$experiment_directory/body/${payloads[$i]}.txt" http://localhost:"${ports[$i]}" >"$results_directory/${payloads[$i]}.csv"
- echo "[DONE]"
+ printf "\t%d Payload: " "${payloads[$i]}"
+ hey -n "$iterations" -c 1 -cpus 2 -o csv -m GET -D "$experiment_directory/body/${payloads[$i]}.txt" http://localhost:"${ports[$i]}" > "$results_directory/${payloads[$i]}.csv"
+ echo "[DONE]"
done
# Stop the runtime
if [ "$1" != "-d" ]; then
- sleep 5
- kill_runtime
+ sleep 5
+ kill_runtime
fi
# Generate *.csv and *.dat results
echo -n "Parsing Results: "
-printf "Payload,Success_Rate\n" >>"$results_directory/success.csv"
-printf "Payload,Throughput\n" >>"$results_directory/throughput.csv"
-printf "Payload,p50,p90,p99,p100\n" >>"$results_directory/latency.csv"
+printf "Payload,Success_Rate\n" >> "$results_directory/success.csv"
+printf "Payload,Throughput\n" >> "$results_directory/throughput.csv"
+printf "Payload,p50,p90,p99,p100\n" >> "$results_directory/latency.csv"
for payload in ${payloads[*]}; do
- # Calculate Success Rate for csv
- awk -F, '
+ # Calculate Success Rate for csv
+ awk -F, '
$7 == 200 {ok++}
END{printf "'"$payload"',%3.5f\n", (ok / '"$iterations"' * 100)}
- ' <"$results_directory/$payload.csv" >>"$results_directory/success.csv"
+ ' < "$results_directory/$payload.csv" >> "$results_directory/success.csv"
- # Filter on 200s, convery from s to ms, and sort
- awk -F, '$7 == 200 {print ($1 * 1000)}' <"$results_directory/$payload.csv" |
- sort -g >"$results_directory/$payload-response.csv"
+	# Filter on 200s, convert from s to ms, and sort
+ awk -F, '$7 == 200 {print ($1 * 1000)}' < "$results_directory/$payload.csv" \
+ | sort -g > "$results_directory/$payload-response.csv"
- # Get Number of 200s
- oks=$(wc -l <"$results_directory/$payload-response.csv")
- ((oks == 0)) && continue # If all errors, skip line
+ # Get Number of 200s
+ oks=$(wc -l < "$results_directory/$payload-response.csv")
+ ((oks == 0)) && continue # If all errors, skip line
- # Get Latest Timestamp
- duration=$(tail -n1 "$results_directory/$payload.csv" | cut -d, -f8)
- throughput=$(echo "$oks/$duration" | bc)
- printf "%d,%f\n" "$payload" "$throughput" >>"$results_directory/throughput.csv"
+ # Get Latest Timestamp
+ duration=$(tail -n1 "$results_directory/$payload.csv" | cut -d, -f8)
+ throughput=$(echo "$oks/$duration" | bc)
+ printf "%d,%f\n" "$payload" "$throughput" >> "$results_directory/throughput.csv"
- # Generate Latency Data for csv
- awk '
+ # Generate Latency Data for csv
+ awk '
BEGIN {
sum = 0
p50 = int('"$oks"' * 0.5)
@@ -104,16 +104,16 @@ for payload in ${payloads[*]}; do
NR==p90 {printf "%1.4f,", $0}
NR==p99 {printf "%1.4f,", $0}
NR==p100 {printf "%1.4f\n", $0}
- ' <"$results_directory/$payload-response.csv" >>"$results_directory/latency.csv"
+ ' < "$results_directory/$payload-response.csv" >> "$results_directory/latency.csv"
- # Delete scratch file used for sorting/counting
- rm -rf "$results_directory/$payload-response.csv"
+ # Delete scratch file used for sorting/counting
+ rm -rf "$results_directory/$payload-response.csv"
done
# Transform csvs to dat files for gnuplot
for file in success latency throughput; do
- echo -n "#" >"$results_directory/$file.dat"
- tr ',' ' ' <"$results_directory/$file.csv" | column -t >>"$results_directory/$file.dat"
+ echo -n "#" > "$results_directory/$file.dat"
+ tr ',' ' ' < "$results_directory/$file.csv" | column -t >> "$results_directory/$file.dat"
done
# Generate gnuplots
diff --git a/runtime/experiments/preemption/backend.sh b/runtime/experiments/preemption/backend.sh
index 5bc869b..8d2ba89 100755
--- a/runtime/experiments/preemption/backend.sh
+++ b/runtime/experiments/preemption/backend.sh
@@ -12,7 +12,7 @@ results_directory="$experiment_directory/res/$timestamp/$scheduler"
log=log.txt
mkdir -p "$results_directory"
-log_environment >>"$results_directory/$log"
+log_environment >> "$results_directory/$log"
# Start the runtime
PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" | tee -a "$results_directory/$log"
diff --git a/runtime/experiments/preemption/client.sh b/runtime/experiments/preemption/client.sh
index d6ffff6..ad07df3 100755
--- a/runtime/experiments/preemption/client.sh
+++ b/runtime/experiments/preemption/client.sh
@@ -20,66 +20,66 @@ offset=5
# Execute workloads long enough for runtime to learn excepted execution time
echo -n "Running Samples: "
for input in ${inputs[*]}; do
- hey -n 45 -c 4 -t 0 -o csv -m GET -d "$input\n" http://"$host":$((10000 + input))
+ hey -n 45 -c 4 -t 0 -o csv -m GET -d "$input\n" http://"$host":$((10000 + input))
done
echo "[DONE]"
sleep 30
echo "Running Experiments"
# Run each separately
-hey -z ${duration_sec}s -cpus 6 -c 100 -t 0 -o csv -m GET -d "10\n" "http://$host:10010" >"$results_directory/fib10.csv"
+hey -z ${duration_sec}s -cpus 6 -c 100 -t 0 -o csv -m GET -d "10\n" "http://$host:10010" > "$results_directory/fib10.csv"
echo "fib(10) Complete"
sleep 60
-hey -z ${duration_sec}s -cpus 6 -c 100 -t 0 -o csv -m GET -d "40\n" "http://$host:10040" >"$results_directory/fib40.csv"
+hey -z ${duration_sec}s -cpus 6 -c 100 -t 0 -o csv -m GET -d "40\n" "http://$host:10040" > "$results_directory/fib40.csv"
echo "fib(40) Complete"
sleep 120
# Run lower priority first, then higher priority. The lower priority has offsets to ensure it runs the entire time the high priority is trying to run
-hey -z $((duration_sec + 2 * offset))s -cpus 3 -c 100 -t 0 -o csv -m GET -d "40\n" "http://$host:10040" >"$results_directory/fib40-con.csv" &
+hey -z $((duration_sec + 2 * offset))s -cpus 3 -c 100 -t 0 -o csv -m GET -d "40\n" "http://$host:10040" > "$results_directory/fib40-con.csv" &
sleep $offset
-hey -z ${duration_sec}s -cpus 3 -c 100 -t 0 -o csv -m GET -d "10\n" "http://$host:10010" >"$results_directory/fib10-con.csv" &
+hey -z ${duration_sec}s -cpus 3 -c 100 -t 0 -o csv -m GET -d "10\n" "http://$host:10010" > "$results_directory/fib10-con.csv" &
sleep $((duration_sec + offset + 15))
echo "fib(10) & fib(40) Complete"
# Generate *.csv and *.dat results
echo -n "Parsing Results: "
-printf "Payload,Success_Rate\n" >>"$results_directory/success.csv"
-printf "Payload,Throughput\n" >>"$results_directory/throughput.csv"
-printf "Payload,p50,p90,p99,p100\n" >>"$results_directory/latency.csv"
+printf "Payload,Success_Rate\n" >> "$results_directory/success.csv"
+printf "Payload,Throughput\n" >> "$results_directory/throughput.csv"
+printf "Payload,p50,p90,p99,p100\n" >> "$results_directory/latency.csv"
durations_s=(15 15 15 25)
payloads=(fib10 fib10-con fib40 fib40-con)
for payload in ${payloads[*]}; do
- # Get Number of Requests
- requests=$(($(wc -l <"$results_directory/$payload.csv") - 1))
- ((requests == 0)) && continue
+ # Get Number of Requests
+ requests=$(($(wc -l < "$results_directory/$payload.csv") - 1))
+ ((requests == 0)) && continue
- duration=${durations_s[$i]}
+ duration=${durations_s[$i]}
- # Calculate Success Rate for csv
- awk -F, '
+ # Calculate Success Rate for csv
+ awk -F, '
$7 == 200 {ok++}
END{printf "'"$payload"',%3.5f%\n", (ok / (NR - 1) * 100)}
-' <"$results_directory/$payload.csv" >>"$results_directory/success.csv"
+' < "$results_directory/$payload.csv" >> "$results_directory/success.csv"
- # Filter on 200s, convery from s to ms, and sort
- awk -F, '$7 == 200 {print ($1 * 1000)}' <"$results_directory/$payload.csv" |
- sort -g >"$results_directory/$payload-response.csv"
+	# Filter on 200s, convert from s to ms, and sort
+ awk -F, '$7 == 200 {print ($1 * 1000)}' < "$results_directory/$payload.csv" \
+ | sort -g > "$results_directory/$payload-response.csv"
- # Get Number of 200s
- oks=$(wc -l <"$results_directory/$payload-response.csv")
- ((oks == 0)) && continue # If all errors, skip line
+ # Get Number of 200s
+ oks=$(wc -l < "$results_directory/$payload-response.csv")
+ ((oks == 0)) && continue # If all errors, skip line
- # Get Latest Timestamp
- # duration=$(tail -n1 "$results_directory/$payload.csv" | cut -d, -f8)
- throughput=$(echo "$oks/$duration" | bc)
- printf "%s,%f\n" "$payload" "$throughput" >>"$results_directory/throughput.csv"
+ # Get Latest Timestamp
+ # duration=$(tail -n1 "$results_directory/$payload.csv" | cut -d, -f8)
+ throughput=$(echo "$oks/$duration" | bc)
+ printf "%s,%f\n" "$payload" "$throughput" >> "$results_directory/throughput.csv"
- # Generate Latency Data for csv
- awk '
+ # Generate Latency Data for csv
+ awk '
BEGIN {
sum = 0
p50 = int('"$oks"' * 0.5)
@@ -92,16 +92,16 @@ for payload in ${payloads[*]}; do
NR==p90 {printf "%1.4f,", $0}
NR==p99 {printf "%1.4f,", $0}
NR==p100 {printf "%1.4f\n", $0}
-' <"$results_directory/$payload-response.csv" >>"$results_directory/latency.csv"
+' < "$results_directory/$payload-response.csv" >> "$results_directory/latency.csv"
- # Delete scratch file used for sorting/counting
- # rm -rf "$results_directory/$payload-response.csv"
+ # Delete scratch file used for sorting/counting
+ # rm -rf "$results_directory/$payload-response.csv"
done
# Transform csvs to dat files for gnuplot
for file in success latency throughput; do
- echo -n "#" >"$results_directory/$file.dat"
- tr ',' ' ' <"$results_directory/$file.csv" | column -t >>"$results_directory/$file.dat"
+ echo -n "#" > "$results_directory/$file.dat"
+ tr ',' ' ' < "$results_directory/$file.csv" | column -t >> "$results_directory/$file.dat"
done
# Generate gnuplots. Commented out because we don't have *.gnuplots defined
diff --git a/runtime/experiments/preemption/debug.sh b/runtime/experiments/preemption/debug.sh
index 079f6a0..4ef1936 100755
--- a/runtime/experiments/preemption/debug.sh
+++ b/runtime/experiments/preemption/debug.sh
@@ -13,8 +13,8 @@ export PATH="$binary_directory:$PATH"
export SLEDGE_SCHEDULER="EDF"
gdb --eval-command="handle SIGUSR1 nostop" \
- --eval-command="handle SIGPIPE nostop" \
- --eval-command="set pagination off" \
- --eval-command="set substitute-path /sledge/runtime $project_directory" \
- --eval-command="run $experiment_directory/spec.json" \
- sledgert
+ --eval-command="handle SIGPIPE nostop" \
+ --eval-command="set pagination off" \
+ --eval-command="set substitute-path /sledge/runtime $project_directory" \
+ --eval-command="run $experiment_directory/spec.json" \
+ sledgert
diff --git a/runtime/experiments/preemption/fix_results.sh b/runtime/experiments/preemption/fix_results.sh
index 83ef32d..3a527bd 100755
--- a/runtime/experiments/preemption/fix_results.sh
+++ b/runtime/experiments/preemption/fix_results.sh
@@ -11,41 +11,41 @@ results_directory="$experiment_directory/res/$timestamp"
# Generate *.csv and *.dat results
echo -n "Parsing Results: "
-printf "Payload,Success_Rate\n" >>"$results_directory/success.csv"
-printf "Payload,Throughput\n" >>"$results_directory/throughput.csv"
-printf "Payload,p50,p90,p99,p998,p999,p100\n" >>"$results_directory/latency.csv"
+printf "Payload,Success_Rate\n" >> "$results_directory/success.csv"
+printf "Payload,Throughput\n" >> "$results_directory/throughput.csv"
+printf "Payload,p50,p90,p99,p998,p999,p100\n" >> "$results_directory/latency.csv"
durations_s=(15 15 15 25)
payloads=(fib10 fib10-con fib40 fib40-con)
for payload in ${payloads[*]}; do
- # Get Number of Requests
- requests=$(($(wc -l <"$results_directory/$payload.csv") - 1))
- ((requests == 0)) && continue
+ # Get Number of Requests
+ requests=$(($(wc -l < "$results_directory/$payload.csv") - 1))
+ ((requests == 0)) && continue
- duration=${durations_s[$i]}
+ duration=${durations_s[$i]}
- # Calculate Success Rate for csv
- awk -F, '
+ # Calculate Success Rate for csv
+ awk -F, '
$7 == 200 {ok++}
END{printf "'"$payload"',%3.5f%\n", (ok / (NR - 1) * 100)}
-' <"$results_directory/$payload.csv" >>"$results_directory/success.csv"
+' < "$results_directory/$payload.csv" >> "$results_directory/success.csv"
- # Filter on 200s, convery from s to ms, and sort
- awk -F, '$7 == 200 {print ($1 * 1000)}' <"$results_directory/$payload.csv" |
- sort -g >"$results_directory/$payload-response.csv"
+ # Filter on 200s, convert from s to ms, and sort
+ awk -F, '$7 == 200 {print ($1 * 1000)}' < "$results_directory/$payload.csv" \
+ | sort -g > "$results_directory/$payload-response.csv"
- # Get Number of 200s
- oks=$(wc -l <"$results_directory/$payload-response.csv")
- ((oks == 0)) && continue # If all errors, skip line
+ # Get Number of 200s
+ oks=$(wc -l < "$results_directory/$payload-response.csv")
+ ((oks == 0)) && continue # If all errors, skip line
- # Get Latest Timestamp
- # duration=$(tail -n1 "$results_directory/$payload.csv" | cut -d, -f8)
- throughput=$(echo "$oks/$duration" | bc)
- printf "%s,%f\n" "$payload" "$throughput" >>"$results_directory/throughput.csv"
+ # Get Latest Timestamp
+ # duration=$(tail -n1 "$results_directory/$payload.csv" | cut -d, -f8)
+ throughput=$(echo "$oks/$duration" | bc)
+ printf "%s,%f\n" "$payload" "$throughput" >> "$results_directory/throughput.csv"
- # Generate Latency Data for csv
- awk '
+ # Generate Latency Data for csv
+ awk '
BEGIN {
sum = 0
p50 = int('"$oks"' * 0.5)
@@ -62,16 +62,16 @@ for payload in ${payloads[*]}; do
NR==p998 {printf "%1.4f,", $0}
NR==p999 {printf "%1.4f,", $0}
NR==p100 {printf "%1.4f\n", $0}
-' <"$results_directory/$payload-response.csv" >>"$results_directory/latency.csv"
+' < "$results_directory/$payload-response.csv" >> "$results_directory/latency.csv"
- # Delete scratch file used for sorting/counting
- # rm -rf "$results_directory/$payload-response.csv"
+ # Delete scratch file used for sorting/counting
+ # rm -rf "$results_directory/$payload-response.csv"
done
# Transform csvs to dat files for gnuplot
for file in success latency throughput; do
- echo -n "#" >"$results_directory/$file.dat"
- tr ',' ' ' <"$results_directory/$file.csv" | column -t >>"$results_directory/$file.dat"
+ echo -n "#" > "$results_directory/$file.dat"
+ tr ',' ' ' < "$results_directory/$file.csv" | column -t >> "$results_directory/$file.dat"
done
# Generate gnuplots. Commented out because we don't have *.gnuplots defined
diff --git a/runtime/experiments/preemption/run.sh b/runtime/experiments/preemption/run.sh
index bf38f2a..0847d4f 100755
--- a/runtime/experiments/preemption/run.sh
+++ b/runtime/experiments/preemption/run.sh
@@ -11,87 +11,87 @@ binary_directory=$(cd ../../bin && pwd)
schedulers=(EDF FIFO)
for scheduler in ${schedulers[*]}; do
- results_directory="$experiment_directory/res/$timestamp/$scheduler"
- log=log.txt
-
- mkdir -p "$results_directory"
- log_environment >>"$results_directory/$log"
-
- # Start the runtime
- if [ "$1" != "-d" ]; then
- SLEDGE_NWORKERS=5 SLEDGE_SCHEDULER=$scheduler PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" >>"$results_directory/$log" 2>>"$results_directory/$log" &
- sleep 1
- else
- echo "Running under gdb"
- echo "Running under gdb" >>"$results_directory/$log"
- fi
-
- inputs=(40 10)
- duration_sec=15
- offset=5
-
- # Execute workloads long enough for runtime to learn excepted execution time
- echo -n "Running Samples: "
- for input in ${inputs[*]}; do
- hey -z ${duration_sec}s -cpus 3 -t 0 -o csv -m GET -d "$input\n" http://localhost:$((10000 + input))
- done
- echo "[DONE]"
- sleep 5
-
- echo "Running Experiments"
- # Run each separately
- hey -z ${duration_sec}s -cpus 4 -c 100 -t 0 -o csv -m GET -d "40\n" http://localhost:10040 >"$results_directory/fib40.csv"
- hey -z ${duration_sec}s -cpus 4 -c 100 -t 0 -o csv -m GET -d "10\n" http://localhost:10010 >"$results_directory/fib10.csv"
-
- # Run lower priority first, then higher priority. The lower priority has offsets to ensure it runs the entire time the high priority is trying to run
- hey -z $((duration_sec + 2 * offset))s -cpus 2 -c 100 -t 0 -o csv -m GET -d "40\n" http://localhost:10040 >"$results_directory/fib40-con.csv" &
- sleep $offset
- hey -z ${duration_sec}s -cpus 2 -c 100 -t 0 -o csv -m GET -d "10\n" http://localhost:10010 >"$results_directory/fib10-con.csv" &
- sleep $((duration_sec + offset + 15))
-
- # Stop the runtime if not in debug mode
- [ "$1" != "-d" ] && kill_runtime
-
- # Generate *.csv and *.dat results
- echo -n "Parsing Results: "
-
- printf "Payload,Success_Rate\n" >>"$results_directory/success.csv"
- printf "Payload,Throughput\n" >>"$results_directory/throughput.csv"
- printf "Payload,p50,p90,p99,p100\n" >>"$results_directory/latency.csv"
-
- deadlines_ms=(2 2 3000 3000)
- payloads=(fib10 fib10-con fib40 fib40-con)
-
- for ((i = 0; i < 4; i++)); do
- # for payload in ${payloads[*]}; do
- payload=${payloads[$i]}
- deadline=${deadlines_ms[$i]}
-
- # Get Number of Requests
- requests=$(($(wc -l <"$results_directory/$payload.csv") - 1))
- ((requests == 0)) && continue
-
- # Calculate Success Rate for csv
- awk -F, '
+ results_directory="$experiment_directory/res/$timestamp/$scheduler"
+ log=log.txt
+
+ mkdir -p "$results_directory"
+ log_environment >> "$results_directory/$log"
+
+ # Start the runtime
+ if [ "$1" != "-d" ]; then
+ SLEDGE_NWORKERS=5 SLEDGE_SCHEDULER=$scheduler PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" >> "$results_directory/$log" 2>> "$results_directory/$log" &
+ sleep 1
+ else
+ echo "Running under gdb"
+ echo "Running under gdb" >> "$results_directory/$log"
+ fi
+
+ inputs=(40 10)
+ duration_sec=15
+ offset=5
+
+ # Execute workloads long enough for runtime to learn expected execution time
+ echo -n "Running Samples: "
+ for input in ${inputs[*]}; do
+ hey -z ${duration_sec}s -cpus 3 -t 0 -o csv -m GET -d "$input\n" http://localhost:$((10000 + input))
+ done
+ echo "[DONE]"
+ sleep 5
+
+ echo "Running Experiments"
+ # Run each separately
+ hey -z ${duration_sec}s -cpus 4 -c 100 -t 0 -o csv -m GET -d "40\n" http://localhost:10040 > "$results_directory/fib40.csv"
+ hey -z ${duration_sec}s -cpus 4 -c 100 -t 0 -o csv -m GET -d "10\n" http://localhost:10010 > "$results_directory/fib10.csv"
+
+ # Run lower priority first, then higher priority. The lower priority has offsets to ensure it runs the entire time the high priority is trying to run
+ hey -z $((duration_sec + 2 * offset))s -cpus 2 -c 100 -t 0 -o csv -m GET -d "40\n" http://localhost:10040 > "$results_directory/fib40-con.csv" &
+ sleep $offset
+ hey -z ${duration_sec}s -cpus 2 -c 100 -t 0 -o csv -m GET -d "10\n" http://localhost:10010 > "$results_directory/fib10-con.csv" &
+ sleep $((duration_sec + offset + 15))
+
+ # Stop the runtime if not in debug mode
+ [ "$1" != "-d" ] && kill_runtime
+
+ # Generate *.csv and *.dat results
+ echo -n "Parsing Results: "
+
+ printf "Payload,Success_Rate\n" >> "$results_directory/success.csv"
+ printf "Payload,Throughput\n" >> "$results_directory/throughput.csv"
+ printf "Payload,p50,p90,p99,p100\n" >> "$results_directory/latency.csv"
+
+ deadlines_ms=(2 2 3000 3000)
+ payloads=(fib10 fib10-con fib40 fib40-con)
+
+ for ((i = 0; i < 4; i++)); do
+ # for payload in ${payloads[*]}; do
+ payload=${payloads[$i]}
+ deadline=${deadlines_ms[$i]}
+
+ # Get Number of Requests
+ requests=$(($(wc -l < "$results_directory/$payload.csv") - 1))
+ ((requests == 0)) && continue
+
+ # Calculate Success Rate for csv
+ awk -F, '
$7 == 200 && ($1 * 1000) <= '"$deadline"' {ok++}
END{printf "'"$payload"',%3.5f%\n", (ok / (NR - 1) * 100)}
- ' <"$results_directory/$payload.csv" >>"$results_directory/success.csv"
+ ' < "$results_directory/$payload.csv" >> "$results_directory/success.csv"
- # Filter on 200s, convery from s to ms, and sort
- awk -F, '$7 == 200 {print ($1 * 1000)}' <"$results_directory/$payload.csv" |
- sort -g >"$results_directory/$payload-response.csv"
+ # Filter on 200s, convert from s to ms, and sort
+ awk -F, '$7 == 200 {print ($1 * 1000)}' < "$results_directory/$payload.csv" \
+ | sort -g > "$results_directory/$payload-response.csv"
- # Get Number of 200s
- oks=$(wc -l <"$results_directory/$payload-response.csv")
- ((oks == 0)) && continue # If all errors, skip line
+ # Get Number of 200s
+ oks=$(wc -l < "$results_directory/$payload-response.csv")
+ ((oks == 0)) && continue # If all errors, skip line
- # Get Latest Timestamp
- duration=$(tail -n1 "$results_directory/$payload.csv" | cut -d, -f8)
- throughput=$(echo "$oks/$duration" | bc)
- printf "%s,%f\n" "$payload" "$throughput" >>"$results_directory/throughput.csv"
+ # Get Latest Timestamp
+ duration=$(tail -n1 "$results_directory/$payload.csv" | cut -d, -f8)
+ throughput=$(echo "$oks/$duration" | bc)
+ printf "%s,%f\n" "$payload" "$throughput" >> "$results_directory/throughput.csv"
- # Generate Latency Data for csv
- awk '
+ # Generate Latency Data for csv
+ awk '
BEGIN {
sum = 0
p50 = int('"$oks"' * 0.5)
@@ -104,21 +104,21 @@ for scheduler in ${schedulers[*]}; do
NR==p90 {printf "%1.4f,", $0}
NR==p99 {printf "%1.4f,", $0}
NR==p100 {printf "%1.4f\n", $0}
- ' <"$results_directory/$payload-response.csv" >>"$results_directory/latency.csv"
+ ' < "$results_directory/$payload-response.csv" >> "$results_directory/latency.csv"
- # Delete scratch file used for sorting/counting
- # rm -rf "$results_directory/$payload-response.csv"
- done
+ # Delete scratch file used for sorting/counting
+ # rm -rf "$results_directory/$payload-response.csv"
+ done
- # Transform csvs to dat files for gnuplot
- for file in success latency throughput; do
- echo -n "#" >"$results_directory/$file.dat"
- tr ',' ' ' <"$results_directory/$file.csv" | column -t >>"$results_directory/$file.dat"
- done
+ # Transform csvs to dat files for gnuplot
+ for file in success latency throughput; do
+ echo -n "#" > "$results_directory/$file.dat"
+ tr ',' ' ' < "$results_directory/$file.csv" | column -t >> "$results_directory/$file.dat"
+ done
- # Generate gnuplots. Commented out because we don't have *.gnuplots defined
- # generate_gnuplots
+ # Generate gnuplots. Commented out because we don't have *.gnuplots defined
+ # generate_gnuplots
- # Cleanup, if requires
- echo "[DONE]"
+ # Cleanup, if required
+ echo "[DONE]"
done
diff --git a/runtime/experiments/preemption/run_relative.sh b/runtime/experiments/preemption/run_relative.sh
index bd072e4..31d6c2a 100755
--- a/runtime/experiments/preemption/run_relative.sh
+++ b/runtime/experiments/preemption/run_relative.sh
@@ -11,87 +11,87 @@ binary_directory=$(cd ../../bin && pwd)
schedulers=(EDF FIFO)
for scheduler in ${schedulers[*]}; do
- results_directory="$experiment_directory/res/$timestamp/$scheduler"
- log=log.txt
-
- mkdir -p "$results_directory"
- log_environment >>"$results_directory/$log"
-
- # Start the runtime
- if [ "$1" != "-d" ]; then
- SLEDGE_NWORKERS=5 SLEDGE_SCHEDULER=$scheduler PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" >>"$results_directory/$log" 2>>"$results_directory/$log" &
- sleep 1
- else
- echo "Running under gdb"
- echo "Running under gdb" >>"$results_directory/$log"
- fi
-
- inputs=(40 10)
- duration_sec=15
- offset=5
-
- # Execute workloads long enough for runtime to learn excepted execution time
- echo -n "Running Samples: "
- for input in ${inputs[*]}; do
- hey -z ${duration_sec}s -cpus 3 -t 0 -o csv -m GET -d "$input\n" http://localhost:$((10000 + input))
- done
- echo "[DONE]"
- sleep 5
-
- echo "Running Experiments"
- # Run each separately
- hey -z ${duration_sec}s -cpus 4 -c 100 -t 0 -o csv -m GET -d "40\n" http://localhost:10040 >"$results_directory/fib40.csv"
- hey -z ${duration_sec}s -cpus 4 -c 100 -t 0 -o csv -m GET -d "10\n" http://localhost:10010 >"$results_directory/fib10.csv"
-
- # Run lower priority first, then higher priority. The lower priority has offsets to ensure it runs the entire time the high priority is trying to run
- hey -z $((duration_sec + 2 * offset))s -cpus 2 -c 100 -t 0 -o csv -m GET -d "40\n" http://localhost:10040 >"$results_directory/fib40-con.csv" &
- sleep $offset
- hey -z ${duration_sec}s -cpus 2 -c 100 -t 0 -o csv -m GET -d "10\n" http://localhost:10010 >"$results_directory/fib10-con.csv" &
- sleep $((duration_sec + offset + 15))
-
- # Stop the runtime if not in debug mode
- [ "$1" != "-d" ] && kill_runtime
-
- # Generate *.csv and *.dat results
- echo -n "Parsing Results: "
-
- printf "Payload,Success_Rate\n" >>"$results_directory/success.csv"
- printf "Payload,Throughput\n" >>"$results_directory/throughput.csv"
- printf "Payload,p50,p90,p99,p100\n" >>"$results_directory/latency.csv"
-
- deadlines_ms=(2 2 3000 3000)
- payloads=(fib10 fib10-con fib40 fib40-con)
-
- for ((i = 0; i < 4; i++)); do
- # for payload in ${payloads[*]}; do
- payload=${payloads[$i]}
- deadline=${deadlines_ms[$i]}
-
- # Get Number of Requests
- requests=$(($(wc -l <"$results_directory/$payload.csv") - 1))
- ((requests == 0)) && continue
-
- # Calculate Success Rate for csv
- awk -F, '
+ results_directory="$experiment_directory/res/$timestamp/$scheduler"
+ log=log.txt
+
+ mkdir -p "$results_directory"
+ log_environment >> "$results_directory/$log"
+
+ # Start the runtime
+ if [ "$1" != "-d" ]; then
+ SLEDGE_NWORKERS=5 SLEDGE_SCHEDULER=$scheduler PATH="$binary_directory:$PATH" LD_LIBRARY_PATH="$binary_directory:$LD_LIBRARY_PATH" sledgert "$experiment_directory/spec.json" >> "$results_directory/$log" 2>> "$results_directory/$log" &
+ sleep 1
+ else
+ echo "Running under gdb"
+ echo "Running under gdb" >> "$results_directory/$log"
+ fi
+
+ inputs=(40 10)
+ duration_sec=15
+ offset=5
+
+ # Execute workloads long enough for runtime to learn expected execution time
+ echo -n "Running Samples: "
+ for input in ${inputs[*]}; do
+ hey -z ${duration_sec}s -cpus 3 -t 0 -o csv -m GET -d "$input\n" http://localhost:$((10000 + input))
+ done
+ echo "[DONE]"
+ sleep 5
+
+ echo "Running Experiments"
+ # Run each separately
+ hey -z ${duration_sec}s -cpus 4 -c 100 -t 0 -o csv -m GET -d "40\n" http://localhost:10040 > "$results_directory/fib40.csv"
+ hey -z ${duration_sec}s -cpus 4 -c 100 -t 0 -o csv -m GET -d "10\n" http://localhost:10010 > "$results_directory/fib10.csv"
+
+ # Run lower priority first, then higher priority. The lower priority has offsets to ensure it runs the entire time the high priority is trying to run
+ hey -z $((duration_sec + 2 * offset))s -cpus 2 -c 100 -t 0 -o csv -m GET -d "40\n" http://localhost:10040 > "$results_directory/fib40-con.csv" &
+ sleep $offset
+ hey -z ${duration_sec}s -cpus 2 -c 100 -t 0 -o csv -m GET -d "10\n" http://localhost:10010 > "$results_directory/fib10-con.csv" &
+ sleep $((duration_sec + offset + 15))
+
+ # Stop the runtime if not in debug mode
+ [ "$1" != "-d" ] && kill_runtime
+
+ # Generate *.csv and *.dat results
+ echo -n "Parsing Results: "
+
+ printf "Payload,Success_Rate\n" >> "$results_directory/success.csv"
+ printf "Payload,Throughput\n" >> "$results_directory/throughput.csv"
+ printf "Payload,p50,p90,p99,p100\n" >> "$results_directory/latency.csv"
+
+ deadlines_ms=(2 2 3000 3000)
+ payloads=(fib10 fib10-con fib40 fib40-con)
+
+ for ((i = 0; i < 4; i++)); do
+ # for payload in ${payloads[*]}; do
+ payload=${payloads[$i]}
+ deadline=${deadlines_ms[$i]}
+
+ # Get Number of Requests
+ requests=$(($(wc -l < "$results_directory/$payload.csv") - 1))
+ ((requests == 0)) && continue
+
+ # Calculate Success Rate for csv
+ awk -F, '
$7 == 200 && ($1 * 1000) <= '"$deadline"' {ok++}
END{printf "'"$payload"',%3.5f%\n", (ok / (NR - 1) * 100)}
- ' <"$results_directory/$payload.csv" >>"$results_directory/success.csv"
+ ' < "$results_directory/$payload.csv" >> "$results_directory/success.csv"
- # Filter on 200s, convery from s to ms, and sort
- awk -F, '$7 == 200 {print ($1 * 1000)}' <"$results_directory/$payload.csv" |
- sort -g >"$results_directory/$payload-response.csv"
+ # Filter on 200s, convert from s to ms, and sort
+ awk -F, '$7 == 200 {print ($1 * 1000)}' < "$results_directory/$payload.csv" \
+ | sort -g > "$results_directory/$payload-response.csv"
- # Get Number of 200s
- oks=$(wc -l <"$results_directory/$payload-response.csv")
- ((oks == 0)) && continue # If all errors, skip line
+ # Get Number of 200s
+ oks=$(wc -l < "$results_directory/$payload-response.csv")
+ ((oks == 0)) && continue # If all errors, skip line
- # Get Latest Timestamp
- duration=$(tail -n1 "$results_directory/$payload.csv" | cut -d, -f8)
- throughput=$(echo "$oks/$duration" | bc)
- printf "%s,%f\n" "$payload" "$throughput" >>"$results_directory/throughput.csv"
+ # Get Latest Timestamp
+ duration=$(tail -n1 "$results_directory/$payload.csv" | cut -d, -f8)
+ throughput=$(echo "$oks/$duration" | bc)
+ printf "%s,%f\n" "$payload" "$throughput" >> "$results_directory/throughput.csv"
- # Generate Latency Data for csv
- awk '
+ # Generate Latency Data for csv
+ awk '
BEGIN {
sum = 0
p50 = int('"$oks"' * 0.5)
@@ -104,21 +104,21 @@ for scheduler in ${schedulers[*]}; do
NR==p90 {printf "%1.4f%,", $0 / '"$deadline"' * 100}
NR==p99 {printf "%1.4f%,", $0 / '"$deadline"' * 100}
NR==p100 {printf "%1.4f%\n", $0 / '"$deadline"' * 100}
- ' <"$results_directory/$payload-response.csv" >>"$results_directory/latency.csv"
+ ' < "$results_directory/$payload-response.csv" >> "$results_directory/latency.csv"
- # Delete scratch file used for sorting/counting
- # rm -rf "$results_directory/$payload-response.csv"
- done
+ # Delete scratch file used for sorting/counting
+ # rm -rf "$results_directory/$payload-response.csv"
+ done
- # Transform csvs to dat files for gnuplot
- for file in success latency throughput; do
- echo -n "#" >"$results_directory/$file.dat"
- tr ',' ' ' <"$results_directory/$file.csv" | column -t >>"$results_directory/$file.dat"
- done
+ # Transform csvs to dat files for gnuplot
+ for file in success latency throughput; do
+ echo -n "#" > "$results_directory/$file.dat"
+ tr ',' ' ' < "$results_directory/$file.csv" | column -t >> "$results_directory/$file.dat"
+ done
- # Generate gnuplots. Commented out because we don't have *.gnuplots defined
- # generate_gnuplots
+ # Generate gnuplots. Commented out because we don't have *.gnuplots defined
+ # generate_gnuplots
- # Cleanup, if requires
- echo "[DONE]"
+ # Cleanup, if required
+ echo "[DONE]"
done
diff --git a/runtime/tests/fibonacci/run_fib.sh b/runtime/tests/fibonacci/run_fib.sh
index 4463b8a..438b4ec 100755
--- a/runtime/tests/fibonacci/run_fib.sh
+++ b/runtime/tests/fibonacci/run_fib.sh
@@ -5,15 +5,14 @@ ITERS=$3
# before running this benchmark,
# copy fibonacci to fibonacci_native.out
-testeach()
-{
+testeach() {
tmp_cnt=${ITERS}
exe_relpath=$1
echo "${exe_relpath} ($2) for ${tmp_cnt}"
while [ ${tmp_cnt} -gt 0 ]; do
- bench=$(echo $2 | $exe_relpath 2>/dev/null)
+ bench=$(echo $2 | $exe_relpath 2> /dev/null)
tmp_cnt=$((tmp_cnt - 1))
echo "$bench"
done
@@ -25,7 +24,7 @@ MAXNUM=$2
tmp1_cnt=${MAXNUM}
-while [ ${tmp1_cnt} -gt 28 ]; do
+while [ ${tmp1_cnt} -gt 28 ]; do
testeach ./fibonacci_$1.out ${tmp1_cnt}
tmp1_cnt=$((tmp1_cnt - 1))
done
diff --git a/runtime/tests/preemption/debug.sh b/runtime/tests/preemption/debug.sh
index 7c9c027..27a1b63 100755
--- a/runtime/tests/preemption/debug.sh
+++ b/runtime/tests/preemption/debug.sh
@@ -5,15 +5,15 @@
# Also disables pagination and stopping on SIGUSR1
declare project_path="$(
- cd "$(dirname "$1")/../.."
- pwd
+ cd "$(dirname "$1")/../.."
+ pwd
)"
echo $project_path
cd ../../bin
export LD_LIBRARY_PATH="$(pwd):$LD_LIBRARY_PATH"
gdb --eval-command="handle SIGUSR1 nostop" \
- --eval-command="set pagination off" \
- --eval-command="set substitute-path /sledge/runtime $project_path" \
- --eval-command="run ../tests/preemption/test_fibonacci_multiple.json" \
- ./sledgert
+ --eval-command="set pagination off" \
+ --eval-command="set substitute-path /sledge/runtime $project_path" \
+ --eval-command="run ../tests/preemption/test_fibonacci_multiple.json" \
+ ./sledgert
cd ../../tests
diff --git a/test.sh b/test.sh
index c8ca706..3418edb 100755
--- a/test.sh
+++ b/test.sh
@@ -2,8 +2,8 @@
# Test Driver Script
if [[ $0 != "./test.sh" ]]; then
- echo "Must run in same directory as ./test.sh"
- exit 1
+ echo "Must run in same directory as ./test.sh"
+ exit 1
fi
base_dir=$(pwd)