summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorAdam M. Stück <adam@adast.xyz>2022-11-23 11:37:28 +0100
committerAdam M. Stück <adam@adast.xyz>2022-12-16 22:17:18 +0100
commitc3dd7c95e17523e77e8558b9cb57176bc8275bea (patch)
tree8e496dc82dbfe2e07c033f6728dad8a4a6d90d6e
Improved file structure
-rw-r--r--.gitignore1
-rwxr-xr-xarchiver65
-rwxr-xr-xcloud-upload12
-rwxr-xr-xrun-model83
-rwxr-xr-xstart95
-rwxr-xr-xupdate-binary9
6 files changed, 265 insertions, 0 deletions
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..1407935
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1 @@
+cloud-auth
diff --git a/archiver b/archiver
new file mode 100755
index 0000000..1c0bc79
--- /dev/null
+++ b/archiver
@@ -0,0 +1,65 @@
+#!/bin/bash
+
+RESULTS_PATH=$1
+
# Entry point: validate arguments, then aggregate the result files.
main() {
    # Invoked without any argument: show usage and fail.
    if (( $# == 0 )); then
        help
        exit 1
    fi

    case "$1" in
        -h|--help)
            help
            exit
            ;;
    esac

    # The results path must be an existing directory.
    [[ -d "$RESULTS_PATH" ]] || {
        echo "error: invalid results path"
        exit 1
    }

    aggregate_files
}
+
# Merge every result file in $RESULTS_PATH into $RESULTS_PATH/data.csv:
# one header row, then all data rows sorted by time (column 6) and grouped
# by strategy (column 5). Columns are tab-separated. Reads the RESULTS_PATH
# global.
aggregate_files() {
    local OUTPUT=""
    local HEADER_INSERTED=false
    local ENTRY FILE

    # Concatenate all files, keeping only the first file's header row.
    while IFS= read -r -d '' ENTRY
    do
        if [ "$HEADER_INSERTED" = true ]; then
            FILE=$(tail -n +2 "$ENTRY")
        else
            FILE=$(cat "$ENTRY")
        fi

        OUTPUT+="$FILE\n"
        HEADER_INSERTED=true
    done < <(find "$RESULTS_PATH" -maxdepth 1 -type f -print0)
    # NOTE(review): a data.csv left over from a previous run would be picked
    # up by the find above and duplicated — confirm the directory is clean.

    local HEADER ALL_ROWS STRATS STRAT ROWS
    HEADER=$(echo -e "$OUTPUT" | head -n 1)
    ALL_ROWS=$(echo -e "$OUTPUT" | tail -n +2 | sort -t$'\t' -k6,6 -n)
    STRATS=$(echo -e "$ALL_ROWS" | awk -F '\t' '{print $5}' | sort -u)

    # Group rows by strategy. Exact comparison on column 5 replaces the old
    # grep -P "\t$STRAT\t", which treated the strategy name as a regex and
    # could also match identical text sitting in a different column.
    OUTPUT=""
    while read -r STRAT; do
        [ -z "$STRAT" ] && continue
        ROWS=$(echo -e "$ALL_ROWS" | awk -F '\t' -v s="$STRAT" '$5 == s')
        OUTPUT+="$ROWS\n"
    done <<< "$STRATS"

    # Drop the blank line produced by the trailing "\n" accumulator.
    OUTPUT=$(echo -e "$HEADER\n$OUTPUT" | head -n -1)
    echo -e "$OUTPUT" > "$RESULTS_PATH/data.csv"
}
+
# Print usage information for this script. ($0 expands in the heredoc.)
# Fix: the emitted text read "stragey" instead of "strategy".
help() {
cat << EOF
usage: $0 RESULTS-DIR

Aggregate data from search strategy benchmark

Options:
  -h, --help    Show this message
EOF
}
+
+main "$@"
diff --git a/cloud-upload b/cloud-upload
new file mode 100755
index 0000000..3f0a454
--- /dev/null
+++ b/cloud-upload
@@ -0,0 +1,12 @@
#!/bin/bash
# cloud-upload - upload a single file to the shared WebDAV uploads folder.
# Usage: cloud-upload FILE

PREFIX=$(dirname "$0")

# Credentials for curl -u live next to the script, outside the repo
# (cloud-auth is gitignored). Fail loudly if they are missing.
if [[ ! -f "$PREFIX/cloud-auth" ]]; then
    echo "error: missing $PREFIX/cloud-auth" >&2
    exit 1
fi
CLOUD_AUTH=$(<"$PREFIX/cloud-auth")

UPLOAD_PATH="https://cloud.adast.xyz/public.php/webdav/uploads"

if [ -f "$1" ]; then
    FILE=$(basename "$1")
    curl -T "$1" -u "$CLOUD_AUTH" "$UPLOAD_PATH/$FILE"
else
    # Report to stderr and exit non-zero so callers can detect the bad
    # argument (previously this printed to stdout and exited 0).
    echo "error: invalid file $1" >&2
    exit 1
fi
diff --git a/run-model b/run-model
new file mode 100755
index 0000000..fe8aea8
--- /dev/null
+++ b/run-model
@@ -0,0 +1,83 @@
+#!/bin/bash
+#SBATCH --mem=15000
+#SBATCH --output=/dev/null
+#SBATCH --exclude=naples01,dhabi01,rome01
+
+TIME_CMD="/usr/bin/time -f \"@@@%M@@@\" "
+
+# THIS SCRIPT IS NOT INTENDED TO BE RUN MANUALLY, LOOK AT 'start' SCRIPT
+
# Run every (strategy, query) combination of the benchmark for one model and
# write one tab-separated result row per run to $RESULT_PATH.
main () {
    # Positional arguments (supplied by the 'start' script):
    BINARY=$1        # verifier binary to benchmark
    TIME_LIMIT=$2 # in seconds per query
    STRAT_STR="$3"   # '|'-serialized list of search strategies
    MODEL_PATH="$4"  # model dir with model.pnml + ReachabilityCardinality.xml
    RESULT_PATH="$5" # output TSV file path

    # Record the hardware this job ran on, next to the results.
    lscpu > "$RESULT_PATH.hardwareinfo"

    DATE=$(date -u +"%d/%m/%Y %H.%M.%S")
    # Header row; "\t" sequences are expanded by the final echo -e.
    OUTPUT="model\tquery\tsolved\tresult\tstrategy\ttime\tdate\ttime-limit\tmemory\texit-code\tformula\ttimed-out\terror-msg\tdiscoveredStates\texploredStates\texpandedStates\tmaxTokens\tsearchTime"

    MODEL=$(basename "$MODEL_PATH")

    # Rebuild the STRATEGIES array from the serialized string argument.
    deserialize_array STRAT_STR STRATEGIES '|'

    for STRAT in "${STRATEGIES[@]}"; do
        # Queries are numbered 1..16.
        for QUERY in $(seq 16); do
            START_TIME=$(date +%s.%N)
            # $TIME_CMD and $STRAT are deliberately unquoted: both rely on
            # word-splitting ($STRAT may carry extra flags such as
            # "RDFS --seed-offset ..."). stderr is merged so the time/timeout
            # diagnostics can be parsed below.
            TMP=$($TIME_CMD timeout $TIME_LIMIT $BINARY -n -x $QUERY -s $STRAT $MODEL_PATH/model.pnml $MODEL_PATH/ReachabilityCardinality.xml 2>&1)
            RETVAL=$?

            # On failure, keep the tool output as a single-line, tab-free
            # message so it fits in one TSV cell.
            ERROR_MSG=""
            if [[ "$RETVAL" -ne 0 ]]; then
                ERROR_MSG=$(echo "$TMP" | tr -d '\n' | sed 's|\t| |g')
            fi

            END_TIME=$(date +%s.%N)
            TIME=$(echo "$END_TIME - $START_TIME" | bc -l)
            # bc prints fractions below 1 without a leading zero; add it back.
            if [[ $TIME =~ ^[.] ]]; then
                TIME="0$TIME"
            fi

            # Peak memory emitted by /usr/bin/time between the @@@ markers.
            MEM_USED=$(grep -oP "(?<=@@@).*(?=@@@)" <<< "$TMP")

            # 'timeout' makes /usr/bin/time report status 124 on expiry.
            TIMED_OUT="FALSE"
            if grep -qm 1 'Command exited with non-zero status 124' <<< "$TMP"; then
                TIMED_OUT="TRUE"
            fi

            FORMULA="FALSE"
            if grep -qm 1 'FORMULA' <<< "$TMP"; then
                FORMULA="TRUE"
            fi

            SOLVED="FALSE"
            if grep -qm 1 'was solved' <<< "$TMP"; then
                SOLVED="TRUE"
            fi

            # Three-valued result: TRUE / FALSE / NULL (not answered).
            RESULT="NULL"
            if grep -qm 1 'Query is satisfied.' <<< "$TMP"; then
                RESULT="TRUE"
            elif grep -qm 1 'Query is NOT satisfied.' <<< "$TMP"; then
                RESULT="FALSE"
            fi

            # Search statistics parsed from the verifier's output; empty when
            # the corresponding line is absent.
            DISCOVERED_STATES=$(echo "$TMP" | grep -m 1 'discovered states:' | grep -Eo '[0-9]+$')
            EXPLORED_STATES=$(echo "$TMP" | grep -m 1 'explored states:' | grep -Eo '[0-9]+$')
            EXPANDED_STATES=$(echo "$TMP" | grep -m 1 'expanded states:' | grep -Eo '[0-9]+$')
            MAX_TOKENS=$(echo "$TMP" | grep -m 1 'max tokens:' | grep -Eo '[0-9]+$')
            SEARCH_TIME=$(echo "$TMP" | grep -m 1 'search time taken:' | grep -Eo "[0-9]+\.[0-9]+")
            OUTPUT+="\n$MODEL\t$QUERY\t$SOLVED\t$RESULT\t$STRAT\t$TIME\t$DATE\t$TIME_LIMIT\t$MEM_USED\t$RETVAL\t$FORMULA\t$TIMED_OUT\t$ERROR_MSG\t$DISCOVERED_STATES\t$EXPLORED_STATES\t$EXPANDED_STATES\t$MAX_TOKENS\t$SEARCH_TIME"
        done
    done

    echo -e "$OUTPUT" > "$RESULT_PATH"
}
+
# Split the serialized string held in the variable named $1 into the array
# named $2, using delimiter $3 (defaults to \x01, matching serialize_array
# in the 'start' script).
deserialize_array() {
    local -n _out_arr="${2}"
    local IFS="${3:-$'\x01'}"
    read -r -a _out_arr <<<"${!1}"
}
+
+main "$@"
diff --git a/start b/start
new file mode 100755
index 0000000..596d81c
--- /dev/null
+++ b/start
@@ -0,0 +1,95 @@
#!/bin/bash

# SETUP — tweak these values to configure the benchmark.

TIME_LIMIT=300 # In seconds, per query
NUM_PROBLEMS=1350 # First n problems from models path (e.g. MCC2022/)

# Search strategies to benchmark; extra verifier flags may follow the name
# (the whole string is word-split when passed to the binary).
STRATEGIES=(
    "RDFS --seed-offset 5760351"
    "BestFS"
    "DFS"
    "BFS"
)

# DO NOT CHANGE ANYTHING BELOW THIS POINT

# Directory this script (and its sibling scripts/binary) lives in.
PIPELINE_PATH=$(dirname "$0")
+
# Parse CLI arguments, create a timestamped output directory, snapshot the
# pipeline, and dispatch one benchmark job per model.
main() {
    if [ $# -eq 0 ]; then
        help; exit 1
    fi

    USE_SBATCH="false"

    case "$1" in
        -h|--help)
            help
            exit
            ;;
        -s|--sbatch)
            USE_SBATCH="true"
            MODELS_PATH="$2"
            OUTPUT_PATH="$3"
            ;;
        *)
            MODELS_PATH="$1"
            OUTPUT_PATH="$2"
            ;;
    esac

    if [[ ! -d "$MODELS_PATH" ]]; then
        echo "error: invalid models path"
        exit 1
    fi

    # Timestamped run directory; start_jobs reads this global too.
    OUTPUT_PATH="$OUTPUT_PATH/$(date -u +"%d-%m-%YT%H.%M.%S")"
    mkdir -p "$OUTPUT_PATH/pipeline"
    # NOTE(review): this copies $PIPELINE_PATH itself (by basename) into
    # $OUTPUT_PATH, not into the 'pipeline' subdirectory created just above —
    # confirm whether "$OUTPUT_PATH/pipeline" was the intended destination.
    cp -r "$PIPELINE_PATH" "$OUTPUT_PATH"
    start_jobs $USE_SBATCH "$MODELS_PATH"
}
+
# Launch run-model once per model directory, via sbatch or locally.
# Arguments: $1 = "true" to submit through sbatch, $2 = models directory.
# Reads globals: STRATEGIES, TIME_LIMIT, NUM_PROBLEMS, OUTPUT_PATH,
# PIPELINE_PATH. Writes global STRAT_STR (via serialize_array).
start_jobs() {
    USE_SBATCH=$1
    MODELS_PATH=$2

    STRAT_LEN=${#STRATEGIES[@]}
    serialize_array STRATEGIES STRAT_STR "|"

    # First NUM_PROBLEMS model dirs, sorted by name. -mindepth 1 excludes
    # the models dir itself explicitly; the previous 'tail -n+2' relied on
    # find emitting the starting directory first.
    ALL_MODELS=$(find "$MODELS_PATH" -mindepth 1 -type d -printf '%f\n' | sort | head -n "$NUM_PROBLEMS")

    # Worst-case wall time per job, in minutes (16 queries per strategy).
    TOTAL_TIME_LIMIT=$(echo "($TIME_LIMIT * $STRAT_LEN * 16) / 60 + 1" | bc)

    for MODEL in $ALL_MODELS; do
        if [ "$USE_SBATCH" == "true" ]; then
            sbatch --time "$TOTAL_TIME_LIMIT" "$PIPELINE_PATH/run-model" "$PIPELINE_PATH/verifypn-linux64" "$TIME_LIMIT" "$STRAT_STR" "$MODELS_PATH/$MODEL" "$OUTPUT_PATH/$MODEL.csv"
        else
            timeout "${TOTAL_TIME_LIMIT}m" "$PIPELINE_PATH/run-model" "$PIPELINE_PATH/verifypn-linux64" "$TIME_LIMIT" "$STRAT_STR" "$MODELS_PATH/$MODEL" "$OUTPUT_PATH/$MODEL.csv"
        fi
    done
}
+
# Join all elements of the array named $1 into the string variable named $2,
# separated by $3 (defaults to \x01). Both targets are accessed by nameref,
# so the result lands in the caller's scope.
serialize_array() {
    declare -n _src="${1}"
    declare -n _dst="${2}"
    local IFS="${3:-$'\x01'}"
    # shellcheck disable=SC2034 # _dst writes through the nameref for the caller
    _dst="${_src[*]}" # "${arr[*]}" joins on the first IFS character
}
+
# Print usage information, including where to tweak the benchmark settings.
# ($0 expands inside the heredoc.)
help() {
cat << USAGE
usage: $0 [OPTIONS] MODELS-PATH OUTPUT-PATH

Run Tapaal/MCC search strategy benchmark.

Number of models to run, time limit and search strategies can be
configured in the top of this script.

Options:
  -h, --help    Show this message
  -s, --sbatch  Run on the cluster, instead of locally.
USAGE
}
+
+main "$@"
diff --git a/update-binary b/update-binary
new file mode 100755
index 0000000..9713994
--- /dev/null
+++ b/update-binary
@@ -0,0 +1,9 @@
#!/bin/bash
# update-binary - download the latest verifypn binary from the shared cloud
# folder into this script's directory and make it executable.

PREFIX=$(dirname "$0")

# Credentials for curl -u live next to the script, outside the repo
# (cloud-auth is gitignored). Fail loudly if they are missing.
if [[ ! -f "$PREFIX/cloud-auth" ]]; then
    echo "error: missing $PREFIX/cloud-auth" >&2
    exit 1
fi
CLOUD_AUTH=$(<"$PREFIX/cloud-auth")

BIN_PATH="https://cloud.adast.xyz/public.php/webdav/uploads"
BIN_NAME="verifypn-linux64"

# -f makes curl fail on HTTP errors; without it a 404 error page would be
# saved — and chmod +x'ed below — as the binary.
if ! curl -u "$CLOUD_AUTH" -sf "$BIN_PATH/$BIN_NAME" --output "$PREFIX/$BIN_NAME"; then
    echo "error: failed to download $BIN_NAME" >&2
    exit 1
fi
chmod +x "$PREFIX/$BIN_NAME"