-
Notifications
You must be signed in to change notification settings - Fork 2
/
Copy pathrun_benchmarks.sh
executable file
·132 lines (97 loc) · 3.82 KB
/
run_benchmarks.sh
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
#!/bin/bash
# Jenkins benchmark driver for the concurrent skiplist package.
# Builds the benchmark executable with cabal, runs it under Criterion,
# and uploads the results to a Google Fusion table via hsbencher.
echo "Begin running jenkins benchmark script for concurrent skiplist. First use regression script to build packages:"
# Trace every command for the Jenkins log (NOTE: this also echoes CID/SEC below).
set -x
# Step 0: Configuration options:
# ================================================================================
# Extra arguments are given to "cabal install"
# NOTE(review): $* flattens all args into one string, so quoting inside
# individual arguments is lost when $EXTRAARGS is expanded later — assumes
# callers pass only simple, space-free flags. TODO confirm.
EXTRAARGS=$*
# Fusion table name that results are uploaded into.
TABLENAME=AdaptivelyScalable
# All regressions to be performed by Criterion:
# (passed verbatim to the benchmark executable; relies on word splitting)
REGRESSES="--regress=allocated:iters --regress=bytesCopied:iters --regress=cycles:iters \
--regress=numGcs:iters --regress=mutatorWallSeconds:iters --regress=gcWallSeconds:iters \
--regress=cpuTime:iters "
# Parfunc account LVish uploader project:
# CID=820162629229-kp29aklebt6ucos5a71u8tu3hu8unres.apps.googleusercontent.com
# SEC=pSsMxVAJCFKyWsazuxZVRZwX
# Over limit on [2014.11.12]
# if [[ "$HOSTNAME" =~ cutter ]]; then
# # Google API Project specific to CompactNF
# CID=155144430612-v8nb20thmtg3eflt5bkkl40dssk6glnr.apps.googleusercontent.com
# SEC=9CEIm4AbJt5aSbw_sYpi85gi
# export MACHINECLASS=cutter
# elif [ "$MACHINECLASS" == "swarm" ]; then
# # Using generic uploader because we're over limit:
# # Generic 2:
# CID=546809307027-8tm2lp5gtqg5o3pn3s016gd6467cf7j3.apps.googleusercontent.com
# SEC=148aQ08EPpgkb0DiYVLoT9X2
# else
# Generic 3:
# SECURITY NOTE(review): OAuth client id/secret are hardcoded in a tracked,
# executable script (and leaked again by `set -x` above). These should be
# supplied via environment variables or a credentials file outside the repo.
CID=759282369766-ijonhc4662ot2qos4lgud0e0sltjshlj.apps.googleusercontent.com
SEC=yI8GfZXsHPrW44udqklCHeDH
# fi
# Step 1: Examine environment
# ================================================================================
# Default MACHINECLASS to the short hostname so results are grouped per machine.
if [ "$MACHINECLASS" == "" ]; then
  export MACHINECLASS=$(hostname -s)
fi
# All of these probes are best-effort diagnostics: `|| echo ok` keeps the
# script going on platforms where the tool is missing (e.g. taskset on macOS).
echo "On linux platforms, check CPU affinity:"
taskset -pc $$ || echo ok
echo "Also check load:"
sar 2 2 || echo ok
echo "And who"
who -a || echo ok
# Switch to the top of the repo (fail loudly — everything below is relative):
cd "$(dirname "$0")" || exit 1
# Install dependencies and build the code:
# NOTEST=1 ./.jenkins_script.sh -j
# BUGFIX: plain `echo "\n..."` prints a literal backslash-n under bash;
# use printf for the intended blank line.
printf '\nReturned to benchmarking script.\n'
# CONVENTION: The working directory is passed as the first argument.
CHECKOUT=$1
shift || echo ok
if [ "$CHECKOUT" == "" ]; then
  CHECKOUT=$(pwd)
fi
# GHC version Jenkins should use; default matches the CI fleet.
if [ "$JENKINS_GHC" == "" ]; then
  echo "JENKINS_GHC unset"
  export JENKINS_GHC=7.8.3
fi
echo "Running benchmarks remotely on server $(hostname)"
# Optional site-local helper that puts the right ghc/cabal on PATH.
if [ -f "$HOME/continuous_testing_setup/rn_jenkins_scripts/acquire_ghc.sh" ]; then
  source "$HOME/continuous_testing_setup/rn_jenkins_scripts/acquire_ghc.sh"
fi
# Log exactly which toolchain we ended up with.
which cabal
cabal --version
which ghc
ghc --version
# BKUP=$HOME/criterion_reports/
# Durable backup location for raw CSV results.
BKUP=$HOME/results_backup_adaptively_scalable/
mkdir -p "$BKUP"
# Stash reports from previous runs out of the way (mv warns harmlessly if none).
mkdir -p ./old_reports/
mv report_* ./old_reports/
# Commit depth gives a monotonically increasing id to prefix backup files with.
gitdepth=$(git log --pretty=oneline | wc -l)
# From here on, any failure should abort the run.
set -e
# Step 2 (build): build or acquire HSBencher stuff
# ================================================================================
# For now we just preinstall these hsbencher executables (expected on PATH):
CSVUPLOAD=hsbencher-fusion-upload-csv-0.3.7
CRITUPLOAD=hsbencher-fusion-upload-criterion-0.3.7
# Step 2 (run): Run benchmarks
# ================================================================================
executable=bench-concurrent-skiplist
cabal sandbox init
# Unix timestamp uniquely tags this run's report files.
TAG=$(date +'%s')
echo "Installing benchmark program."
which -a "ghc-$JENKINS_GHC"
# $EXTRAARGS is intentionally unquoted: it carries zero or more extra flags.
cabal install -w "ghc-$JENKINS_GHC" --with-ghc-pkg="ghc-pkg-$JENKINS_GHC" --enable-benchmarks $EXTRAARGS
cabal configure --enable-benchmarks
cabal build "${executable}"
# Vary parameter?
# for ((depth=1; depth<=$MAXTREEHEIGHT; depth++)); do
# NOTE(review): $depth (and $VARIANT below) are only set when the loop above
# is enabled; ${depth:-} makes the "unset means empty" behavior explicit.
REPORT=report_${executable}_${TAG}_${depth:-}
CRITREPORT=$REPORT.crit
CSVREPORT=$REPORT.csv
# $REGRESSES is intentionally unquoted: it is a list of --regress flags.
time "./dist/build/$executable/$executable" --raw "$CRITREPORT" $REGRESSES +RTS -T -s
# Convert to raw .csv file for backup and bulk upload:
$CRITUPLOAD --noupload --csv="$CSVREPORT" --variant="${VARIANT:-}" --args="${depth:-}" "$CRITREPORT"
# Keep a durable copy even if the upload below fails; the copy itself is
# best-effort (backup dir may be on a flaky NFS mount).
cp "$CSVREPORT" "${BKUP}/${gitdepth}_${CSVREPORT}" || echo "Hmm, why did that copy fail?"
$CSVUPLOAD "$CSVREPORT" --fusion-upload --name="$TABLENAME" --clientid="$CID" --clientsecret="$SEC"