# Does a bonnie throughput run (500M file and 1K chunk size if the
# BONNIE_FILESIZE and/or BONNIE_CHUNKSIZE variables are not set),
# then massages the output into CSV format with the human-readable
# output preceding it as a "comment" (i.e. #-prefixed).
# Apply defaults unless the caller already set these in the environment:
# 500M test file, 1K I/O chunk size.  (":-" rather than the redundant
# ":=" form — the explicit assignment on the left already stores the value.)
BONNIE_FILESIZE=${BONNIE_FILESIZE:-500M}
BONNIE_CHUNKSIZE=${BONNIE_CHUNKSIZE:-1K}

# Fall back to the current directory if the harness has not set $here.
# $(...) replaces the deprecated backtick command substitution.
[ -z "$here" ] && here=$(pwd)
15 # Sample bonnie throughput output (stderr):
16 #Version 1.02c ------Sequential Output------ --Sequential Input- --Random-
17 # -Per Chr- --Block-- -Rewrite- -Per Chr- --Block-- --Seeks--
18 #Machine Size:chnk K/sec %CP K/sec %CP K/sec %CP K/sec %CP K/sec %CP /sec %CP
19 # 150M:64k 52024 99 88969 99 245492 100 3746 264
# Turn bonnie's human-readable report into "comment" lines: overwrite the
# fixed-width leading field of every line (presumably the machine-name
# column — TODO confirm against bonnie's layout above) with "# ", then
# have awk pass everything through and emit a lone "#" terminator line.
sed -e 's/^..................../# /g' | awk '{print} END {print "#"}'
27 # Sample bonnie throughput output (stdout):
28 # ",150M:64k,,,52024,99,88969,99,,,245492,100,3746.0,264,,,,,,,,,,,,,"
# Normalize the CSV record: collapse runs of commas to a single comma,
# then strip any leading and trailing comma.
# NOTE(review): fragment — assumes the record is in $_ inside an
# enclosing perl loop not visible here; confirm against the full script.
s/,+/,/g; s/^,//; s/,$//;
# First field (size:chnk) prints right-aligned in 9 columns, no comma.
# @values presumably holds the remaining split CSV fields — verify.
printf "%9s", shift @values;
# Remaining fields alternate rate/%CP pairs: odd indices are CPU
# percentages (4 columns plus '%'), even indices are rates (10 columns),
# matching the header widths printed elsewhere in this script.
for ($i = 0; $i <= $#values; $i++) {
if ($i % 2) { printf ",%4s%%", $values[$i] }
else { printf ",%10s", $values[$i] }
# Emit the column-header line whose field widths (9-char size:chnk,
# then alternating 10-char rate / 5-char CPU columns) line up with the
# perl formatter's output above.
printf "%9s,%10s,%5s,%10s,%5s,%10s,%5s,%10s,%5s\n" size:chnk \
writeK/s wCPU rewriteK/s rwCPU readK/s rCPU seek/s sCPU
# Kick off the benchmark: -n 0 skips the file-creation tests (throughput
# only); -s passes the configured file size and chunk size.  The expansion
# is quoted so unusual values cannot be word-split or glob-expanded.
run_bonnie -n 0 -s "$BONNIE_FILESIZE:$BONNIE_CHUNKSIZE"