# Does a bonnie throughput run (500M file and 1K chunksize if the
4 # BONNIE_FILESIZE and/or BONNIE_CHUNKSIZE variables are not set),
5 # then massages the output into CSV format with the human-readable
# output preceding it as a "comment" (i.e. #-prefixed).
# Default the run parameters unless the caller already set them in the
# environment: 500M working file, 1K per-I/O chunk size.
# NB: ":=" inside the expansion performs the assignment itself, so the
# idiomatic no-op ":" replaces the redundant VAR=${VAR:=...} self-assignment.
: "${BONNIE_FILESIZE:=500M}"
: "${BONNIE_CHUNKSIZE:=1K}"
14 # Sample bonnie throughput output (stderr):
15 #Version 1.02c ------Sequential Output------ --Sequential Input- --Random-
16 # -Per Chr- --Block-- -Rewrite- -Per Chr- --Block-- --Seeks--
17 #Machine Size:chnk K/sec %CP K/sec %CP K/sec %CP K/sec %CP K/sec %CP /sec %CP
18 # 150M:64k 52024 99 88969 99 245492 100 3746 264
22 sed -e 's/^..................../# /g' | awk '{print} END {print "#"}'
26 # Sample bonnie throughput output (stdout):
27 # ",150M:64k,,,52024,99,88969,99,,,245492,100,3746.0,264,,,,,,,,,,,,,"
31 tr -s ',' | sed -e 's/^,//' -e 's/,$//'
# Emit the CSV column-header row that labels the massaged bonnie fields.
printf '%s\n' "size:chnk,writeK/s,wCPU,rewriteK/s,rwCPU,readK/s,rCPU,seek/s,sCPU"
# Kick off the throughput-only run: -n 0 skips the file-creation tests and
# -s selects the working-file size and per-I/O chunk size.
# Quote the expansion (SC2086) so unusual values can't be word-split/globbed.
run_bonnie -n 0 -s "${BONNIE_FILESIZE}:${BONNIE_CHUNKSIZE}"