#!/bin/sh
#
# Does a bonnie throughput run (500M file and 1K chunksize if the
# BONNIE_FILESIZE and/or BONNIE_CHUNKSIZE variables are not set),
# then massages the output into CSV format with the human-readable
# output preceding it as a "comment" (i.e. #-prefixed).
#
BONNIE_FILESIZE=${BONNIE_FILESIZE:=500M}
BONNIE_CHUNKSIZE=${BONNIE_CHUNKSIZE:=1K}

[ -z "$here" ] && here=`pwd`
. $here/common.bonnie

#
# Sample bonnie throughput output (stderr):
#Version 1.02c       ------Sequential Output------ --Sequential Input- --Random-
#                    -Per Chr- --Block-- -Rewrite- -Per Chr- --Block-- --Seeks--
#Machine   Size:chnk K/sec %CP K/sec %CP K/sec %CP K/sec %CP K/sec %CP  /sec %CP
#           150M:64k           52024  99 88969  99           245492 100  3746 264
#
# Turn the human-readable report into "comment" lines: replace the leading
# machine/size column with "# ", then append a bare "#" line as a separator
# before the CSV data.
filter_stderr()
{
    sed -e 's/^..................../# /g' | awk '{print} END {print "#"}'
}

#
# Sample bonnie throughput output (stdout):
# ",150M:64k,,,52024,99,88969,99,,,245492,100,3746.0,264,,,,,,,,,,,,,"
#
# Collapse the empty CSV fields, then reformat the remaining values into
# fixed-width columns: size:chnk first, followed by alternating throughput
# and CPU-percentage fields.
filter_stdout()
{
    perl -ne '
        chomp;
        s/,+/,/g;
        s/^,//;
        s/,$//;
        @values = split /,/;
        printf "%9s", shift @values;
        for ($i = 0; $i <= $#values; $i++) {
            if ($i % 2) {
                printf ",%4s%%", $values[$i]
            } else {
                printf ",%10s", $values[$i]
            }
        }
        printf "\n";
    '
}

# With any argument, print only the CSV column header (matching the field
# widths used by filter_stdout) and exit.
if [ $# -gt 0 ]; then
    printf "%9s,%10s,%5s,%10s,%5s,%10s,%5s,%10s,%5s\n" size:chnk \
        writeK/s wCPU rewriteK/s rwCPU readK/s rCPU seek/s sCPU
    exit 0
fi

# run_bonnie comes from common.bonnie; -n 0 disables the file-creation
# tests so only the throughput run is measured.
run_bonnie -n 0 -s $BONNIE_FILESIZE:$BONNIE_CHUNKSIZE
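
#
# Example usage (a sketch; the script name "bonnie-throughput" below is
# hypothetical, so substitute whatever name this file is installed under
# in the benchmark harness):
#
#   ./bonnie-throughput header       # any argument: print the CSV header only
#   ./bonnie-throughput              # run with the 500M:1K defaults
#   BONNIE_FILESIZE=1G BONNIE_CHUNKSIZE=64K ./bonnie-throughput
#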