HammerHead

Institution SUSE
Client Procs Per Node 20
Client Operating System SUSE
Client Operating System Version SLES 15 SP 1
Client Kernel Version 4.12.14-197.37-default

DATA SERVER

Storage Type NVMe
Volatile Memory 128G
Storage Interface Ethernet
Network Ethernet
Software Version 6
OS Version SLES 15 SP 1

INFORMATION

Client Nodes 10
Client Total Procs 200
Metadata Nodes 12
Metadata Storage Devices 4
Data Nodes 10
Data Storage Devices 4

METADATA

Easy Write 20.85 kIOP/s
Easy Stat 69.34 kIOP/s
Easy Delete 20.87 kIOP/s
Hard Write 9.27 kIOP/s
Hard Read 27.23 kIOP/s
Hard Stat 89.19 kIOP/s
Hard Delete 6.18 kIOP/s

Submitted Files

io500
#!/bin/bash
#
# INSTRUCTIONS:
# This script takes its parameters from the same .ini file as the io500 binary.
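#
# A minimal example .ini is sketched below (hypothetical values; the key
# names mirror the get_ini_* lookups in this script):
#
#   [global]
#   datadir = /mnt/cephfs/datafiles
#   resultdir = ./results
#   drop-caches = False
#
#   [ior-easy]
#   transferSize = 2m
#   blockSize = 9920000m
#
#   [debug]
#   stonewall-time = 300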

function setup_paths {
  # Set the paths to the binaries and how to launch MPI jobs.
  # If you ran ./utilities/prepare.sh successfully, then binaries are in ./bin/
  io500_ior_cmd=$PWD/bin/ior
  io500_mdtest_cmd=$PWD/bin/mdtest
  io500_mdreal_cmd=$PWD/bin/md-real-io
  io500_mpirun="mpiexec"
  io500_mpiargs="-np 200 --hostfile $PWD/hostfile"
}
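
# The hostfile passed to mpiexec above lists one machine per line.  A sketch
# for this 10-node, 20-procs-per-node run (hostnames are assumptions based on
# the "Machine: Linux sr650-1" lines in the results; "slots=" is the Open MPI
# convention for ranks per node):
#
#   sr650-1 slots=20
#   sr650-2 slots=20
#   ...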

function setup_directories {
  local workdir
  local resultdir
  local ts

  # set directories where benchmark files are created and where the results go
  # If you want to set up stripe tuning on your output directories or anything
  # similar, then this is the right place to do it.  This creates the output
  # directories for both the app run and the script run.

  timestamp=$(date +%Y.%m.%d-%H.%M.%S)           # create a uniquifier
  [ $(get_ini_global_param timestamp-datadir True) != "False" ] &&
	ts="$timestamp" || ts="io500"
  # directory where the data will be stored
  workdir=$(get_ini_global_param datadir $PWD/datafiles)/$ts
  io500_workdir=$workdir-scr
  [ $(get_ini_global_param timestamp-resultdir True) != "False" ] &&
	ts="$timestamp" || ts="io500"
  # the directory where the output results will be kept
  resultdir=$(get_ini_global_param resultdir $PWD/results)/$ts
  io500_result_dir=$resultdir-scr

  mkdir -p $workdir-{scr,app} $resultdir-{scr,app}
}
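
# With the defaults above, a run stamped 2020.07.06-02.27.35 creates
#   <datadir>/2020.07.06-02.27.35-{scr,app}    (benchmark data)
#   <resultdir>/2020.07.06-02.27.35-{scr,app}  (output results)
# which matches the /mnt/cephfs/datafiles/2020.07.06-02.27.35-scr paths in
# the results below (datadir was evidently set in the ini for this run).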

# you should not edit anything below this line
set -eo pipefail  # better error handling

io500_ini="${1:-""}"
if [[ -z "$io500_ini" ]]; then
  echo "error: ini file must be specified.  usage: $0 "
  exit 1
fi
if [[ ! -s "$io500_ini" ]]; then
  echo "error: ini file '$io500_ini' not found or empty"
  exit 2
fi

function get_ini_section_param() {
  local section="$1"
  local param="$2"
  local inside=false

  while read -r LINE; do
    LINE=$(sed -e 's/ *#.*//' -e '1s/ *= */=/' <<<$LINE)
    $inside && [[ "$LINE" =~ ^\[.*\] ]] && inside=false && break
    [[ -n "$section" && "$LINE" =~ ^\[$section\] ]] && inside=true && continue
    ! $inside && continue
    #echo $LINE | awk -F = "/^$param/ { print \$2 }"
    if [[ $(echo $LINE | grep "^$param *=" ) != "" ]] ; then
      # echo "$section : $param : $inside : $LINE" >> parsed.txt # debugging
      echo $LINE | sed -e "s/[^=]*=[ \t]*\(.*\)/\1/"
      return
    fi
  done < $io500_ini
  echo ""
}
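
# Example (hypothetical ini contents): if "[ior-easy]" contains
# "transferSize = 2m", then
#   get_ini_section_param ior-easy transferSize
# prints "2m"; a missing key prints the empty string.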

function get_ini_param() {
  local section="$1"
  local param="$2"
  local default="$3"

  # try and get the most-specific param first, then more generic params
  val=$(get_ini_section_param $section $param)
  [ -n "$val" ] || val="$(get_ini_section_param ${section%-*} $param)"
  [ -n "$val" ] || val="$(get_ini_section_param global $param)"

  echo "${val:-$default}" |
  	sed -e 's/[Ff][Aa][Ll][Ss][Ee]/False/' -e 's/[Tt][Rr][Uu][Ee]/True/'
}
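
# Note: ${section%-*} strips the last dash-suffix, so a lookup in section
# "mdtest-easy" falls back to "mdtest" and then to "global" before the
# caller-supplied default is used.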

function get_ini_run_param() {
  local section="$1"
  local default="$2"
  local val

  val=$(get_ini_section_param $section noRun)

  # logic is reversed from "noRun=TRUE" to "run=False"
  [[ $val = [Tt][Rr][Uu][Ee] ]] && echo "False" || echo "$default"
}
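
# Example: "noRun = TRUE" under "[ior-easy]" in the ini makes
#   get_ini_run_param ior-easy True
# print "False", which skips that phase below.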

function get_ini_global_param() {
  local param="$1"
  local default="$2"
  local val

  val=$(get_ini_section_param global $param |
  	sed -e 's/[Ff][Aa][Ll][Ss][Ee]/False/' -e 's/[Tt][Rr][Uu][Ee]/True/')

  echo "${val:-$default}"
}

# does the write phase and enables the subsequent read
io500_run_ior_easy="$(get_ini_run_param ior-easy True)"
# does the creat phase and enables the subsequent stat
io500_run_md_easy="$(get_ini_run_param mdtest-easy True)"
# does the write phase and enables the subsequent read
io500_run_ior_hard="$(get_ini_run_param ior-hard True)"
# does the creat phase and enables the subsequent read
io500_run_md_hard="$(get_ini_run_param mdtest-hard True)"
io500_run_find="$(get_ini_run_param find True)"
io500_run_ior_easy_read="$(get_ini_run_param ior-easy-read True)"
io500_run_md_easy_stat="$(get_ini_run_param mdtest-easy-stat True)"
io500_run_ior_hard_read="$(get_ini_run_param ior-hard-read True)"
io500_run_md_hard_stat="$(get_ini_run_param mdtest-hard-stat True)"
io500_run_md_hard_read="$(get_ini_run_param mdtest-hard-read True)"
# turn this off if you want to just run find by itself
io500_run_md_easy_delete="$(get_ini_run_param mdtest-easy-delete True)"
# turn this off if you want to just run find by itself
io500_run_md_hard_delete="$(get_ini_run_param mdtest-hard-delete True)"
io500_run_mdreal="$(get_ini_run_param mdreal False)"
# Attempt to clean the cache after every benchmark; useful for validating the
# performance results and for testing with a local node.  It uses the
# io500_clean_cache_cmd (can be overwritten); make sure the user can write to
# /proc/sys/vm/drop_caches.
io500_clean_cache="$(get_ini_global_param drop-caches False)"
io500_clean_cache_cmd="$(get_ini_global_param drop-caches-cmd)"
io500_cleanup_workdir="$(get_ini_run_param cleanup False)"
# Stonewalling timer: set to 300 for an official run; set to 0 if you never want to abort.
io500_stonewall_timer=$(get_ini_param debug stonewall-time 300)
# Choose "regular" for an official submission, or "scc" for a Student Cluster
# Competition submission (test cases run for 30 seconds instead of 300).
io500_rules="regular"

# To run this benchmark, find and edit each of these functions.  Please also
# edit the 'extra_description' function to help us collect the required data.
function main {
  setup_directories
  setup_paths
  setup_ior_easy # required if you want a complete score
  setup_ior_hard # required if you want a complete score
  setup_mdt_easy # required if you want a complete score
  setup_mdt_hard # required if you want a complete score
  setup_find     # required if you want a complete score
  setup_mdreal   # optional

  run_benchmarks

  if [[ ! -s "system-information.txt" ]]; then
    echo "Warning: please create a system-information.txt description by"
    echo "copying the information from https://vi4io.org/io500-info-creator/"
  else
    cp "system-information.txt" $io500_result_dir
  fi

  create_tarball
}

function setup_ior_easy {
  local params

  io500_ior_easy_size=$(get_ini_param ior-easy blockSize 9920000m | tr -d m)
  val=$(get_ini_param ior-easy API POSIX)
  [ -n "$val" ] && params+=" -a $val"
  val="$(get_ini_param ior-easy transferSize)"
  [ -n "$val" ] && params+=" -t $val"
  val="$(get_ini_param ior-easy hintsFileName)"
  [ -n "$val" ] && params+=" -U $val"
  val="$(get_ini_param ior-easy posix.odirect)"
  [ "$val" = "True" ] && params+=" --posix.odirect"
  val="$(get_ini_param ior-easy verbosity)"
  if [ -n "$val" ]; then
    for i in $(seq $val); do
      params+=" -v"
    done
  fi
  io500_ior_easy_params="$params"
  echo -n ""
}
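
# For this submission, the flags assembled above yield (in effect, per the
# ior command lines in the results below):
#   ior -w -a POSIX -t 2m -v -b 9920000m ... -D 300
# i.e. blockSize/transferSize from the ini plus the fixed io500 arguments.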

function setup_mdt_easy {
  io500_mdtest_easy_params="-u -L" # unique dir per thread, files only at leaves

  val=$(get_ini_param mdtest-easy n 1000000)
  [ -n "$val" ] && io500_mdtest_easy_files_per_proc="$val"
  val=$(get_ini_param mdtest-easy API POSIX)
  [ -n "$val" ] && io500_mdtest_easy_params+=" -a $val"
  val=$(get_ini_param mdtest-easy posix.odirect)
  [ "$val" = "True" ] && io500_mdtest_easy_params+=" --posix.odirect"
  echo -n ""
}

function setup_ior_hard {
  local params

  io500_ior_hard_api=$(get_ini_param ior-hard API POSIX)
  io500_ior_hard_writes_per_proc="$(get_ini_param ior-hard segmentCount 10000000)"
  val="$(get_ini_param ior-hard hintsFileName)"
  [ -n "$val" ] && params+=" -U $val"
  val="$(get_ini_param ior-hard posix.odirect)"
  [ "$val" = "True" ] && params+=" --posix.odirect"
  val="$(get_ini_param ior-easy verbosity)"
  if [ -n "$val" ]; then
    for i in $(seq $val); do
      params+=" -v"
    done
  fi
  io500_ior_hard_api_specific_options="$params"
  echo -n ""
}

function setup_mdt_hard {
  val=$(get_ini_param mdtest-hard n 1000000)
  [ -n "$val" ] && io500_mdtest_hard_files_per_proc="$val"
  io500_mdtest_hard_api="$(get_ini_param mdtest-hard API POSIX)"
  io500_mdtest_hard_api_specific_options=""
  echo -n ""
}

function setup_find {
  val="$(get_ini_param find external-script)"
  [ -z "$val" ] && io500_find_mpi="True" && io500_find_cmd="$PWD/bin/pfind" ||
    io500_find_cmd="$val"
  # uses stonewalling, run pfind
  io500_find_cmd_args="$(get_ini_param find external-extra-args)"
  echo -n ""
}

function setup_mdreal {
  echo -n ""
}

function run_benchmarks {
  local app_first=$((RANDOM % 100))
  local app_rc=0

  # run the app and C version in random order to try and avoid bias
  (( app_first >= 50 )) && $io500_mpirun $io500_mpiargs $PWD/io500 $io500_ini --timestamp $timestamp || app_rc=$?

  # Important: source the io500_fixed.sh script.  Do not change it. If you
  # discover a need to change it, please email the mailing list to discuss.
  source build/io500-dev/utilities/io500_fixed.sh 2>&1 |
    tee $io500_result_dir/io-500-summary.$timestamp.txt

  (( app_first >= 50 )) && return $app_rc

  echo "The io500.sh was run"
  echo
  echo "Running the C version of the benchmark now"
  # run the app and C version in random order to try and avoid bias
  $io500_mpirun $io500_mpiargs $PWD/io500 $io500_ini --timestamp $timestamp
}

function create_tarball {
  local sourcedir=$(dirname $io500_result_dir)
  local fname=$(basename ${io500_result_dir%-scr})
  local tarball=$sourcedir/io500-$HOSTNAME-$fname.tgz

  cp -v $0 $io500_ini $io500_result_dir
  tar czf $tarball -C $sourcedir $fname-{app,scr}
  echo "Created result tarball $tarball"
}
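
# For example, with io500_result_dir=$PWD/results/2020.07.06-02.27.35-scr this
# packs results/2020.07.06-02.27.35-{app,scr} into
# results/io500-$HOSTNAME-2020.07.06-02.27.35.tgz (paths taken from this run).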

# Information fields: these describe your system hardware.
# Use https://vi4io.org/io500-info-creator/ to generate the hardware
# information that you want to include publicly!
function extra_description {
  # UPDATE: Please add your information into "system-information.txt" by
  # pasting the output of the info-creator.
  # EXAMPLE:
  # io500_info_system_name='xxx'
  # DO NOT ADD IT HERE
  :
}

main
ior_easy_read
IOR-3.3.0+dev: MPI Coordinated Test of Parallel I/O
Began               : Mon Jul  6 02:48:41 2020
Command line        : /IO500/bin/ior -r -R -a POSIX -t 2m -v -b 9920000m -F -i 1 -C -Q 1 -g -G 27 -k -e -o /mnt/cephfs/datafiles/2020.07.06-02.27.35-scr/ior_easy/ior_file_easy -O stoneWallingStatusFile=/mnt/cephfs/datafiles/2020.07.06-02.27.35-scr/ior_easy/stonewall
Machine             : Linux sr650-1
Start time skew across all tasks: 321.14 sec
TestID              : 0
StartTime           : Mon Jul  6 02:48:41 2020
Path                : /mnt/cephfs/datafiles/2020.07.06-02.27.35-scr/ior_easy
FS                  : 61.9 TiB   Used FS: 22.3%   Inodes: 13.9 Mi   Used Inodes: 100.0%
Participating tasks: 200
Using reorderTasks '-C' (useful to avoid read cache in client)

Options: 
api                 : POSIX
apiVersion          : 
test filename       : /mnt/cephfs/datafiles/2020.07.06-02.27.35-scr/ior_easy/ior_file_easy
access              : file-per-process
type                : independent
segments            : 1
ordering in a file  : sequential
ordering inter file : constant task offset
task offset         : 1
nodes               : 10
tasks               : 200
clients per node    : 20
repetitions         : 1
xfersize            : 2 MiB
blocksize           : 9.46 TiB
aggregate filesize  : 1892.09 TiB

Results: 

access    bw(MiB/s)  IOPS       Latency(s)  block(KiB) xfer(KiB)  open(s)    wr/rd(s)   close(s)   total(s)   iter
------    ---------  ----       ----------  ---------- ---------  --------   --------   --------   --------   ----
Commencing read performance test: Mon Jul  6 02:48:52 2020

WARNING: Expected aggregate file size       = 2080374784000000.
WARNING: Stat() of aggregate file size      = 7118992179200.
WARNING: Using actual aggregate bytes moved = 7118992179200.
read      7012       3547.30    0.000193    10158080000 2048.00    11.22      956.95     0.002494   968.17     0   
Max Read:  7012.38 MiB/sec (7353.01 MB/sec)

Summary of all tests:
Operation   Max(MiB)   Min(MiB)  Mean(MiB)     StdDev   Max(OPs)   Min(OPs)  Mean(OPs)     StdDev    Mean(s) Stonewall(s) Stonewall(MiB) Test# #Tasks tPN reps fPP reord reordoff reordrand seed segcnt   blksiz    xsize aggs(MiB)   API RefNum
read         7012.38    7012.38    7012.38       0.00    3506.19    3506.19    3506.19       0.00  968.17361         NA            NA     0    200  20    1   1     1        1         0    0      1 10401873920000  2097152 6789200.0 POSIX      0
Finished            : Mon Jul  6 03:04:49 2020
ior_easy_write
IOR-3.3.0+dev: MPI Coordinated Test of Parallel I/O
Began               : Mon Jul  6 02:21:07 2020
Command line        : /IO500/bin/ior -w -a POSIX -t 2m -v -b 9920000m -F -i 1 -C -Q 1 -g -G 27 -k -e -o /mnt/cephfs/datafiles/2020.07.06-02.27.35-scr/ior_easy/ior_file_easy -O stoneWallingStatusFile=/mnt/cephfs/datafiles/2020.07.06-02.27.35-scr/ior_easy/stonewall -O stoneWallingWearOut=1 -D 300
Machine             : Linux sr650-1
Start time skew across all tasks: 321.14 sec
TestID              : 0
StartTime           : Mon Jul  6 02:21:07 2020
Path                : /mnt/cephfs/datafiles/2020.07.06-02.27.35-scr/ior_easy
FS                  : 64.3 TiB   Used FS: 9.9%   Inodes: 1.6 Mi   Used Inodes: 100.0%
Participating tasks: 200
Using reorderTasks '-C' (useful to avoid read cache in client)

Options: 
api                 : POSIX
apiVersion          : 
test filename       : /mnt/cephfs/datafiles/2020.07.06-02.27.35-scr/ior_easy/ior_file_easy
access              : file-per-process
type                : independent
segments            : 1
ordering in a file  : sequential
ordering inter file : constant task offset
task offset         : 1
nodes               : 10
tasks               : 200
clients per node    : 20
repetitions         : 1
xfersize            : 2 MiB
blocksize           : 9.46 TiB
aggregate filesize  : 1892.09 TiB
stonewallingTime    : 300
stoneWallingWearOut : 1

Results: 

access    bw(MiB/s)  IOPS       Latency(s)  block(KiB) xfer(KiB)  open(s)    wr/rd(s)   close(s)   total(s)   iter
------    ---------  ----       ----------  ---------- ---------  --------   --------   --------   --------   ----
Commencing write performance test: Mon Jul  6 02:21:07 2020
133: stonewalling pairs accessed: 9636
103: stonewalling pairs accessed: 9687
53: stonewalling pairs accessed: 9401
173: stonewalling pairs accessed: 9462
143: stonewalling pairs accessed: 9378
123: stonewalling pairs accessed: 9339
81: stonewalling pairs accessed: 9391
94: stonewalling pairs accessed: 9846
4: stonewalling pairs accessed: 9760
64: stonewalling pairs accessed: 9911
147: stonewalling pairs accessed: 10366
26: stonewalling pairs accessed: 10083
156: stonewalling pairs accessed: 10088
24: stonewalling pairs accessed: 9371
54: stonewalling pairs accessed: 9449
78: stonewalling pairs accessed: 10784
98: stonewalling pairs accessed: 10683
148: stonewalling pairs accessed: 11066
18: stonewalling pairs accessed: 10578
150: stonewalling pairs accessed: 16169
130: stonewalling pairs accessed: 16617
140: stonewalling pairs accessed: 16135
85: stonewalling pairs accessed: 9490
195: stonewalling pairs accessed: 8918
125: stonewalling pairs accessed: 9494
55: stonewalling pairs accessed: 9450
63: stonewalling pairs accessed: 9246
41: stonewalling pairs accessed: 8842
193: stonewalling pairs accessed: 9143
73: stonewalling pairs accessed: 9231
13: stonewalling pairs accessed: 8920
43: stonewalling pairs accessed: 9436
100: stonewalling pairs accessed: 16849
17: stonewalling pairs accessed: 10595
197: stonewalling pairs accessed: 9938
57: stonewalling pairs accessed: 10432
165: stonewalling pairs accessed: 8980
84: stonewalling pairs accessed: 9579
44: stonewalling pairs accessed: 9902
129: stonewalling pairs accessed: 8483
99: stonewalling pairs accessed: 8640
71: stonewalling pairs accessed: 9435
199: stonewalling pairs accessed: 8200
19: stonewalling pairs accessed: 8325
9: stonewalling pairs accessed: 8262
59: stonewalling pairs accessed: 8319
29: stonewalling pairs accessed: 8573
49: stonewalling pairs accessed: 8355
119: stonewalling pairs accessed: 8636
28: stonewalling pairs accessed: 10819
178: stonewalling pairs accessed: 10506
68: stonewalling pairs accessed: 10492
48: stonewalling pairs accessed: 11021
102: stonewalling pairs accessed: 15072
162: stonewalling pairs accessed: 15877
12: stonewalling pairs accessed: 15434
92: stonewalling pairs accessed: 15496
137: stonewalling pairs accessed: 10459
136: stonewalling pairs accessed: 9252
46: stonewalling pairs accessed: 10094
6: stonewalling pairs accessed: 9572
126: stonewalling pairs accessed: 10079
159: stonewalling pairs accessed: 8092
158: stonewalling pairs accessed: 10676
32: stonewalling pairs accessed: 15062
97: stonewalling pairs accessed: 10036
52: stonewalling pairs accessed: 15885
155: stonewalling pairs accessed: 8984
151: stonewalling pairs accessed: 9131
191: stonewalling pairs accessed: 8779
104: stonewalling pairs accessed: 9885
180: stonewalling pairs accessed: 16710
80: stonewalling pairs accessed: 16973
40: stonewalling pairs accessed: 16442
20: stonewalling pairs accessed: 16935
110: stonewalling pairs accessed: 15964
135: stonewalling pairs accessed: 9384
33: stonewalling pairs accessed: 9625
93: stonewalling pairs accessed: 9604
23: stonewalling pairs accessed: 9561
124: stonewalling pairs accessed: 9953
184: stonewalling pairs accessed: 9236
109: stonewalling pairs accessed: 8469
160: stonewalling pairs accessed: 16865
90: stonewalling pairs accessed: 16824
65: stonewalling pairs accessed: 9272
25: stonewalling pairs accessed: 9473
185: stonewalling pairs accessed: 8942
91: stonewalling pairs accessed: 9064
141: stonewalling pairs accessed: 9471
111: stonewalling pairs accessed: 9420
51: stonewalling pairs accessed: 8850
113: stonewalling pairs accessed: 9653
21: stonewalling pairs accessed: 9154
169: stonewalling pairs accessed: 8329
179: stonewalling pairs accessed: 8461
114: stonewalling pairs accessed: 9813
149: stonewalling pairs accessed: 8659
189: stonewalling pairs accessed: 8395
145: stonewalling pairs accessed: 9435
83: stonewalling pairs accessed: 9264
183: stonewalling pairs accessed: 9143
34: stonewalling pairs accessed: 10097
69: stonewalling pairs accessed: 8405
0: stonewalling pairs accessed: 16555
170: stonewalling pairs accessed: 16643
70: stonewalling pairs accessed: 16787
142: stonewalling pairs accessed: 15868
152: stonewalling pairs accessed: 15785
72: stonewalling pairs accessed: 15789
182: stonewalling pairs accessed: 15760
22: stonewalling pairs accessed: 15915
62: stonewalling pairs accessed: 15665
2: stonewalling pairs accessed: 15506
172: stonewalling pairs accessed: 15686
192: stonewalling pairs accessed: 15407
66: stonewalling pairs accessed: 9829
186: stonewalling pairs accessed: 10178
116: stonewalling pairs accessed: 10184
196: stonewalling pairs accessed: 9436
166: stonewalling pairs accessed: 9478
60: stonewalling pairs accessed: 16561
3: stonewalling pairs accessed: 8939
154: stonewalling pairs accessed: 9528
144: stonewalling pairs accessed: 9750
174: stonewalling pairs accessed: 9967
118: stonewalling pairs accessed: 11077
168: stonewalling pairs accessed: 10582
88: stonewalling pairs accessed: 11078
8: stonewalling pairs accessed: 10708
128: stonewalling pairs accessed: 10731
36: stonewalling pairs accessed: 9278
108: stonewalling pairs accessed: 11022
188: stonewalling pairs accessed: 10762
38: stonewalling pairs accessed: 10771
138: stonewalling pairs accessed: 11062
198: stonewalling pairs accessed: 10974
132: stonewalling pairs accessed: 15844
37: stonewalling pairs accessed: 10419
177: stonewalling pairs accessed: 10136
107: stonewalling pairs accessed: 10464
117: stonewalling pairs accessed: 10434
77: stonewalling pairs accessed: 10470
7: stonewalling pairs accessed: 10317
75: stonewalling pairs accessed: 8960
167: stonewalling pairs accessed: 10000
115: stonewalling pairs accessed: 9310
105: stonewalling pairs accessed: 9510
95: stonewalling pairs accessed: 9525
45: stonewalling pairs accessed: 8815
35: stonewalling pairs accessed: 9533
153: stonewalling pairs accessed: 8935
58: stonewalling pairs accessed: 10596
31: stonewalling pairs accessed: 9527
1: stonewalling pairs accessed: 9181
101: stonewalling pairs accessed: 8986
11: stonewalling pairs accessed: 8862
121: stonewalling pairs accessed: 9439
50: stonewalling pairs accessed: 16525
10: stonewalling pairs accessed: 16499
190: stonewalling pairs accessed: 16520
30: stonewalling pairs accessed: 16586
120: stonewalling pairs accessed: 16905
112: stonewalling pairs accessed: 15739
42: stonewalling pairs accessed: 16004
176: stonewalling pairs accessed: 9597
106: stonewalling pairs accessed: 10111
181: stonewalling pairs accessed: 9375
163: stonewalling pairs accessed: 9239
14: stonewalling pairs accessed: 9299
134: stonewalling pairs accessed: 9827
39: stonewalling pairs accessed: 8337
79: stonewalling pairs accessed: 8534
164: stonewalling pairs accessed: 9308
194: stonewalling pairs accessed: 9276
139: stonewalling pairs accessed: 8693
89: stonewalling pairs accessed: 8562
82: stonewalling pairs accessed: 15724
27: stonewalling pairs accessed: 10472
127: stonewalling pairs accessed: 10431
67: stonewalling pairs accessed: 10390
56: stonewalling pairs accessed: 10091
146: stonewalling pairs accessed: 9448
15: stonewalling pairs accessed: 9012
5: stonewalling pairs accessed: 8932
161: stonewalling pairs accessed: 8837
175: stonewalling pairs accessed: 8906
76: stonewalling pairs accessed: 10100
131: stonewalling pairs accessed: 9228
122: stonewalling pairs accessed: 15912
87: stonewalling pairs accessed: 10147
96: stonewalling pairs accessed: 9490
61: stonewalling pairs accessed: 8871
171: stonewalling pairs accessed: 9369
47: stonewalling pairs accessed: 10012
157: stonewalling pairs accessed: 10017
86: stonewalling pairs accessed: 9264
74: stonewalling pairs accessed: 9833
16: stonewalling pairs accessed: 9491
187: stonewalling pairs accessed: 10291
stonewalling pairs accessed min: 8092 max: 16973 -- min data: 15.8 GiB mean data: 21.3 GiB time: 300.1s
WARNING: Expected aggregate file size       = 2080374784000000.
WARNING: Stat() of aggregate file size      = 7118992179200.
WARNING: Using actual aggregate bytes moved = 7118992179200.
WARNING: maybe caused by deadlineForStonewalling
write     12931      6466       0.000071    10158080000 2048.00    0.026234   525.01     0.001342   525.03     0   
Max Write: 12930.97 MiB/sec (13559.10 MB/sec)

Summary of all tests:
Operation   Max(MiB)   Min(MiB)  Mean(MiB)     StdDev   Max(OPs)   Min(OPs)  Mean(OPs)     StdDev    Mean(s) Stonewall(s) Stonewall(MiB) Test# #Tasks tPN reps fPP reord reordoff reordrand seed segcnt   blksiz    xsize aggs(MiB)   API RefNum
write       12930.97   12930.97   12930.97       0.00    6465.49    6465.49    6465.49       0.00  525.03408     300.07      14524.24     0    200  20    1   1     1        1         0    0      1 10401873920000  2097152 6789200.0 POSIX      0
Finished            : Mon Jul  6 02:29:52 2020
ior_hard_read
IOR-3.3.0+dev: MPI Coordinated Test of Parallel I/O
Began               : Mon Jul  6 03:06:45 2020
Command line        : /IO500/bin/ior -r -R -s 500000 -a POSIX -v -i 1 -C -Q 1 -g -G 27 -k -e -t 47008 -b 47008 -o /mnt/cephfs/datafiles/2020.07.06-02.27.35-scr/ior_hard/IOR_file -O stoneWallingStatusFile=/mnt/cephfs/datafiles/2020.07.06-02.27.35-scr/ior_hard/stonewall
Machine             : Linux sr650-1
Start time skew across all tasks: 321.14 sec
TestID              : 0
StartTime           : Mon Jul  6 03:06:45 2020
Path                : /mnt/cephfs/datafiles/2020.07.06-02.27.35-scr/ior_hard
FS                  : 61.9 TiB   Used FS: 22.3%   Inodes: 13.9 Mi   Used Inodes: 100.0%
Participating tasks: 200
Using reorderTasks '-C' (useful to avoid read cache in client)

Options: 
api                 : POSIX
apiVersion          : 
test filename       : /mnt/cephfs/datafiles/2020.07.06-02.27.35-scr/ior_hard/IOR_file
access              : single-shared-file
type                : independent
segments            : 500000
ordering in a file  : sequential
ordering inter file : constant task offset
task offset         : 1
nodes               : 10
tasks               : 200
clients per node    : 20
repetitions         : 1
xfersize            : 47008 bytes
blocksize           : 47008 bytes
aggregate filesize  : 4.28 TiB

Results: 

access    bw(MiB/s)  IOPS       Latency(s)  block(KiB) xfer(KiB)  open(s)    wr/rd(s)   close(s)   total(s)   iter
------    ---------  ----       ----------  ---------- ---------  --------   --------   --------   --------   ----
Commencing read performance test: Mon Jul  6 03:06:45 2020

WARNING: Expected aggregate file size       = 4700800000000.
WARNING: Stat() of aggregate file size      = 1041293011200.
WARNING: Using actual aggregate bytes moved = 1041293011200.
read      11704      261083     84.84       45.91      45.91      0.001829   84.84      0.000370   84.85      0   
Max Read:  11704.16 MiB/sec (12272.70 MB/sec)

Summary of all tests:
Operation   Max(MiB)   Min(MiB)  Mean(MiB)     StdDev   Max(OPs)   Min(OPs)  Mean(OPs)     StdDev    Mean(s) Stonewall(s) Stonewall(MiB) Test# #Tasks tPN reps fPP reord reordoff reordrand seed segcnt   blksiz    xsize aggs(MiB)   API RefNum
read        11704.16   11704.16   11704.16       0.00  261076.79  261076.79  261076.79       0.00   84.84630         NA            NA     0    200  20    1   0     1        1         0    0 500000    47008    47008  993054.4 POSIX      0
Finished            : Mon Jul  6 03:08:10 2020
ior_hard_write
IOR-3.3.0+dev: MPI Coordinated Test of Parallel I/O
Began               : Mon Jul  6 02:36:16 2020
Command line        : /IO500/bin/ior -w -s 500000 -a POSIX -v -i 1 -C -Q 1 -g -G 27 -k -e -t 47008 -b 47008 -o /mnt/cephfs/datafiles/2020.07.06-02.27.35-scr/ior_hard/IOR_file -O stoneWallingStatusFile=/mnt/cephfs/datafiles/2020.07.06-02.27.35-scr/ior_hard/stonewall -O stoneWallingWearOut=1 -D 300
Machine             : Linux sr650-1
Start time skew across all tasks: 321.14 sec
TestID              : 0
StartTime           : Mon Jul  6 02:36:16 2020
Path                : /mnt/cephfs/datafiles/2020.07.06-02.27.35-scr/ior_hard
FS                  : 62.2 TiB   Used FS: 20.6%   Inodes: 10.6 Mi   Used Inodes: 100.0%
Participating tasks: 200
Using reorderTasks '-C' (useful to avoid read cache in client)

Options: 
api                 : POSIX
apiVersion          : 
test filename       : /mnt/cephfs/datafiles/2020.07.06-02.27.35-scr/ior_hard/IOR_file
access              : single-shared-file
type                : independent
segments            : 500000
ordering in a file  : sequential
ordering inter file : constant task offset
task offset         : 1
nodes               : 10
tasks               : 200
clients per node    : 20
repetitions         : 1
xfersize            : 47008 bytes
blocksize           : 47008 bytes
aggregate filesize  : 4.28 TiB
stonewallingTime    : 300
stoneWallingWearOut : 1

Results: 

access    bw(MiB/s)  IOPS       Latency(s)  block(KiB) xfer(KiB)  open(s)    wr/rd(s)   close(s)   total(s)   iter
------    ---------  ----       ----------  ---------- ---------  --------   --------   --------   --------   ----
Commencing write performance test: Mon Jul  6 02:36:16 2020
36: stonewalling pairs accessed: 85889
186: stonewalling pairs accessed: 96605
96: stonewalling pairs accessed: 110439
105: stonewalling pairs accessed: 110719
115: stonewalling pairs accessed: 110643
37: stonewalling pairs accessed: 110263
48: stonewalling pairs accessed: 110725
149: stonewalling pairs accessed: 88150
119: stonewalling pairs accessed: 89375
75: stonewalling pairs accessed: 110643
134: stonewalling pairs accessed: 86427
145: stonewalling pairs accessed: 110583
176: stonewalling pairs accessed: 87199
112: stonewalling pairs accessed: 109297
15: stonewalling pairs accessed: 110491
148: stonewalling pairs accessed: 110754
23: stonewalling pairs accessed: 88128
165: stonewalling pairs accessed: 97859
192: stonewalling pairs accessed: 86581
133: stonewalling pairs accessed: 110289
164: stonewalling pairs accessed: 110503
70: stonewalling pairs accessed: 110754
117: stonewalling pairs accessed: 85622
123: stonewalling pairs accessed: 86117
141: stonewalling pairs accessed: 97331
172: stonewalling pairs accessed: 109481
88: stonewalling pairs accessed: 110581
67: stonewalling pairs accessed: 86893
175: stonewalling pairs accessed: 110728
63: stonewalling pairs accessed: 87437
144: stonewalling pairs accessed: 87067
150: stonewalling pairs accessed: 110621
29: stonewalling pairs accessed: 88125
1: stonewalling pairs accessed: 96140
90: stonewalling pairs accessed: 110622
142: stonewalling pairs accessed: 85638
109: stonewalling pairs accessed: 89799
81: stonewalling pairs accessed: 96467
27: stonewalling pairs accessed: 87157
58: stonewalling pairs accessed: 110711
11: stonewalling pairs accessed: 96115
3: stonewalling pairs accessed: 110347
25: stonewalling pairs accessed: 89195
73: stonewalling pairs accessed: 87126
130: stonewalling pairs accessed: 110716
118: stonewalling pairs accessed: 110582
111: stonewalling pairs accessed: 88699
137: stonewalling pairs accessed: 86349
87: stonewalling pairs accessed: 86890
21: stonewalling pairs accessed: 88466
57: stonewalling pairs accessed: 86453
74: stonewalling pairs accessed: 95574
191: stonewalling pairs accessed: 95363
108: stonewalling pairs accessed: 110375
168: stonewalling pairs accessed: 110720
189: stonewalling pairs accessed: 88437
53: stonewalling pairs accessed: 86351
95: stonewalling pairs accessed: 88582
22: stonewalling pairs accessed: 108692
82: stonewalling pairs accessed: 85808
104: stonewalling pairs accessed: 86083
72: stonewalling pairs accessed: 109747
113: stonewalling pairs accessed: 85623
60: stonewalling pairs accessed: 110502
61: stonewalling pairs accessed: 88160
180: stonewalling pairs accessed: 110690
68: stonewalling pairs accessed: 110461
183: stonewalling pairs accessed: 109809
98: stonewalling pairs accessed: 110582
154: stonewalling pairs accessed: 87525
199: stonewalling pairs accessed: 89108
169: stonewalling pairs accessed: 87063
49: stonewalling pairs accessed: 88574
24: stonewalling pairs accessed: 110258
170: stonewalling pairs accessed: 110706
33: stonewalling pairs accessed: 95761
194: stonewalling pairs accessed: 87406
185: stonewalling pairs accessed: 110711
102: stonewalling pairs accessed: 108283
51: stonewalling pairs accessed: 87446
79: stonewalling pairs accessed: 89255
159: stonewalling pairs accessed: 90145
93: stonewalling pairs accessed: 86790
16: stonewalling pairs accessed: 97215
110: stonewalling pairs accessed: 110625
124: stonewalling pairs accessed: 95669
163: stonewalling pairs accessed: 86778
59: stonewalling pairs accessed: 88771
198: stonewalling pairs accessed: 110711
83: stonewalling pairs accessed: 96051
30: stonewalling pairs accessed: 110712
40: stonewalling pairs accessed: 110712
188: stonewalling pairs accessed: 110711
116: stonewalling pairs accessed: 95858
46: stonewalling pairs accessed: 86525
182: stonewalling pairs accessed: 85904
127: stonewalling pairs accessed: 86871
47: stonewalling pairs accessed: 97888
62: stonewalling pairs accessed: 109249
14: stonewalling pairs accessed: 95672
94: stonewalling pairs accessed: 109810
43: stonewalling pairs accessed: 85941
173: stonewalling pairs accessed: 95284
42: stonewalling pairs accessed: 94782
12: stonewalling pairs accessed: 85602
38: stonewalling pairs accessed: 88809
138: stonewalling pairs accessed: 110690
8: stonewalling pairs accessed: 109301
158: stonewalling pairs accessed: 110618
26: stonewalling pairs accessed: 110436
106: stonewalling pairs accessed: 95669
136: stonewalling pairs accessed: 96475
161: stonewalling pairs accessed: 86881
166: stonewalling pairs accessed: 85883
52: stonewalling pairs accessed: 109282
114: stonewalling pairs accessed: 96702
195: stonewalling pairs accessed: 110711
31: stonewalling pairs accessed: 87986
155: stonewalling pairs accessed: 110603
5: stonewalling pairs accessed: 110725
55: stonewalling pairs accessed: 110713
65: stonewalling pairs accessed: 110713
135: stonewalling pairs accessed: 110716
197: stonewalling pairs accessed: 85982
107: stonewalling pairs accessed: 85558
89: stonewalling pairs accessed: 87762
132: stonewalling pairs accessed: 85740
44: stonewalling pairs accessed: 96966
157: stonewalling pairs accessed: 95523
131: stonewalling pairs accessed: 96393
152: stonewalling pairs accessed: 86294
187: stonewalling pairs accessed: 87062
17: stonewalling pairs accessed: 86365
97: stonewalling pairs accessed: 88194
7: stonewalling pairs accessed: 96590
92: stonewalling pairs accessed: 108634
126: stonewalling pairs accessed: 95453
71: stonewalling pairs accessed: 88629
50: stonewalling pairs accessed: 110686
100: stonewalling pairs accessed: 110718
2: stonewalling pairs accessed: 86085
6: stonewalling pairs accessed: 86836
196: stonewalling pairs accessed: 95630
35: stonewalling pairs accessed: 110686
0: stonewalling pairs accessed: 110686
54: stonewalling pairs accessed: 96575
45: stonewalling pairs accessed: 110625
181: stonewalling pairs accessed: 95292
146: stonewalling pairs accessed: 95795
122: stonewalling pairs accessed: 109513
80: stonewalling pairs accessed: 110757
101: stonewalling pairs accessed: 88738
120: stonewalling pairs accessed: 110757
34: stonewalling pairs accessed: 85721
41: stonewalling pairs accessed: 96433
91: stonewalling pairs accessed: 87760
121: stonewalling pairs accessed: 87371
171: stonewalling pairs accessed: 88246
151: stonewalling pairs accessed: 96479
153: stonewalling pairs accessed: 109708
193: stonewalling pairs accessed: 109925
13: stonewalling pairs accessed: 94831
143: stonewalling pairs accessed: 110246
103: stonewalling pairs accessed: 95331
39: stonewalling pairs accessed: 99100
9: stonewalling pairs accessed: 90787
19: stonewalling pairs accessed: 87396
69: stonewalling pairs accessed: 88697
129: stonewalling pairs accessed: 91027
179: stonewalling pairs accessed: 96725
178: stonewalling pairs accessed: 87776
128: stonewalling pairs accessed: 107034
10: stonewalling pairs accessed: 110656
140: stonewalling pairs accessed: 110647
160: stonewalling pairs accessed: 110647
20: stonewalling pairs accessed: 110725
190: stonewalling pairs accessed: 110741
32: stonewalling pairs accessed: 109734
162: stonewalling pairs accessed: 109484
85: stonewalling pairs accessed: 97978
4: stonewalling pairs accessed: 86746
99: stonewalling pairs accessed: 88657
139: stonewalling pairs accessed: 89013
167: stonewalling pairs accessed: 96238
84: stonewalling pairs accessed: 110570
174: stonewalling pairs accessed: 86033
64: stonewalling pairs accessed: 95885
184: stonewalling pairs accessed: 87149
18: stonewalling pairs accessed: 110725
28: stonewalling pairs accessed: 110736
78: stonewalling pairs accessed: 110521
86: stonewalling pairs accessed: 95215
56: stonewalling pairs accessed: 97130
66: stonewalling pairs accessed: 96219
156: stonewalling pairs accessed: 86486
125: stonewalling pairs accessed: 110618
77: stonewalling pairs accessed: 86755
177: stonewalling pairs accessed: 109715
76: stonewalling pairs accessed: 96051
147: stonewalling pairs accessed: 86431
stonewalling pairs accessed min: 85558 max: 110757 -- min data: 3.7 GiB mean data: 4.3 GiB time: 300.0s
WARNING: Expected aggregate file size       = 4700800000000.
WARNING: Stat() of aggregate file size      = 1041293011200.
WARNING: Using actual aggregate bytes moved = 1041293011200.
WARNING: maybe caused by deadlineForStonewalling
write     2561.58    57142      300.23      45.91      45.91      0.016433   387.66     0.001034   387.67     0   
Max Write: 2561.58 MiB/sec (2686.01 MB/sec)

Summary of all tests:
Operation   Max(MiB)   Min(MiB)  Mean(MiB)     StdDev   Max(OPs)   Min(OPs)  Mean(OPs)     StdDev    Mean(s) Stonewall(s) Stonewall(MiB) Test# #Tasks tPN reps fPP reord reordoff reordrand seed segcnt   blksiz    xsize aggs(MiB)   API RefNum
write        2561.58    2561.58    2561.58       0.00   57139.40   57139.40   57139.40       0.00  387.67296     300.03       2937.48     0    200  20    1   0     1        1         0    0 500000    47008    47008  993054.4 POSIX      0
Finished            : Mon Jul  6 02:42:44 2020
mdtest_easy_delete
-- started at 07/06/2020 03:08:44 --

mdtest-3.3.0+dev was launched with 200 total task(s) on 10 node(s)
Command line used: /IO500/bin/mdtest '-r' '-F' '-P' '-d' '/mnt/cephfs/datafiles/2020.07.06-02.27.35-scr/mdt_easy' '-n' '40000' '-u' '-L' '-a' 'POSIX' '-x' '/mnt/cephfs/datafiles/2020.07.06-02.27.35-scr/mdt_easy-stonewall' '-N' '1'
Path: /mnt/cephfs/datafiles/2020.07.06-02.27.35-scr
FS: 61.9 TiB   Used FS: 22.3%   Inodes: 13.9 Mi   Used Inodes: 100.0%

Nodemap: 10000000001000000000100000000010000000001000000000100000000010000000001000000000100000000010000000001000000000100000000010000000001000000000100000000010000000001000000000100000000010000000001000000000
V-0: Rank   0 Line  2166 Shifting ranks by 1 for each phase.
200 tasks, 8000000 files

SUMMARY rate: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation             :          0.000          0.000          0.000          0.000
   File stat                 :          0.000          0.000          0.000          0.000
   File read                 :          0.000          0.000          0.000          0.000
   File removal              :      20867.034      20866.984      20867.013          0.015
   Tree creation             :          0.000          0.000          0.000          0.000
   Tree removal              :          0.383          0.383          0.383          0.000

SUMMARY time: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation             :          0.000          0.000          0.000          0.000
   File stat                 :          0.000          0.000          0.000          0.000
   File read                 :          0.000          0.000          0.000          0.000
   File removal              :        383.381        383.380        383.380          0.000
   Tree creation             :          0.000          0.000          0.000          0.000
   Tree removal              :          2.614          2.614          2.614          0.000
-- finished at 07/06/2020 03:15:10 --

mdtest_easy_stat
-- started at 07/06/2020 03:04:50 --

mdtest-3.3.0+dev was launched with 200 total task(s) on 10 node(s)
Command line used: /IO500/bin/mdtest '-T' '-F' '-P' '-d' '/mnt/cephfs/datafiles/2020.07.06-02.27.35-scr/mdt_easy' '-n' '40000' '-u' '-L' '-a' 'POSIX' '-x' '/mnt/cephfs/datafiles/2020.07.06-02.27.35-scr/mdt_easy-stonewall' '-N' '1'
Path: /mnt/cephfs/datafiles/2020.07.06-02.27.35-scr
FS: 61.9 TiB   Used FS: 22.3%   Inodes: 13.9 Mi   Used Inodes: 100.0%

Nodemap: 10000000001000000000100000000010000000001000000000100000000010000000001000000000100000000010000000001000000000100000000010000000001000000000100000000010000000001000000000100000000010000000001000000000
V-0: Rank   0 Line  2166 Shifting ranks by 1 for each phase.
200 tasks, 8000000 files

SUMMARY rate: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation             :          0.000          0.000          0.000          0.000
   File stat                 :      69344.263      69344.087      69344.188          0.054
   File read                 :          0.000          0.000          0.000          0.000
   File removal              :          0.000          0.000          0.000          0.000
   Tree creation             :          0.000          0.000          0.000          0.000
   Tree removal              :          0.000          0.000          0.000          0.000

SUMMARY time: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation             :          0.000          0.000          0.000          0.000
   File stat                 :        115.367        115.366        115.367          0.000
   File read                 :          0.000          0.000          0.000          0.000
   File removal              :          0.000          0.000          0.000          0.000
   Tree creation             :          0.000          0.000          0.000          0.000
   Tree removal              :          0.000          0.000          0.000          0.000
-- finished at 07/06/2020 03:06:45 --

mdtest_easy_write
-- started at 07/06/2020 02:29:52 --

mdtest-3.3.0+dev was launched with 200 total task(s) on 10 node(s)
Command line used: /IO500/bin/mdtest '-Y' '-C' '-F' '-P' '-d' '/mnt/cephfs/datafiles/2020.07.06-02.27.35-scr/mdt_easy' '-n' '40000' '-u' '-L' '-a' 'POSIX' '-x' '/mnt/cephfs/datafiles/2020.07.06-02.27.35-scr/mdt_easy-stonewall' '-N' '1' '-W' '300'
Path: /mnt/cephfs/datafiles/2020.07.06-02.27.35-scr
FS: 62.2 TiB   Used FS: 20.6%   Inodes: 3.2 Mi   Used Inodes: 100.0%

Nodemap: 10000000001000000000100000000010000000001000000000100000000010000000001000000000100000000010000000001000000000100000000010000000001000000000100000000010000000001000000000100000000010000000001000000000
V-0: Rank   0 Line  2166 Shifting ranks by 1 for each phase.
200 tasks, 8000000 files
Continue stonewall hit min: 12748 max: 40000 avg: 37943.9 


SUMMARY rate: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation             :      20847.884      20847.834      20847.866          0.016
   File stat                 :          0.000          0.000          0.000          0.000
   File read                 :          0.000          0.000          0.000          0.000
   File removal              :          0.000          0.000          0.000          0.000
   File create (stonewall)   :             NA             NA      25252.349             NA
   Tree creation             :         29.556         29.556         29.556          0.000
   Tree removal              :          0.000          0.000          0.000          0.000

SUMMARY time: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation             :        383.733        383.732        383.732          0.000
   File stat                 :          0.000          0.000          0.000          0.000
   File read                 :          0.000          0.000          0.000          0.000
   File removal              :          0.000          0.000          0.000          0.000
   File create (stonewall)   :             NA             NA        300.518             NA
   Tree creation             :          0.034          0.034          0.034          0.000
   Tree removal              :          0.000          0.000          0.000          0.000
-- finished at 07/06/2020 02:36:16 --

mdtest_hard_delete
-- started at 07/06/2020 03:16:58 --

mdtest-3.3.0+dev was launched with 200 total task(s) on 10 node(s)
Command line used: /IO500/bin/mdtest '-r' '-t' '-F' '-P' '-w' '3901' '-e' '3901' '-d' '/mnt/cephfs/datafiles/2020.07.06-02.27.35-scr/mdt_hard' '-n' '40000' '-x' '/mnt/cephfs/datafiles/2020.07.06-02.27.35-scr/mdt_hard-stonewall' '-a' 'POSIX' '-N' '1'
Path: /mnt/cephfs/datafiles/2020.07.06-02.27.35-scr
FS: 61.9 TiB   Used FS: 22.3%   Inodes: 6.2 Mi   Used Inodes: 100.0%

Nodemap: 10000000001000000000100000000010000000001000000000100000000010000000001000000000100000000010000000001000000000100000000010000000001000000000100000000010000000001000000000100000000010000000001000000000
V-0: Rank   0 Line  2166 Shifting ranks by 1 for each phase.
200 tasks, 8000000 files

SUMMARY rate: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation             :          0.000          0.000          0.000          0.000
   File stat                 :          0.000          0.000          0.000          0.000
   File read                 :          0.000          0.000          0.000          0.000
   File removal              :       6182.797       6182.782       6182.791          0.005
   Tree creation             :          0.000          0.000          0.000          0.000
   Tree removal              :         15.161         15.161         15.161          0.000

SUMMARY time: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation             :          0.000          0.000          0.000          0.000
   File stat                 :          0.000          0.000          0.000          0.000
   File read                 :          0.000          0.000          0.000          0.000
   File removal              :        475.449        475.448        475.449          0.000
   Tree creation             :          0.000          0.000          0.000          0.000
   Tree removal              :          0.066          0.066          0.066          0.000
-- finished at 07/06/2020 03:24:54 --

mdtest_hard_read
-- started at 07/06/2020 03:15:10 --

mdtest-3.3.0+dev was launched with 200 total task(s) on 10 node(s)
Command line used: /IO500/bin/mdtest '-X' '-E' '-t' '-F' '-P' '-w' '3901' '-e' '3901' '-d' '/mnt/cephfs/datafiles/2020.07.06-02.27.35-scr/mdt_hard' '-n' '40000' '-x' '/mnt/cephfs/datafiles/2020.07.06-02.27.35-scr/mdt_hard-stonewall' '-a' 'POSIX' '-N' '1'
Path: /mnt/cephfs/datafiles/2020.07.06-02.27.35-scr
FS: 61.9 TiB   Used FS: 22.3%   Inodes: 6.3 Mi   Used Inodes: 100.0%

Nodemap: 10000000001000000000100000000010000000001000000000100000000010000000001000000000100000000010000000001000000000100000000010000000001000000000100000000010000000001000000000100000000010000000001000000000
V-0: Rank   0 Line  2166 Shifting ranks by 1 for each phase.
200 tasks, 8000000 files

SUMMARY rate: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation             :          0.000          0.000          0.000          0.000
   File stat                 :          0.000          0.000          0.000          0.000
   File read                 :      27228.272      27228.193      27228.240          0.025
   File removal              :          0.000          0.000          0.000          0.000
   Tree creation             :          0.000          0.000          0.000          0.000
   Tree removal              :          0.000          0.000          0.000          0.000

SUMMARY time: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation             :          0.000          0.000          0.000          0.000
   File stat                 :          0.000          0.000          0.000          0.000
   File read                 :        107.962        107.961        107.961          0.000
   File removal              :          0.000          0.000          0.000          0.000
   Tree creation             :          0.000          0.000          0.000          0.000
   Tree removal              :          0.000          0.000          0.000          0.000
-- finished at 07/06/2020 03:16:58 --

mdtest_hard_stat
-- started at 07/06/2020 03:08:10 --

mdtest-3.3.0+dev was launched with 200 total task(s) on 10 node(s)
Command line used: /IO500/bin/mdtest '-T' '-t' '-F' '-P' '-w' '3901' '-e' '3901' '-d' '/mnt/cephfs/datafiles/2020.07.06-02.27.35-scr/mdt_hard' '-n' '40000' '-x' '/mnt/cephfs/datafiles/2020.07.06-02.27.35-scr/mdt_hard-stonewall' '-a' 'POSIX' '-N' '1'
Path: /mnt/cephfs/datafiles/2020.07.06-02.27.35-scr
FS: 61.9 TiB   Used FS: 22.3%   Inodes: 13.9 Mi   Used Inodes: 100.0%

Nodemap: 10000000001000000000100000000010000000001000000000100000000010000000001000000000100000000010000000001000000000100000000010000000001000000000100000000010000000001000000000100000000010000000001000000000
V-0: Rank   0 Line  2166 Shifting ranks by 1 for each phase.
200 tasks, 8000000 files

SUMMARY rate: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation             :          0.000          0.000          0.000          0.000
   File stat                 :      89194.191      89193.617      89193.943          0.187
   File read                 :          0.000          0.000          0.000          0.000
   File removal              :          0.000          0.000          0.000          0.000
   Tree creation             :          0.000          0.000          0.000          0.000
   Tree removal              :          0.000          0.000          0.000          0.000

SUMMARY time: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation             :          0.000          0.000          0.000          0.000
   File stat                 :         32.958         32.957         32.957          0.000
   File read                 :          0.000          0.000          0.000          0.000
   File removal              :          0.000          0.000          0.000          0.000
   Tree creation             :          0.000          0.000          0.000          0.000
   Tree removal              :          0.000          0.000          0.000          0.000
-- finished at 07/06/2020 03:08:43 --

mdtest_hard_write
-- started at 07/06/2020 02:42:44 --

mdtest-3.3.0+dev was launched with 200 total task(s) on 10 node(s)
Command line used: /IO500/bin/mdtest '-Y' '-C' '-t' '-F' '-P' '-w' '3901' '-e' '3901' '-d' '/mnt/cephfs/datafiles/2020.07.06-02.27.35-scr/mdt_hard' '-n' '40000' '-x' '/mnt/cephfs/datafiles/2020.07.06-02.27.35-scr/mdt_hard-stonewall' '-a' 'POSIX' '-N' '1' '-W' '300'
Path: /mnt/cephfs/datafiles/2020.07.06-02.27.35-scr
FS: 61.9 TiB   Used FS: 22.2%   Inodes: 10.9 Mi   Used Inodes: 100.0%

Nodemap: 10000000001000000000100000000010000000001000000000100000000010000000001000000000100000000010000000001000000000100000000010000000001000000000100000000010000000001000000000100000000010000000001000000000
V-0: Rank   0 Line  2166 Shifting ranks by 1 for each phase.
200 tasks, 8000000 files
Continue stonewall hit min: 13994 max: 14698 avg: 14434.4 


SUMMARY rate: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation             :       9272.038       9272.015       9272.029          0.007
   File stat                 :          0.000          0.000          0.000          0.000
   File read                 :          0.000          0.000          0.000          0.000
   File removal              :          0.000          0.000          0.000          0.000
   File create (stonewall)   :             NA             NA       9597.629             NA
   Tree creation             :       4634.590       4634.590       4634.590          0.000
   Tree removal              :          0.000          0.000          0.000          0.000

SUMMARY time: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation             :        317.040        317.039        317.040          0.000
   File stat                 :          0.000          0.000          0.000          0.000
   File read                 :          0.000          0.000          0.000          0.000
   File removal              :          0.000          0.000          0.000          0.000
   File create (stonewall)   :             NA             NA        300.792             NA
   Tree creation             :          0.000          0.000          0.000          0.000
   Tree removal              :          0.000          0.000          0.000          0.000
-- finished at 07/06/2020 02:48:01 --

result_summary
[RESULT] BW   phase 1            ior_easy_write               12.628 GiB/s : time 525.01 seconds
[RESULT] IOPS phase 1         mdtest_easy_write               20.848 kiops : time 383.73 seconds
[RESULT] BW   phase 2            ior_hard_write                2.502 GiB/s : time 387.66 seconds
[RESULT] IOPS phase 2         mdtest_hard_write                9.272 kiops : time 317.04 seconds
[RESULT] IOPS phase 3                      find              275.880 kiops : time  39.65 seconds
[RESULT] BW   phase 3             ior_easy_read                6.848 GiB/s : time 956.95 seconds
[RESULT] IOPS phase 4          mdtest_easy_stat               69.344 kiops : time 115.37 seconds
[RESULT] BW   phase 4             ior_hard_read               11.430 GiB/s : time  84.84 seconds
[RESULT] IOPS phase 5          mdtest_hard_stat               89.194 kiops : time  32.96 seconds
[RESULT] IOPS phase 6        mdtest_easy_delete               20.867 kiops : time 383.38 seconds
[RESULT] IOPS phase 7          mdtest_hard_read               27.228 kiops : time 107.96 seconds
[RESULT] IOPS phase 8        mdtest_hard_delete                6.183 kiops : time 476.14 seconds
[SCORE] Bandwidth 7.05146 GiB/s : IOPS 32.2104 kiops : TOTAL 15.0708
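
Note: each [SCORE] component is the geometric mean of its [RESULT] phases
above, and the total is the geometric mean of the two components:
sqrt(7.05146 x 32.2104) = 15.0708.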