HammerHead

Institution SUSE
Client Procs Per Node
Client Operating System SUSE
Client Operating System Version SLES 15 SP 1
Client Kernel Version 4.12.14-197.37-default

DATA SERVER

Storage Type NVMe
Volatile Memory 128G
Storage Interface Ethernet
Network Ethernet
Software Version 6
OS Version SLES 15 SP 1

INFORMATION

Client Nodes 10
Client Total Procs 220
Metadata Nodes 12
Metadata Storage Devices 4
Data Nodes 10
Data Storage Devices 4

METADATA

Easy Write 26.72 kIOP/s
Easy Stat 79.74 kIOP/s
Easy Delete 20.79 kIOP/s
Hard Write 10.00 kIOP/s
Hard Read 27.65 kIOP/s
Hard Stat 83.66 kIOP/s
Hard Delete 5.75 kIOP/s

Submitted Files

io500
#!/bin/bash
#
# INSTRUCTIONS:
# This script takes its parameters from the same .ini file as io500 binary.

function setup_paths {
  # Define how MPI jobs are launched and where the benchmark binaries live.
  # Binaries land in ./bin/ after a successful ./utilities/prepare.sh run.
  io500_mpirun="mpiexec"
  # 220 ranks spread over the hosts listed in ./hostfile
  io500_mpiargs="-np 220 --hostfile $PWD/hostfile"
  io500_ior_cmd=$PWD/bin/ior
  io500_mdtest_cmd=$PWD/bin/mdtest
  io500_mdreal_cmd=$PWD/bin/md-real-io
}

function setup_directories {
  # Create the benchmark data and result directories (both the "-scr"
  # shell-script and "-app" C-app variants) and publish their paths via
  # the io500_workdir / io500_result_dir globals.  This is also the right
  # place for stripe tuning or similar per-directory setup.
  local workdir
  local resultdir
  local ts

  # Timestamped names keep repeated runs apart; either directory can be
  # pinned to a fixed "io500" name through the ini file.
  timestamp=$(date +%Y.%m.%d-%H.%M.%S)   # global: reused by run_benchmarks
  if [ $(get_ini_global_param timestamp-datadir True) != "False" ]; then
    ts="$timestamp"
  else
    ts="io500"
  fi
  # directory where the benchmark data files are created
  workdir=$(get_ini_global_param datadir $PWD/datafiles)/$ts
  io500_workdir=$workdir-scr
  if [ $(get_ini_global_param timestamp-resultdir True) != "False" ]; then
    ts="$timestamp"
  else
    ts="io500"
  fi
  # directory where the output results are kept
  resultdir=$(get_ini_global_param resultdir $PWD/results)/$ts
  io500_result_dir=$resultdir-scr

  mkdir -p $workdir-{scr,app} $resultdir-{scr,app}
}

# you should not edit anything below this line
set -eo pipefail  # better error handling

# The ini file is the single required positional argument.
io500_ini="${1:-""}"
if [[ -z "$io500_ini" ]]; then
  # BUG FIX: the usage string previously ended at "$0 " without naming
  # the expected argument.
  echo "error: ini file must be specified.  usage: $0 <io500.ini>"
  exit 1
fi
if [[ ! -s "$io500_ini" ]]; then
  echo "error: ini file '$io500_ini' not found or empty"
  exit 2
fi

function get_ini_section_param() {
  # Print the value of key $2 ("param") found in ini section $1 ("section")
  # of the file named by the global $io500_ini; print an empty line if the
  # key is not present in that section.
  local section="$1"
  local param="$2"
  local inside=false   # true while scanning lines of the requested section

  # -r: do not mangle backslashes; default IFS still trims edge whitespace
  while read -r LINE; do
    # strip trailing comments and normalize "key = value" to "key=value"
    LINE=$(sed -e 's/ *#.*//' -e '1s/ *= */=/' <<<"$LINE")
    # BUG FIX: this check used the quoted literal "[.*]" before, which never
    # matches a real section header, so the scan ran past the end of the
    # requested section and could return a value from a later section.
    $inside && [[ "$LINE" =~ ^\[.*\] ]] && inside=false && break
    [[ -n "$section" && "$LINE" =~ "[$section]" ]] && inside=true && continue
    ! $inside && continue
    if [[ $(echo $LINE | grep "^$param *=" ) != "" ]] ; then
      # strip everything up to and including '=' plus leading whitespace
      echo $LINE | sed -e "s/[^=]*=[ \t]*\(.*\)/\1/"
      return
    fi
  done < "$io500_ini"
  echo ""
}

function get_ini_param() {
  # Look up key $2 ("param") with scope fallback: try section $1 first,
  # then $1 with its last "-suffix" stripped (e.g. ior-easy-read -> ior-easy),
  # then [global]; finally fall back to $3 ("default").  True/false spelling
  # is normalized to "True"/"False" on the way out.
  local section="$1"
  local param="$2"
  local default="$3"
  # BUG FIX: val was previously assigned without 'local' and leaked into
  # the global scope of the script.
  local val

  # try and get the most-specific param first, then more generic params
  val=$(get_ini_section_param "$section" "$param")
  [ -n "$val" ] || val="$(get_ini_section_param "${section%-*}" "$param")"
  [ -n "$val" ] || val="$(get_ini_section_param global "$param")"

  echo "${val:-$default}" |
  	sed -e 's/[Ff][Aa][Ll][Ss][Ee]/False/' -e 's/[Tt][Rr][Uu][Ee]/True/'
}

function get_ini_run_param() {
  # Decide whether a benchmark phase should run.  The ini expresses this
  # negatively ("noRun=TRUE" in section $1); this inverts it, printing
  # "False" when the phase is disabled and $2 (the default) otherwise.
  local phase_section="$1"
  local fallback="$2"
  local no_run

  no_run=$(get_ini_section_param "$phase_section" noRun)

  case "$no_run" in
    [Tt][Rr][Uu][Ee]) echo "False" ;;
    *) echo "$fallback" ;;
  esac
}

function get_ini_global_param() {
  # Fetch key $1 from the [global] ini section, normalizing true/false
  # capitalization to "True"/"False"; print $2 when the ini leaves it unset.
  local key="$1"
  local fallback="$2"
  local raw

  raw=$(get_ini_section_param global "$key" |
  	sed -e 's/[Ff][Aa][Ll][Ss][Ee]/False/' -e 's/[Tt][Rr][Uu][Ee]/True/')

  echo "${raw:-$fallback}"
}

# does the write phase and enables the subsequent read
io500_run_ior_easy="$(get_ini_run_param ior-easy True)"
# does the creat phase and enables the subsequent stat
io500_run_md_easy="$(get_ini_run_param mdtest-easy True)"
# does the write phase and enables the subsequent read
io500_run_ior_hard="$(get_ini_run_param ior-hard True)"
# does the creat phase and enables the subsequent read
io500_run_md_hard="$(get_ini_run_param mdtest-hard True)"
io500_run_find="$(get_ini_run_param find True)"
io500_run_ior_easy_read="$(get_ini_run_param ior-easy-read True)"
io500_run_md_easy_stat="$(get_ini_run_param mdtest-easy-stat True)"
io500_run_ior_hard_read="$(get_ini_run_param ior-hard-read True)"
# BUG FIX: the next two flags previously read the mdtest-easy-stat section,
# so the *hard* stat/read phases could never be configured independently.
io500_run_md_hard_stat="$(get_ini_run_param mdtest-hard-stat True)"
io500_run_md_hard_read="$(get_ini_run_param mdtest-hard-read True)"
# turn this off if you want to just run find by itself
io500_run_md_easy_delete="$(get_ini_run_param mdtest-easy-delete True)"
# turn this off if you want to just run find by itself
# (a duplicate of this assignment was removed here)
io500_run_md_hard_delete="$(get_ini_run_param mdtest-hard-delete True)"
io500_run_mdreal="$(get_ini_run_param mdreal False)"
# attempt to clean the cache after every benchmark, useful for validating the performance results and for testing with a local node; it uses the io500_clean_cache_cmd (can be overwritten); make sure the user can write to /proc/sys/vm/drop_caches
io500_clean_cache="$(get_ini_global_param drop-caches False)"
io500_clean_cache_cmd="$(get_ini_global_param drop-caches-cmd)"
io500_cleanup_workdir="$(get_ini_run_param cleanup)"
# Stonewalling timer, set to 300 to be an official run; set to 0, if you never want to abort...
io500_stonewall_timer=$(get_ini_param debug stonewall-time 300)
# Choose regular for an official regular submission or scc for a Student Cluster Competition submission to execute the test cases for 30 seconds instead of 300 seconds
io500_rules="regular"

# to run this benchmark, find and edit each of these functions.  Please
# also edit the 'extra_description' function to help us collect the required data.
function main {
  # Top-level driver: prepare directories and binary paths, configure every
  # benchmark phase, run the benchmarks, then package the results.
  setup_directories
  setup_paths
  setup_ior_easy # required if you want a complete score
  setup_ior_hard # required if you want a complete score
  setup_mdt_easy # required if you want a complete score
  setup_mdt_hard # required if you want a complete score
  setup_find     # required if you want a complete score
  setup_mdreal   # optional

  run_benchmarks

  # A valid submission needs a system description next to the results.
  if [[ -s "system-information.txt" ]]; then
    cp "system-information.txt" $io500_result_dir
  else
    echo "Warning: please create a system-information.txt description by"
    echo "copying the information from https://vi4io.org/io500-info-creator/"
  fi

  create_tarball
}

function setup_ior_easy {
  # Configure the ior "easy" phase (file-per-process sequential I/O) from
  # the [ior-easy] ini section.
  local params

  # block size per process; the trailing "m" is stripped here
  io500_ior_easy_size=$(get_ini_param ior-easy blockSize 9920000m | tr -d m)
  val=$(get_ini_param ior-easy API POSIX)
  if [ -n "$val" ]; then
    params+=" -a $val"
  fi
  val="$(get_ini_param ior-easy transferSize)"
  if [ -n "$val" ]; then
    params+=" -t $val"
  fi
  val="$(get_ini_param ior-easy hintsFileName)"
  if [ -n "$val" ]; then
    params+=" -U $val"
  fi
  val="$(get_ini_param ior-easy posix.odirect)"
  if [ "$val" = "True" ]; then
    params+=" --posix.odirect"
  fi
  # one "-v" per requested verbosity level
  val="$(get_ini_param ior-easy verbosity)"
  if [ -n "$val" ]; then
    local i=0
    while [ "$i" -lt "$val" ]; do
      params+=" -v"
      i=$((i + 1))
    done
  fi
  io500_ior_easy_params="$params"
  echo -n ""
}

function setup_mdt_easy {
  # Configure the mdtest "easy" phase from the [mdtest-easy] ini section:
  # -u gives each rank a unique directory, -L keeps files only at leaves.
  io500_mdtest_easy_params="-u -L"

  val=$(get_ini_param mdtest-easy n 1000000)
  if [ -n "$val" ]; then
    io500_mdtest_easy_files_per_proc="$val"
  fi
  val=$(get_ini_param mdtest-easy API POSIX)
  if [ -n "$val" ]; then
    io500_mdtest_easy_params+=" -a $val"
  fi
  val=$(get_ini_param mdtest-easy posix.odirect)
  if [ "$val" = "True" ]; then
    io500_mdtest_easy_params+=" --posix.odirect"
  fi
  echo -n ""
}

function setup_ior_hard {
  # Configure the ior "hard" phase (all ranks interleaving small writes
  # into one shared file) from the [ior-hard] ini section.
  local params

  io500_ior_hard_api=$(get_ini_param ior-hard API POSIX)
  io500_ior_hard_writes_per_proc="$(get_ini_param ior-hard segmentCount 10000000)"
  val="$(get_ini_param ior-hard hintsFileName)"
  [ -n "$val" ] && params+=" -U $val"
  val="$(get_ini_param ior-hard posix.odirect)"
  [ "$val" = "True" ] && params+=" --posix.odirect"
  # BUG FIX: verbosity was previously read from the [ior-easy] section
  # (copy-paste error), so [ior-hard] verbosity settings were ignored.
  val="$(get_ini_param ior-hard verbosity)"
  if [ -n "$val" ]; then
    # one "-v" per requested verbosity level
    for i in $(seq $val); do
      params+=" -v"
    done
  fi
  io500_ior_hard_api_specific_options="$params"
  echo -n ""
}

function setup_mdt_hard {
  # Configure the mdtest "hard" phase (all ranks share a single directory)
  # from the [mdtest-hard] ini section.
  val=$(get_ini_param mdtest-hard n 1000000)
  if [ -n "$val" ]; then
    io500_mdtest_hard_files_per_proc="$val"
  fi
  io500_mdtest_hard_api="$(get_ini_param mdtest-hard API POSIX)"
  io500_mdtest_hard_api_specific_options=""
  echo -n ""
}

function setup_find {
  # Select the find implementation: the bundled MPI pfind by default, or an
  # external script when one is configured in the [find] ini section.
  val="$(get_ini_param find external-script)"
  if [ -z "$val" ]; then
    io500_find_mpi="True"
    io500_find_cmd="$PWD/bin/pfind"
  else
    io500_find_cmd="$val"
  fi
  # extra arguments forwarded to the chosen find command (uses stonewalling)
  io500_find_cmd_args="$(get_ini_param find external-extra-args)"
  echo -n ""
}

function setup_mdreal {
  # Placeholder for optional md-real-io configuration; intentionally a no-op.
  :
}

function run_benchmarks {
  # Runs both benchmark implementations: the compiled C "io500" app and the
  # io500_fixed.sh shell driver.  Which goes first is a 50/50 coin flip so
  # neither systematically benefits from a warm cache.
  local app_first=$((RANDOM % 100))
  local app_rc=0

  # run the app and C version in random order to try and avoid bias
  # If the app goes first, its exit status is captured so it can be
  # propagated after the shell run completes.
  # NOTE(review): when app_first < 50 the (( )) test itself fails and sets
  # app_rc=1 here; harmless, since app_rc is only returned on the
  # app-first path below.
  (( app_first >= 50 )) && $io500_mpirun $io500_mpiargs $PWD/io500 $io500_ini --timestamp $timestamp || app_rc=$?

  # Important: source the io500_fixed.sh script.  Do not change it. If you
  # discover a need to change it, please email the mailing list to discuss.
  # Output (stdout+stderr) is mirrored into the result directory via tee.
  source build/io500-dev/utilities/io500_fixed.sh 2>&1 |
    tee $io500_result_dir/io-500-summary.$timestamp.txt

  # App already ran first: report its exit status and stop here.
  (( $app_first >= 50 )) && return $app_rc

  echo "The io500.sh was run"
  echo
  echo "Running the C version of the benchmark now"
  # run the app and C version in random order to try and avoid bias
  $io500_mpirun $io500_mpiargs $PWD/io500 $io500_ini --timestamp $timestamp
}

create_tarball() {
  # Bundle the "-app" and "-scr" result directories, plus the exact script
  # and ini file used for the run, into a single submission tarball.
  # BUG FIX: expansions were previously unquoted and broke on result paths
  # containing whitespace.
  local sourcedir
  local fname
  local tarball

  sourcedir=$(dirname "$io500_result_dir")
  fname=$(basename "${io500_result_dir%-scr}")
  tarball=$sourcedir/io500-$HOSTNAME-$fname.tgz

  # keep a copy of this script and the ini alongside the results
  cp -v -- "$0" "$io500_ini" "$io500_result_dir"
  tar czf "$tarball" -C "$sourcedir" "$fname-app" "$fname-scr"
  echo "Created result tarball $tarball"
}

# Information fields; these provide information about your system hardware
# Use https://vi4io.org/io500-info-creator/ to generate information about
# your hardware that you want to include publicly!
function extra_description {
  # Intentionally empty: system details belong in "system-information.txt"
  # (paste the output of https://vi4io.org/io500-info-creator/ there),
  # e.g. io500_info_system_name='xxx' — do not add such fields here.
  true
}

# Kick off the full benchmark run (driver function defined above).
main
ior_easy_read
IOR-3.3.0+dev: MPI Coordinated Test of Parallel I/O
Began               : Sat Jul 11 11:27:50 2020
Command line        : /IO500/bin/ior -r -R -a POSIX -t 2m -v -b 9920000m -F -i 1 -C -Q 1 -g -G 27 -k -e -o /mnt/cephfs/datafiles/2020.07.11-10.09.37-scr/ior_easy/ior_file_easy -O stoneWallingStatusFile=/mnt/cephfs/datafiles/2020.07.11-10.09.37-scr/ior_easy/stonewall
Machine             : Linux sr650-1
Start time skew across all tasks: 321.10 sec
TestID              : 0
StartTime           : Sat Jul 11 11:27:50 2020
Path                : /mnt/cephfs/datafiles/2020.07.11-10.09.37-scr/ior_easy
FS                  : 62.3 TiB   Used FS: 22.1%   Inodes: 23.2 Mi   Used Inodes: 100.0%
Participating tasks: 220
Using reorderTasks '-C' (useful to avoid read cache in client)

Options: 
api                 : POSIX
apiVersion          : 
test filename       : /mnt/cephfs/datafiles/2020.07.11-10.09.37-scr/ior_easy/ior_file_easy
access              : file-per-process
type                : independent
segments            : 1
ordering in a file  : sequential
ordering inter file : constant task offset
task offset         : 1
nodes               : 10
tasks               : 220
clients per node    : 22
repetitions         : 1
xfersize            : 2 MiB
blocksize           : 9.46 TiB
aggregate filesize  : 2081.30 TiB

Results: 

access    bw(MiB/s)  IOPS       Latency(s)  block(KiB) xfer(KiB)  open(s)    wr/rd(s)   close(s)   total(s)   iter
------    ---------  ----       ----------  ---------- ---------  --------   --------   --------   --------   ----
Commencing read performance test: Sat Jul 11 11:27:50 2020

WARNING: Expected aggregate file size       = 2288412262400000.
WARNING: Stat() of aggregate file size      = 7166975016960.
WARNING: Using actual aggregate bytes moved = 7166975016960.
read      6946       3475.47    0.000198    10158080000 2048.00    0.641994   983.31     0.002551   983.96     0   
Max Read:  6946.40 MiB/sec (7283.83 MB/sec)

Summary of all tests:
Operation   Max(MiB)   Min(MiB)  Mean(MiB)     StdDev   Max(OPs)   Min(OPs)  Mean(OPs)     StdDev    Mean(s) Stonewall(s) Stonewall(MiB) Test# #Tasks tPN reps fPP reord reordoff reordrand seed segcnt   blksiz    xsize aggs(MiB)   API RefNum
read         6946.40    6946.40    6946.40       0.00    3473.20    3473.20    3473.20       0.00  983.95684         NA            NA     0    220  22    1   1     1        1         0    0      1 10401873920000  2097152 6834960.0 POSIX      0
Finished            : Sat Jul 11 11:44:14 2020
ior_easy_write
IOR-3.3.0+dev: MPI Coordinated Test of Parallel I/O
Began               : Sat Jul 11 11:01:26 2020
Command line        : /IO500/bin/ior -w -a POSIX -t 2m -v -b 9920000m -F -i 1 -C -Q 1 -g -G 27 -k -e -o /mnt/cephfs/datafiles/2020.07.11-10.09.37-scr/ior_easy/ior_file_easy -O stoneWallingStatusFile=/mnt/cephfs/datafiles/2020.07.11-10.09.37-scr/ior_easy/stonewall -O stoneWallingWearOut=1 -D 300
Machine             : Linux sr650-1
Start time skew across all tasks: 321.10 sec
TestID              : 0
StartTime           : Sat Jul 11 11:01:26 2020
Path                : /mnt/cephfs/datafiles/2020.07.11-10.09.37-scr/ior_easy
FS                  : 62.8 TiB   Used FS: 19.4%   Inodes: 11.4 Mi   Used Inodes: 100.0%
Participating tasks: 220
Using reorderTasks '-C' (useful to avoid read cache in client)

Options: 
api                 : POSIX
apiVersion          : 
test filename       : /mnt/cephfs/datafiles/2020.07.11-10.09.37-scr/ior_easy/ior_file_easy
access              : file-per-process
type                : independent
segments            : 1
ordering in a file  : sequential
ordering inter file : constant task offset
task offset         : 1
nodes               : 10
tasks               : 220
clients per node    : 22
repetitions         : 1
xfersize            : 2 MiB
blocksize           : 9.46 TiB
aggregate filesize  : 2081.30 TiB
stonewallingTime    : 300
stoneWallingWearOut : 1

Results: 

access    bw(MiB/s)  IOPS       Latency(s)  block(KiB) xfer(KiB)  open(s)    wr/rd(s)   close(s)   total(s)   iter
------    ---------  ----       ----------  ---------- ---------  --------   --------   --------   --------   ----
Commencing write performance test: Sat Jul 11 11:01:26 2020
182: stonewalling pairs accessed: 15410
152: stonewalling pairs accessed: 15323
177: stonewalling pairs accessed: 9583
26: stonewalling pairs accessed: 8363
141: stonewalling pairs accessed: 10012
79: stonewalling pairs accessed: 8864
194: stonewalling pairs accessed: 8640
137: stonewalling pairs accessed: 9746
30: stonewalling pairs accessed: 14190
27: stonewalling pairs accessed: 9628
47: stonewalling pairs accessed: 9637
175: stonewalling pairs accessed: 8873
60: stonewalling pairs accessed: 14031
105: stonewalling pairs accessed: 8834
70: stonewalling pairs accessed: 14284
185: stonewalling pairs accessed: 8717
35: stonewalling pairs accessed: 9001
215: stonewalling pairs accessed: 8886
20: stonewalling pairs accessed: 14395
45: stonewalling pairs accessed: 8848
15: stonewalling pairs accessed: 8989
55: stonewalling pairs accessed: 8952
28: stonewalling pairs accessed: 9449
48: stonewalling pairs accessed: 9558
78: stonewalling pairs accessed: 9408
158: stonewalling pairs accessed: 9551
1: stonewalling pairs accessed: 10005
54: stonewalling pairs accessed: 8712
104: stonewalling pairs accessed: 8641
153: stonewalling pairs accessed: 7710
12: stonewalling pairs accessed: 15257
120: stonewalling pairs accessed: 14172
180: stonewalling pairs accessed: 14122
75: stonewalling pairs accessed: 8799
25: stonewalling pairs accessed: 8885
90: stonewalling pairs accessed: 14065
155: stonewalling pairs accessed: 8968
101: stonewalling pairs accessed: 9875
121: stonewalling pairs accessed: 10043
149: stonewalling pairs accessed: 8843
29: stonewalling pairs accessed: 8621
193: stonewalling pairs accessed: 7602
13: stonewalling pairs accessed: 7564
189: stonewalling pairs accessed: 8972
53: stonewalling pairs accessed: 7766
102: stonewalling pairs accessed: 15311
122: stonewalling pairs accessed: 15430
162: stonewalling pairs accessed: 14919
57: stonewalling pairs accessed: 9770
127: stonewalling pairs accessed: 9651
87: stonewalling pairs accessed: 9667
197: stonewalling pairs accessed: 9660
34: stonewalling pairs accessed: 8680
214: stonewalling pairs accessed: 8680
204: stonewalling pairs accessed: 8413
154: stonewalling pairs accessed: 8371
24: stonewalling pairs accessed: 8718
164: stonewalling pairs accessed: 8623
83: stonewalling pairs accessed: 7645
43: stonewalling pairs accessed: 7841
203: stonewalling pairs accessed: 7613
73: stonewalling pairs accessed: 7606
3: stonewalling pairs accessed: 7413
23: stonewalling pairs accessed: 7920
0: stonewalling pairs accessed: 14068
150: stonewalling pairs accessed: 14433
170: stonewalling pairs accessed: 14457
140: stonewalling pairs accessed: 14037
207: stonewalling pairs accessed: 9675
130: stonewalling pairs accessed: 14298
190: stonewalling pairs accessed: 14121
10: stonewalling pairs accessed: 14381
178: stonewalling pairs accessed: 9320
98: stonewalling pairs accessed: 9457
218: stonewalling pairs accessed: 9409
58: stonewalling pairs accessed: 9416
188: stonewalling pairs accessed: 9556
138: stonewalling pairs accessed: 9537
81: stonewalling pairs accessed: 10003
171: stonewalling pairs accessed: 9930
71: stonewalling pairs accessed: 10067
211: stonewalling pairs accessed: 10137
33: stonewalling pairs accessed: 7608
183: stonewalling pairs accessed: 7766
2: stonewalling pairs accessed: 15317
213: stonewalling pairs accessed: 7800
132: stonewalling pairs accessed: 15335
133: stonewalling pairs accessed: 7725
72: stonewalling pairs accessed: 15103
62: stonewalling pairs accessed: 14753
52: stonewalling pairs accessed: 15023
82: stonewalling pairs accessed: 14726
92: stonewalling pairs accessed: 15325
46: stonewalling pairs accessed: 8353
135: stonewalling pairs accessed: 8935
6: stonewalling pairs accessed: 8336
36: stonewalling pairs accessed: 8281
16: stonewalling pairs accessed: 8279
65: stonewalling pairs accessed: 9042
195: stonewalling pairs accessed: 8942
186: stonewalling pairs accessed: 8289
85: stonewalling pairs accessed: 8942
125: stonewalling pairs accessed: 9028
156: stonewalling pairs accessed: 8362
126: stonewalling pairs accessed: 8418
96: stonewalling pairs accessed: 8373
176: stonewalling pairs accessed: 8370
174: stonewalling pairs accessed: 8617
200: stonewalling pairs accessed: 14245
106: stonewalling pairs accessed: 8093
115: stonewalling pairs accessed: 8931
40: stonewalling pairs accessed: 14097
41: stonewalling pairs accessed: 10113
161: stonewalling pairs accessed: 10043
134: stonewalling pairs accessed: 8660
179: stonewalling pairs accessed: 8866
159: stonewalling pairs accessed: 8984
199: stonewalling pairs accessed: 8896
139: stonewalling pairs accessed: 8860
9: stonewalling pairs accessed: 8889
129: stonewalling pairs accessed: 8755
59: stonewalling pairs accessed: 8804
5: stonewalling pairs accessed: 9000
210: stonewalling pairs accessed: 14209
160: stonewalling pairs accessed: 14193
38: stonewalling pairs accessed: 9323
123: stonewalling pairs accessed: 7728
80: stonewalling pairs accessed: 14104
108: stonewalling pairs accessed: 9383
88: stonewalling pairs accessed: 9306
128: stonewalling pairs accessed: 9467
198: stonewalling pairs accessed: 9446
91: stonewalling pairs accessed: 10028
114: stonewalling pairs accessed: 8615
4: stonewalling pairs accessed: 8647
84: stonewalling pairs accessed: 8658
124: stonewalling pairs accessed: 8701
64: stonewalling pairs accessed: 8370
143: stonewalling pairs accessed: 7571
113: stonewalling pairs accessed: 7624
109: stonewalling pairs accessed: 8863
63: stonewalling pairs accessed: 7927
173: stonewalling pairs accessed: 7768
32: stonewalling pairs accessed: 15223
22: stonewalling pairs accessed: 14764
103: stonewalling pairs accessed: 7695
212: stonewalling pairs accessed: 15207
50: stonewalling pairs accessed: 14433
66: stonewalling pairs accessed: 8477
100: stonewalling pairs accessed: 14150
86: stonewalling pairs accessed: 8327
196: stonewalling pairs accessed: 8344
44: stonewalling pairs accessed: 8614
163: stonewalling pairs accessed: 7490
187: stonewalling pairs accessed: 9611
147: stonewalling pairs accessed: 9637
37: stonewalling pairs accessed: 9711
145: stonewalling pairs accessed: 8874
77: stonewalling pairs accessed: 9647
7: stonewalling pairs accessed: 9624
107: stonewalling pairs accessed: 9370
110: stonewalling pairs accessed: 14157
165: stonewalling pairs accessed: 8924
167: stonewalling pairs accessed: 9610
56: stonewalling pairs accessed: 8380
94: stonewalling pairs accessed: 8693
95: stonewalling pairs accessed: 9086
11: stonewalling pairs accessed: 9980
21: stonewalling pairs accessed: 10059
151: stonewalling pairs accessed: 10257
51: stonewalling pairs accessed: 9947
192: stonewalling pairs accessed: 15152
39: stonewalling pairs accessed: 8906
169: stonewalling pairs accessed: 8850
205: stonewalling pairs accessed: 9007
99: stonewalling pairs accessed: 8878
69: stonewalling pairs accessed: 8699
89: stonewalling pairs accessed: 8769
119: stonewalling pairs accessed: 8910
112: stonewalling pairs accessed: 15036
142: stonewalling pairs accessed: 15436
42: stonewalling pairs accessed: 15386
17: stonewalling pairs accessed: 9609
217: stonewalling pairs accessed: 9676
117: stonewalling pairs accessed: 9487
216: stonewalling pairs accessed: 8326
68: stonewalling pairs accessed: 9483
118: stonewalling pairs accessed: 9179
148: stonewalling pairs accessed: 9574
168: stonewalling pairs accessed: 9520
208: stonewalling pairs accessed: 9489
18: stonewalling pairs accessed: 9317
144: stonewalling pairs accessed: 8623
14: stonewalling pairs accessed: 8597
219: stonewalling pairs accessed: 8775
74: stonewalling pairs accessed: 8633
166: stonewalling pairs accessed: 8328
206: stonewalling pairs accessed: 8270
136: stonewalling pairs accessed: 8271
181: stonewalling pairs accessed: 10060
184: stonewalling pairs accessed: 8673
209: stonewalling pairs accessed: 8861
31: stonewalling pairs accessed: 10025
131: stonewalling pairs accessed: 10037
191: stonewalling pairs accessed: 9947
201: stonewalling pairs accessed: 9987
93: stonewalling pairs accessed: 7667
49: stonewalling pairs accessed: 8948
76: stonewalling pairs accessed: 8250
146: stonewalling pairs accessed: 8255
8: stonewalling pairs accessed: 9548
172: stonewalling pairs accessed: 15534
157: stonewalling pairs accessed: 9653
116: stonewalling pairs accessed: 8140
111: stonewalling pairs accessed: 9898
202: stonewalling pairs accessed: 15285
61: stonewalling pairs accessed: 10094
19: stonewalling pairs accessed: 8909
67: stonewalling pairs accessed: 9649
97: stonewalling pairs accessed: 9650
stonewalling pairs accessed min: 7413 max: 15534 -- min data: 14.5 GiB mean data: 19.7 GiB time: 300.1s
WARNING: Expected aggregate file size       = 2288412262400000.
WARNING: Stat() of aggregate file size      = 7166975016960.
WARNING: Using actual aggregate bytes moved = 7166975016960.
WARNING: maybe caused by deadlineForStonewalling
write     13190      6596       0.000091    10158080000 2048.00    0.043101   518.13     0.001541   518.17     0   
Max Write: 13190.49 MiB/sec (13831.23 MB/sec)

Summary of all tests:
Operation   Max(MiB)   Min(MiB)  Mean(MiB)     StdDev   Max(OPs)   Min(OPs)  Mean(OPs)     StdDev    Mean(s) Stonewall(s) Stonewall(MiB) Test# #Tasks tPN reps fPP reord reordoff reordrand seed segcnt   blksiz    xsize aggs(MiB)   API RefNum
write       13190.49   13190.49   13190.49       0.00    6595.24    6595.24    6595.24       0.00  518.17349     300.06      14795.60     0    220  22    1   1     1        1         0    0      1 10401873920000  2097152 6834960.0 POSIX      0
Finished            : Sat Jul 11 11:10:04 2020
ior_hard_read
IOR-3.3.0+dev: MPI Coordinated Test of Parallel I/O
Began               : Sat Jul 11 11:46:05 2020
Command line        : /IO500/bin/ior -r -R -s 500000 -a POSIX -v -i 1 -C -Q 1 -g -G 27 -k -e -t 47008 -b 47008 -o /mnt/cephfs/datafiles/2020.07.11-10.09.37-scr/ior_hard/IOR_file -O stoneWallingStatusFile=/mnt/cephfs/datafiles/2020.07.11-10.09.37-scr/ior_hard/stonewall
Machine             : Linux sr650-1
Start time skew across all tasks: 321.10 sec
TestID              : 0
StartTime           : Sat Jul 11 11:46:05 2020
Path                : /mnt/cephfs/datafiles/2020.07.11-10.09.37-scr/ior_hard
FS                  : 62.3 TiB   Used FS: 22.1%   Inodes: 23.2 Mi   Used Inodes: 100.0%
Participating tasks: 220
Using reorderTasks '-C' (useful to avoid read cache in client)

Options: 
api                 : POSIX
apiVersion          : 
test filename       : /mnt/cephfs/datafiles/2020.07.11-10.09.37-scr/ior_hard/IOR_file
access              : single-shared-file
type                : independent
segments            : 500000
ordering in a file  : sequential
ordering inter file : constant task offset
task offset         : 1
nodes               : 10
tasks               : 220
clients per node    : 22
repetitions         : 1
xfersize            : 47008 bytes
blocksize           : 47008 bytes
aggregate filesize  : 4.70 TiB

Results: 

access    bw(MiB/s)  IOPS       Latency(s)  block(KiB) xfer(KiB)  open(s)    wr/rd(s)   close(s)   total(s)   iter
------    ---------  ----       ----------  ---------- ---------  --------   --------   --------   --------   ----
Commencing read performance test: Sat Jul 11 11:46:05 2020

WARNING: Expected aggregate file size       = 5170880000000.
WARNING: Stat() of aggregate file size      = 947936063360.
WARNING: Using actual aggregate bytes moved = 947936063360.
read      12424      277315     72.71       45.91      45.91      0.051364   72.72      0.003099   72.77      0   
Max Read:  12423.79 MiB/sec (13027.29 MB/sec)

Summary of all tests:
Operation   Max(MiB)   Min(MiB)  Mean(MiB)     StdDev   Max(OPs)   Min(OPs)  Mean(OPs)     StdDev    Mean(s) Stonewall(s) Stonewall(MiB) Test# #Tasks tPN reps fPP reord reordoff reordrand seed segcnt   blksiz    xsize aggs(MiB)   API RefNum
read        12423.79   12423.79   12423.79       0.00  277129.28  277129.28  277129.28       0.00   72.76539         NA            NA     0    220  22    1   0     1        1         0    0 500000    47008    47008  904022.2 POSIX      0
Finished            : Sat Jul 11 11:47:17 2020
ior_hard_write
IOR-3.3.0+dev: MPI Coordinated Test of Parallel I/O
Began               : Sat Jul 11 11:15:34 2020
Command line        : /IO500/bin/ior -w -s 500000 -a POSIX -v -i 1 -C -Q 1 -g -G 27 -k -e -t 47008 -b 47008 -o /mnt/cephfs/datafiles/2020.07.11-10.09.37-scr/ior_hard/IOR_file -O stoneWallingStatusFile=/mnt/cephfs/datafiles/2020.07.11-10.09.37-scr/ior_hard/stonewall -O stoneWallingWearOut=1 -D 300
Machine             : Linux sr650-1
Start time skew across all tasks: 321.10 sec
TestID              : 0
StartTime           : Sat Jul 11 11:15:34 2020
Path                : /mnt/cephfs/datafiles/2020.07.11-10.09.37-scr/ior_hard
FS                  : 62.6 TiB   Used FS: 20.6%   Inodes: 19.5 Mi   Used Inodes: 100.0%
Participating tasks: 220
Using reorderTasks '-C' (useful to avoid read cache in client)

Options: 
api                 : POSIX
apiVersion          : 
test filename       : /mnt/cephfs/datafiles/2020.07.11-10.09.37-scr/ior_hard/IOR_file
access              : single-shared-file
type                : independent
segments            : 500000
ordering in a file  : sequential
ordering inter file : constant task offset
task offset         : 1
nodes               : 10
tasks               : 220
clients per node    : 22
repetitions         : 1
xfersize            : 47008 bytes
blocksize           : 47008 bytes
aggregate filesize  : 4.70 TiB
stonewallingTime    : 300
stoneWallingWearOut : 1

Results: 

access    bw(MiB/s)  IOPS       Latency(s)  block(KiB) xfer(KiB)  open(s)    wr/rd(s)   close(s)   total(s)   iter
------    ---------  ----       ----------  ---------- ---------  --------   --------   --------   --------   ----
Commencing write performance test: Sat Jul 11 11:15:34 2020
166: stonewalling pairs accessed: 70206
46: stonewalling pairs accessed: 77121
124: stonewalling pairs accessed: 72747
186: stonewalling pairs accessed: 87102
132: stonewalling pairs accessed: 72047
15: stonewalling pairs accessed: 88707
110: stonewalling pairs accessed: 89169
25: stonewalling pairs accessed: 88014
131: stonewalling pairs accessed: 88812
205: stonewalling pairs accessed: 88732
147: stonewalling pairs accessed: 77626
55: stonewalling pairs accessed: 89737
16: stonewalling pairs accessed: 70171
216: stonewalling pairs accessed: 87756
117: stonewalling pairs accessed: 91532
10: stonewalling pairs accessed: 78011
211: stonewalling pairs accessed: 90054
52: stonewalling pairs accessed: 88323
107: stonewalling pairs accessed: 81868
164: stonewalling pairs accessed: 78155
56: stonewalling pairs accessed: 70213
38: stonewalling pairs accessed: 77582
27: stonewalling pairs accessed: 91661
194: stonewalling pairs accessed: 70410
142: stonewalling pairs accessed: 79383
158: stonewalling pairs accessed: 88654
0: stonewalling pairs accessed: 70106
36: stonewalling pairs accessed: 70413
149: stonewalling pairs accessed: 71771
68: stonewalling pairs accessed: 70104
187: stonewalling pairs accessed: 72488
120: stonewalling pairs accessed: 89123
9: stonewalling pairs accessed: 70497
32: stonewalling pairs accessed: 78120
218: stonewalling pairs accessed: 89243
127: stonewalling pairs accessed: 91532
39: stonewalling pairs accessed: 71521
188: stonewalling pairs accessed: 88988
138: stonewalling pairs accessed: 89465
157: stonewalling pairs accessed: 74129
2: stonewalling pairs accessed: 70183
47: stonewalling pairs accessed: 78192
18: stonewalling pairs accessed: 71123
140: stonewalling pairs accessed: 70660
82: stonewalling pairs accessed: 89280
65: stonewalling pairs accessed: 77385
90: stonewalling pairs accessed: 69484
72: stonewalling pairs accessed: 91047
108: stonewalling pairs accessed: 89421
195: stonewalling pairs accessed: 87316
12: stonewalling pairs accessed: 78012
125: stonewalling pairs accessed: 90312
159: stonewalling pairs accessed: 79402
129: stonewalling pairs accessed: 88687
169: stonewalling pairs accessed: 70842
146: stonewalling pairs accessed: 70587
181: stonewalling pairs accessed: 90060
123: stonewalling pairs accessed: 90312
44: stonewalling pairs accessed: 89897
135: stonewalling pairs accessed: 90312
153: stonewalling pairs accessed: 89169
163: stonewalling pairs accessed: 72018
13: stonewalling pairs accessed: 70658
83: stonewalling pairs accessed: 72500
162: stonewalling pairs accessed: 77458
203: stonewalling pairs accessed: 88594
185: stonewalling pairs accessed: 77564
33: stonewalling pairs accessed: 72239
112: stonewalling pairs accessed: 87858
155: stonewalling pairs accessed: 78573
63: stonewalling pairs accessed: 78500
54: stonewalling pairs accessed: 78399
71: stonewalling pairs accessed: 78648
213: stonewalling pairs accessed: 71462
22: stonewalling pairs accessed: 88129
214: stonewalling pairs accessed: 88445
137: stonewalling pairs accessed: 78501
60: stonewalling pairs accessed: 70286
170: stonewalling pairs accessed: 77617
207: stonewalling pairs accessed: 91520
88: stonewalling pairs accessed: 71296
154: stonewalling pairs accessed: 70758
189: stonewalling pairs accessed: 72453
180: stonewalling pairs accessed: 71418
197: stonewalling pairs accessed: 72473
128: stonewalling pairs accessed: 72272
77: stonewalling pairs accessed: 91046
17: stonewalling pairs accessed: 91465
87: stonewalling pairs accessed: 91579
172: stonewalling pairs accessed: 72194
114: stonewalling pairs accessed: 78058
21: stonewalling pairs accessed: 72597
79: stonewalling pairs accessed: 89278
122: stonewalling pairs accessed: 70587
1: stonewalling pairs accessed: 88734
73: stonewalling pairs accessed: 71933
40: stonewalling pairs accessed: 77122
171: stonewalling pairs accessed: 89749
24: stonewalling pairs accessed: 71521
34: stonewalling pairs accessed: 91066
74: stonewalling pairs accessed: 89278
184: stonewalling pairs accessed: 71601
156: stonewalling pairs accessed: 87607
152: stonewalling pairs accessed: 71349
26: stonewalling pairs accessed: 70102
212: stonewalling pairs accessed: 79369
49: stonewalling pairs accessed: 70999
19: stonewalling pairs accessed: 79402
41: stonewalling pairs accessed: 79402
8: stonewalling pairs accessed: 78291
42: stonewalling pairs accessed: 88238
67: stonewalling pairs accessed: 91553
103: stonewalling pairs accessed: 88878
92: stonewalling pairs accessed: 89086
191: stonewalling pairs accessed: 90210
95: stonewalling pairs accessed: 89860
84: stonewalling pairs accessed: 88318
141: stonewalling pairs accessed: 91073
183: stonewalling pairs accessed: 88079
105: stonewalling pairs accessed: 88587
198: stonewalling pairs accessed: 88295
48: stonewalling pairs accessed: 88267
59: stonewalling pairs accessed: 77411
193: stonewalling pairs accessed: 85580
145: stonewalling pairs accessed: 90920
61: stonewalling pairs accessed: 90219
151: stonewalling pairs accessed: 89296
173: stonewalling pairs accessed: 90519
182: stonewalling pairs accessed: 72895
175: stonewalling pairs accessed: 89076
209: stonewalling pairs accessed: 90628
20: stonewalling pairs accessed: 89444
165: stonewalling pairs accessed: 90103
139: stonewalling pairs accessed: 79022
89: stonewalling pairs accessed: 88676
115: stonewalling pairs accessed: 91073
179: stonewalling pairs accessed: 88859
91: stonewalling pairs accessed: 79779
37: stonewalling pairs accessed: 91516
11: stonewalling pairs accessed: 89745
100: stonewalling pairs accessed: 88493
51: stonewalling pairs accessed: 80129
53: stonewalling pairs accessed: 70923
62: stonewalling pairs accessed: 70424
7: stonewalling pairs accessed: 91542
94: stonewalling pairs accessed: 78795
66: stonewalling pairs accessed: 69554
144: stonewalling pairs accessed: 80121
200: stonewalling pairs accessed: 70468
136: stonewalling pairs accessed: 70334
93: stonewalling pairs accessed: 70925
76: stonewalling pairs accessed: 69943
119: stonewalling pairs accessed: 71630
219: stonewalling pairs accessed: 77804
202: stonewalling pairs accessed: 70600
78: stonewalling pairs accessed: 71222
99: stonewalling pairs accessed: 69956
208: stonewalling pairs accessed: 71601
113: stonewalling pairs accessed: 70738
111: stonewalling pairs accessed: 72571
30: stonewalling pairs accessed: 71150
5: stonewalling pairs accessed: 88299
109: stonewalling pairs accessed: 71062
143: stonewalling pairs accessed: 70251
98: stonewalling pairs accessed: 78356
29: stonewalling pairs accessed: 88526
102: stonewalling pairs accessed: 70251
106: stonewalling pairs accessed: 69943
148: stonewalling pairs accessed: 88800
97: stonewalling pairs accessed: 91542
70: stonewalling pairs accessed: 69695
217: stonewalling pairs accessed: 73390
150: stonewalling pairs accessed: 77660
104: stonewalling pairs accessed: 72092
31: stonewalling pairs accessed: 89236
64: stonewalling pairs accessed: 89952
130: stonewalling pairs accessed: 72345
14: stonewalling pairs accessed: 77779
134: stonewalling pairs accessed: 70471
133: stonewalling pairs accessed: 89733
80: stonewalling pairs accessed: 70633
210: stonewalling pairs accessed: 71210
176: stonewalling pairs accessed: 70524
196: stonewalling pairs accessed: 77888
86: stonewalling pairs accessed: 70294
116: stonewalling pairs accessed: 71145
6: stonewalling pairs accessed: 70238
178: stonewalling pairs accessed: 71139
45: stonewalling pairs accessed: 71329
215: stonewalling pairs accessed: 72589
69: stonewalling pairs accessed: 90501
96: stonewalling pairs accessed: 70251
43: stonewalling pairs accessed: 71211
23: stonewalling pairs accessed: 78291
192: stonewalling pairs accessed: 73242
190: stonewalling pairs accessed: 78619
160: stonewalling pairs accessed: 71062
199: stonewalling pairs accessed: 78486
50: stonewalling pairs accessed: 77726
57: stonewalling pairs accessed: 91651
167: stonewalling pairs accessed: 91577
177: stonewalling pairs accessed: 91577
118: stonewalling pairs accessed: 77714
174: stonewalling pairs accessed: 71062
204: stonewalling pairs accessed: 71210
4: stonewalling pairs accessed: 72077
28: stonewalling pairs accessed: 71627
168: stonewalling pairs accessed: 80023
58: stonewalling pairs accessed: 78836
201: stonewalling pairs accessed: 88917
81: stonewalling pairs accessed: 77303
126: stonewalling pairs accessed: 70906
35: stonewalling pairs accessed: 77995
121: stonewalling pairs accessed: 79866
101: stonewalling pairs accessed: 77714
161: stonewalling pairs accessed: 88800
75: stonewalling pairs accessed: 77935
85: stonewalling pairs accessed: 77216
3: stonewalling pairs accessed: 89602
206: stonewalling pairs accessed: 70590
stonewalling pairs accessed min: 69484 max: 91661 -- min data: 3.0 GiB mean data: 3.5 GiB time: 300.1s
WARNING: Expected aggregate file size       = 5170880000000.
WARNING: Stat() of aggregate file size      = 947936063360.
WARNING: Using actual aggregate bytes moved = 947936063360.
WARNING: maybe caused by deadlineForStonewalling
write     2376.96    53023      370.43      45.91      45.91      0.020295   380.32     0.008656   380.33     0   
Max Write: 2376.96 MiB/sec (2492.42 MB/sec)

Summary of all tests:
Operation   Max(MiB)   Min(MiB)  Mean(MiB)     StdDev   Max(OPs)   Min(OPs)  Mean(OPs)     StdDev    Mean(s) Stonewall(s) Stonewall(MiB) Test# #Tasks tPN reps fPP reord reordoff reordrand seed segcnt   blksiz    xsize aggs(MiB)   API RefNum
write        2376.96    2376.96    2376.96       0.00   53021.18   53021.18   53021.18       0.00  380.32764     300.08       2626.47     0    220  22    1   0     1        1         0    0 500000    47008    47008  904022.2 POSIX      0
Finished            : Sat Jul 11 11:21:55 2020
mdtest_easy_delete
-- started at 07/11/2020 11:47:55 --

mdtest-3.3.0+dev was launched with 220 total task(s) on 10 node(s)
Command line used: /IO500/bin/mdtest '-r' '-F' '-P' '-d' '/mnt/cephfs/datafiles/2020.07.11-10.09.37-scr/mdt_easy' '-n' '40000' '-u' '-L' '-a' 'POSIX' '-x' '/mnt/cephfs/datafiles/2020.07.11-10.09.37-scr/mdt_easy-stonewall' '-N' '1'
Path: /mnt/cephfs/datafiles/2020.07.11-10.09.37-scr
FS: 62.3 TiB   Used FS: 22.1%   Inodes: 23.2 Mi   Used Inodes: 100.0%

Nodemap: 1000000000100000000010000000001000000000100000000010000000001000000000100000000010000000001000000000100000000010000000001000000000100000000010000000001000000000100000000010000000001000000000100000000010000000001000000000
V-0: Rank   0 Line  2166 Shifting ranks by 1 for each phase.
220 tasks, 8800000 files

SUMMARY rate: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation             :          0.000          0.000          0.000          0.000
   File stat                 :          0.000          0.000          0.000          0.000
   File read                 :          0.000          0.000          0.000          0.000
   File removal              :      20786.547      20786.501      20786.528          0.015
   Tree creation             :          0.000          0.000          0.000          0.000
   Tree removal              :          0.359          0.359          0.359          0.000

SUMMARY time: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation             :          0.000          0.000          0.000          0.000
   File stat                 :          0.000          0.000          0.000          0.000
   File read                 :          0.000          0.000          0.000          0.000
   File removal              :        423.352        423.351        423.351          0.000
   Tree creation             :          0.000          0.000          0.000          0.000
   Tree removal              :          2.786          2.786          2.786          0.000
-- finished at 07/11/2020 11:55:01 --

mdtest_easy_stat
-- started at 07/11/2020 11:44:14 --

mdtest-3.3.0+dev was launched with 220 total task(s) on 10 node(s)
Command line used: /IO500/bin/mdtest '-T' '-F' '-P' '-d' '/mnt/cephfs/datafiles/2020.07.11-10.09.37-scr/mdt_easy' '-n' '40000' '-u' '-L' '-a' 'POSIX' '-x' '/mnt/cephfs/datafiles/2020.07.11-10.09.37-scr/mdt_easy-stonewall' '-N' '1'
Path: /mnt/cephfs/datafiles/2020.07.11-10.09.37-scr
FS: 62.3 TiB   Used FS: 22.1%   Inodes: 23.2 Mi   Used Inodes: 100.0%

Nodemap: 1000000000100000000010000000001000000000100000000010000000001000000000100000000010000000001000000000100000000010000000001000000000100000000010000000001000000000100000000010000000001000000000100000000010000000001000000000
V-0: Rank   0 Line  2166 Shifting ranks by 1 for each phase.
220 tasks, 8800000 files

SUMMARY rate: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation             :          0.000          0.000          0.000          0.000
   File stat                 :      79743.502      79743.291      79743.413          0.057
   File read                 :          0.000          0.000          0.000          0.000
   File removal              :          0.000          0.000          0.000          0.000
   Tree creation             :          0.000          0.000          0.000          0.000
   Tree removal              :          0.000          0.000          0.000          0.000

SUMMARY time: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation             :          0.000          0.000          0.000          0.000
   File stat                 :        110.354        110.354        110.354          0.000
   File read                 :          0.000          0.000          0.000          0.000
   File removal              :          0.000          0.000          0.000          0.000
   Tree creation             :          0.000          0.000          0.000          0.000
   Tree removal              :          0.000          0.000          0.000          0.000
-- finished at 07/11/2020 11:46:04 --

mdtest_easy_write
-- started at 07/11/2020 11:10:04 --

mdtest-3.3.0+dev was launched with 220 total task(s) on 10 node(s)
Command line used: /IO500/bin/mdtest '-Y' '-C' '-F' '-P' '-d' '/mnt/cephfs/datafiles/2020.07.11-10.09.37-scr/mdt_easy' '-n' '40000' '-u' '-L' '-a' 'POSIX' '-x' '/mnt/cephfs/datafiles/2020.07.11-10.09.37-scr/mdt_easy-stonewall' '-N' '1' '-W' '300'
Path: /mnt/cephfs/datafiles/2020.07.11-10.09.37-scr
FS: 61.9 TiB   Used FS: 24.6%   Inodes: 12.2 Mi   Used Inodes: 100.0%

Nodemap: 1000000000100000000010000000001000000000100000000010000000001000000000100000000010000000001000000000100000000010000000001000000000100000000010000000001000000000100000000010000000001000000000100000000010000000001000000000
V-0: Rank   0 Line  2166 Shifting ranks by 1 for each phase.
220 tasks, 8800000 files
Continue stonewall hit min: 21565 max: 40000 avg: 38093.5 


SUMMARY rate: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation             :      26721.455      26721.393      26721.431          0.019
   File stat                 :          0.000          0.000          0.000          0.000
   File read                 :          0.000          0.000          0.000          0.000
   File removal              :          0.000          0.000          0.000          0.000
   File create (stonewall)   :             NA             NA      27858.441             NA
   Tree creation             :         25.554         25.554         25.554          0.000
   Tree removal              :          0.000          0.000          0.000          0.000

SUMMARY time: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation             :        329.324        329.323        329.324          0.000
   File stat                 :          0.000          0.000          0.000          0.000
   File read                 :          0.000          0.000          0.000          0.000
   File removal              :          0.000          0.000          0.000          0.000
   File create (stonewall)   :             NA             NA        300.827             NA
   Tree creation             :          0.039          0.039          0.039          0.000
   Tree removal              :          0.000          0.000          0.000          0.000
-- finished at 07/11/2020 11:15:34 --

mdtest_hard_delete
-- started at 07/11/2020 11:56:54 --

mdtest-3.3.0+dev was launched with 220 total task(s) on 10 node(s)
Command line used: /IO500/bin/mdtest '-r' '-t' '-F' '-P' '-w' '3901' '-e' '3901' '-d' '/mnt/cephfs/datafiles/2020.07.11-10.09.37-scr/mdt_hard' '-n' '40000' '-x' '/mnt/cephfs/datafiles/2020.07.11-10.09.37-scr/mdt_hard-stonewall' '-a' 'POSIX' '-N' '1'
Path: /mnt/cephfs/datafiles/2020.07.11-10.09.37-scr
FS: 62.3 TiB   Used FS: 22.1%   Inodes: 14.8 Mi   Used Inodes: 100.0%

Nodemap: 1000000000100000000010000000001000000000100000000010000000001000000000100000000010000000001000000000100000000010000000001000000000100000000010000000001000000000100000000010000000001000000000100000000010000000001000000000
V-0: Rank   0 Line  2166 Shifting ranks by 1 for each phase.
220 tasks, 8800000 files

SUMMARY rate: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation             :          0.000          0.000          0.000          0.000
   File stat                 :          0.000          0.000          0.000          0.000
   File read                 :          0.000          0.000          0.000          0.000
   File removal              :       5751.760       5751.746       5751.754          0.004
   Tree creation             :          0.000          0.000          0.000          0.000
   Tree removal              :         11.859         11.859         11.859          0.000

SUMMARY time: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation             :          0.000          0.000          0.000          0.000
   File stat                 :          0.000          0.000          0.000          0.000
   File read                 :          0.000          0.000          0.000          0.000
   File removal              :        540.424        540.422        540.423          0.000
   Tree creation             :          0.000          0.000          0.000          0.000
   Tree removal              :          0.084          0.084          0.084          0.000
-- finished at 07/11/2020 12:05:55 --

mdtest_hard_read
-- started at 07/11/2020 11:55:02 --

mdtest-3.3.0+dev was launched with 220 total task(s) on 10 node(s)
Command line used: /IO500/bin/mdtest '-X' '-E' '-t' '-F' '-P' '-w' '3901' '-e' '3901' '-d' '/mnt/cephfs/datafiles/2020.07.11-10.09.37-scr/mdt_hard' '-n' '40000' '-x' '/mnt/cephfs/datafiles/2020.07.11-10.09.37-scr/mdt_hard-stonewall' '-a' 'POSIX' '-N' '1'
Path: /mnt/cephfs/datafiles/2020.07.11-10.09.37-scr
FS: 62.3 TiB   Used FS: 22.1%   Inodes: 14.8 Mi   Used Inodes: 100.0%

Nodemap: 1000000000100000000010000000001000000000100000000010000000001000000000100000000010000000001000000000100000000010000000001000000000100000000010000000001000000000100000000010000000001000000000100000000010000000001000000000
V-0: Rank   0 Line  2166 Shifting ranks by 1 for each phase.
220 tasks, 8800000 files

SUMMARY rate: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation             :          0.000          0.000          0.000          0.000
   File stat                 :          0.000          0.000          0.000          0.000
   File read                 :      27647.483      27647.402      27647.444          0.021
   File removal              :          0.000          0.000          0.000          0.000
   Tree creation             :          0.000          0.000          0.000          0.000
   Tree removal              :          0.000          0.000          0.000          0.000

SUMMARY time: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation             :          0.000          0.000          0.000          0.000
   File stat                 :          0.000          0.000          0.000          0.000
   File read                 :        112.429        112.429        112.429          0.000
   File removal              :          0.000          0.000          0.000          0.000
   Tree creation             :          0.000          0.000          0.000          0.000
   Tree removal              :          0.000          0.000          0.000          0.000
-- finished at 07/11/2020 11:56:54 --

mdtest_hard_stat
-- started at 07/11/2020 11:47:18 --

mdtest-3.3.0+dev was launched with 220 total task(s) on 10 node(s)
Command line used: /IO500/bin/mdtest '-T' '-t' '-F' '-P' '-w' '3901' '-e' '3901' '-d' '/mnt/cephfs/datafiles/2020.07.11-10.09.37-scr/mdt_hard' '-n' '40000' '-x' '/mnt/cephfs/datafiles/2020.07.11-10.09.37-scr/mdt_hard-stonewall' '-a' 'POSIX' '-N' '1'
Path: /mnt/cephfs/datafiles/2020.07.11-10.09.37-scr
FS: 62.3 TiB   Used FS: 22.1%   Inodes: 23.2 Mi   Used Inodes: 100.0%

Nodemap: 1000000000100000000010000000001000000000100000000010000000001000000000100000000010000000001000000000100000000010000000001000000000100000000010000000001000000000100000000010000000001000000000100000000010000000001000000000
V-0: Rank   0 Line  2166 Shifting ranks by 1 for each phase.
220 tasks, 8800000 files

SUMMARY rate: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation             :          0.000          0.000          0.000          0.000
   File stat                 :      83659.391      83659.116      83659.238          0.072
   File read                 :          0.000          0.000          0.000          0.000
   File removal              :          0.000          0.000          0.000          0.000
   Tree creation             :          0.000          0.000          0.000          0.000
   Tree removal              :          0.000          0.000          0.000          0.000

SUMMARY time: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation             :          0.000          0.000          0.000          0.000
   File stat                 :         37.155         37.155         37.155          0.000
   File read                 :          0.000          0.000          0.000          0.000
   File removal              :          0.000          0.000          0.000          0.000
   Tree creation             :          0.000          0.000          0.000          0.000
   Tree removal              :          0.000          0.000          0.000          0.000
-- finished at 07/11/2020 11:47:55 --

mdtest_hard_write
-- started at 07/11/2020 11:21:55 --

mdtest-3.3.0+dev was launched with 220 total task(s) on 10 node(s)
Command line used: /IO500/bin/mdtest '-Y' '-C' '-t' '-F' '-P' '-w' '3901' '-e' '3901' '-d' '/mnt/cephfs/datafiles/2020.07.11-10.09.37-scr/mdt_hard' '-n' '40000' '-x' '/mnt/cephfs/datafiles/2020.07.11-10.09.37-scr/mdt_hard-stonewall' '-a' 'POSIX' '-N' '1' '-W' '300'
Path: /mnt/cephfs/datafiles/2020.07.11-10.09.37-scr
FS: 62.3 TiB   Used FS: 22.1%   Inodes: 20.2 Mi   Used Inodes: 100.0%

Nodemap: 1000000000100000000010000000001000000000100000000010000000001000000000100000000010000000001000000000100000000010000000001000000000100000000010000000001000000000100000000010000000001000000000100000000010000000001000000000
V-0: Rank   0 Line  2166 Shifting ranks by 1 for each phase.
220 tasks, 8800000 files
Continue stonewall hit min: 13686 max: 14129 avg: 13890.5 


SUMMARY rate: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation             :       9996.626       9996.601       9996.617          0.008
   File stat                 :          0.000          0.000          0.000          0.000
   File read                 :          0.000          0.000          0.000          0.000
   File removal              :          0.000          0.000          0.000          0.000
   File create (stonewall)   :             NA             NA      10185.581             NA
   Tree creation             :         83.389         83.389         83.389          0.000
   Tree removal              :          0.000          0.000          0.000          0.000

SUMMARY time: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation             :        310.944        310.943        310.943          0.000
   File stat                 :          0.000          0.000          0.000          0.000
   File read                 :          0.000          0.000          0.000          0.000
   File removal              :          0.000          0.000          0.000          0.000
   File create (stonewall)   :             NA             NA        300.023             NA
   Tree creation             :          0.012          0.012          0.012          0.000
   Tree removal              :          0.000          0.000          0.000          0.000
-- finished at 07/11/2020 11:27:06 --

result_summary
[RESULT] BW   phase 1            ior_easy_write               12.881 GiB/s : time 518.13 seconds
[RESULT] IOPS phase 1         mdtest_easy_write               26.722 kiops : time 329.32 seconds
[RESULT] BW   phase 2            ior_hard_write                2.321 GiB/s : time 380.32 seconds
[RESULT] IOPS phase 2         mdtest_hard_write                9.997 kiops : time 310.94 seconds
[RESULT] IOPS phase 3                      find              273.590 kiops : time  43.53 seconds
[RESULT] BW   phase 3             ior_easy_read                6.783 GiB/s : time 983.31 seconds
[RESULT] IOPS phase 4          mdtest_easy_stat               79.744 kiops : time 110.35 seconds
[RESULT] BW   phase 4             ior_hard_read               12.133 GiB/s : time  72.72 seconds
[RESULT] IOPS phase 5          mdtest_hard_stat               83.659 kiops : time  37.15 seconds
[RESULT] IOPS phase 6        mdtest_easy_delete               20.786 kiops : time 423.35 seconds
[RESULT] IOPS phase 7          mdtest_hard_read               27.647 kiops : time 112.43 seconds
[RESULT] IOPS phase 8        mdtest_hard_delete                5.752 kiops : time 541.02 seconds
[SCORE] Bandwidth 7.04313 GiB/s : IOPS 33.5666 kiops : TOTAL 15.3758