MOGON II

Institution Johannes Gutenberg University Mainz
Client Procs Per Node 16
Client Operating System CentOS
Client Operating System Version 7
Client Kernel Version 3.10.0-1127.8.2.el7.x86_64

DATA SERVER

Storage Type Intel SSD DC S4600 Series
Volatile Memory
Storage Interface SATA
Network Omnipath
Software Version 0.8dev
OS Version

INFORMATION

Client Nodes 10
Client Total Procs 160
Metadata Nodes 200
Metadata Storage Devices 1
Data Nodes 200
Data Storage Devices 1

METADATA

Easy Write 1,462.02 kIOP/s
Easy Stat 1,454.74 kIOP/s
Easy Delete 782.98 kIOP/s
Hard Write 249.10 kIOP/s
Hard Read 284.09 kIOP/s
Hard Stat 1,493.59 kIOP/s
Hard Delete 273.36 kIOP/s

Submitted Files

io500
#!/bin/bash
#
# INSTRUCTIONS:
# This script takes its parameters from the same .ini file as the io500 binary.
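#
# A minimal, purely illustrative config.ini is sketched below. The section and
# parameter names are taken from the get_ini_* calls in this script; the values
# are made up and are not those of this submission.
#
#   [global]
#   datadir = ./datafiles
#   resultdir = ./results
#   drop-caches = False
#
#   [debug]
#   stonewall-time = 300
#
#   [ior-easy]
#   API = POSIX
#   transferSize = 1m
#   blockSize = 9920000m
#
#   [mdtest-easy]
#   n = 1000000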

function setup_paths {
  # Set the paths to the binaries and how to launch MPI jobs.
  # If you ran ./utilities/prepare.sh successfully, then binaries are in ./bin/
  io500_ior_cmd=$PWD/bin/ior
  io500_mdtest_cmd=$PWD/bin/mdtest
  io500_mdreal_cmd=$PWD/bin/md-real-io
  io500_mpirun="mpiexec"
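  # The -x exports below propagate the GekkoFS client environment to every
  # rank: LD_PRELOAD loads the libgkfs_intercept.so interception library so
  # the benchmark's POSIX calls are redirected to the GekkoFS daemons, and
  # LIBGKFS_HOSTS_FILE points the clients at the daemon host list.  The
  # FI_PSM2_DISCONNECT and PSM2_MULTI_EP settings are Omni-Path (PSM2) fabric
  # tunables.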
  io500_mpiargs="-np 160 --map-by node --hostfile /home/vef/hostfile -x LD_PRELOAD=/gpfs/fs1/home/vef/scp/gekkofs/build/src/client/libgkfs_intercept.so -x LIBGKFS_HOSTS_FILE=/lustre/miifs01/project/m2_zdvresearch/vef/gkfs_hostfile -x FI_PSM2_DISCONNECT=1 -x PSM2_MULTI_EP=1"
  export PATH=/gpfs/fs1/home/vef/io500_bins:${PATH}
}

function setup_directories {
  local workdir
  local resultdir
  local ts

  # set directories where benchmark files are created and where the results go
  # If you want to set up stripe tuning on your output directories or anything
  # similar, then this is the right place to do it.  This creates the output
  # directories for both the app run and the script run.
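  # As a purely illustrative example (not used in this GekkoFS run), on a
  # Lustre target one could widen the stripe of the bulk-I/O directories after
  # they are created below, e.g.:
  #   lfs setstripe -c -1 "$workdir-scr/ior_easy"
  #   lfs setstripe -c -1 "$workdir-scr/ior_hard"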

  timestamp=$(date +%Y.%m.%d-%H.%M.%S)           # create a uniquifier
  [ $(get_ini_global_param timestamp-datadir True) != "False" ] &&
	ts="$timestamp" || ts="io500"
  # directory where the data will be stored
  workdir=$(get_ini_global_param datadir $PWD/datafiles)/$ts
  io500_workdir=$workdir-scr
  [ $(get_ini_global_param timestamp-resultdir True) != "False" ] &&
	ts="$timestamp" || ts="io500"
  # the directory where the output results will be kept
  resultdir=$(get_ini_global_param resultdir $PWD/results)/$ts
  io500_result_dir=$resultdir-scr

  LIBGKFS_HOSTS_FILE=/lustre/miifs01/project/m2_zdvresearch/vef/gkfs_hostfile LD_PRELOAD=/gpfs/fs1/home/vef/scp/gekkofs/build/src/client/libgkfs_intercept.so mkdir -p $workdir-{scr,app}/{ior_easy,ior_hard,mdtest_easy,mdtest_hard} $resultdir-{scr,app}
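  # The mkdir above runs with the GekkoFS client library preloaded, so the
  # benchmark subdirectories are created inside the GekkoFS namespace; the
  # plain mkdir below creates the top-level work and result directories on the
  # underlying file system.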

  mkdir -p $workdir-{scr,app} $resultdir-{scr,app}
}

# you should not edit anything below this line
set -eo pipefail  # better error handling

io500_ini="${1:-""}"
if [[ -z "$io500_ini" ]]; then
  echo "error: ini file must be specified.  usage: $0 "
  exit 1
fi
if [[ ! -s "$io500_ini" ]]; then
  echo "error: ini file '$io500_ini' not found or empty"
  exit 2
fi

function get_ini_section_param() {
  local section="$1"
  local param="$2"
  local inside=false

  while read LINE; do
    LINE=$(sed -e 's/ *#.*//' -e '1s/ *= */=/' <<<$LINE)
    $inside && [[ "$LINE" =~ "[.*]" ]] && inside=false && break
    [[ -n "$section" && "$LINE" =~ "[$section]" ]] && inside=true && continue
    ! $inside && continue
    #echo $LINE | awk -F = "/^$param/ { print \$2 }"
    if [[ $(echo $LINE | grep "^$param *=" ) != "" ]] ; then
      # echo "$section : $param : $inside : $LINE" >> parsed.txt # debugging
      echo $LINE | sed -e "s/[^=]*=[ \t]*\(.*\)/\1/"
      return
    fi
  done < $io500_ini
  echo ""
}

function get_ini_param() {
  local section="$1"
  local param="$2"
  local default="$3"

  # try and get the most-specific param first, then more generic params
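  # e.g. for section "ior-easy-read" the lookup order is:
  #   [ior-easy-read] -> [ior-easy] (suffix stripped via ${section%-*}) -> [global] -> "$default"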
  val=$(get_ini_section_param $section $param)
  [ -n "$val" ] || val="$(get_ini_section_param ${section%-*} $param)"
  [ -n "$val" ] || val="$(get_ini_section_param global $param)"

  echo "${val:-$default}" |
  	sed -e 's/[Ff][Aa][Ll][Ss][Ee]/False/' -e 's/[Tt][Rr][Uu][Ee]/True/'
}

function get_ini_run_param() {
  local section="$1"
  local default="$2"
  local val

  val=$(get_ini_section_param $section noRun)

  # logic is reversed from "noRun=TRUE" to "run=False"
  [[ $val = [Tt][Rr][Uu][Ee] ]] && echo "False" || echo "$default"
}

function get_ini_global_param() {
  local param="$1"
  local default="$2"
  local val

  val=$(get_ini_section_param global $param |
  	sed -e 's/[Ff][Aa][Ll][Ss][Ee]/False/' -e 's/[Tt][Rr][Uu][Ee]/True/')

  echo "${val:-$default}"
}

# does the write phase and enables the subsequent read
io500_run_ior_easy="$(get_ini_run_param ior-easy True)"
# does the creat phase and enables the subsequent stat
io500_run_md_easy="$(get_ini_run_param mdtest-easy True)"
# does the write phase and enables the subsequent read
io500_run_ior_hard="$(get_ini_run_param ior-hard True)"
# does the creat phase and enables the subsequent read
io500_run_md_hard="$(get_ini_run_param mdtest-hard True)"
io500_run_find="$(get_ini_run_param find True)"
io500_run_ior_easy_read="$(get_ini_run_param ior-easy-read True)"
io500_run_md_easy_stat="$(get_ini_run_param mdtest-easy-stat True)"
io500_run_ior_hard_read="$(get_ini_run_param ior-hard-read True)"
io500_run_md_hard_stat="$(get_ini_run_param mdtest-hard-stat True)"
io500_run_md_hard_read="$(get_ini_run_param mdtest-hard-read True)"
# turn this off if you want to just run find by itself
io500_run_md_easy_delete="$(get_ini_run_param mdtest-easy-delete True)"
# turn this off if you want to just run find by itself
io500_run_md_hard_delete="$(get_ini_run_param mdtest-hard-delete True)"
io500_run_mdreal="$(get_ini_run_param mdreal False)"
# Attempt to clean the cache after every benchmark; useful for validating the
# performance results and for testing on a local node.  It uses the
# io500_clean_cache_cmd (which can be overwritten); make sure the user can
# write to /proc/sys/vm/drop_caches.
io500_clean_cache="$(get_ini_global_param drop-caches False)"
io500_clean_cache_cmd="$(get_ini_global_param drop-caches-cmd)"
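# A typical drop-caches command on Linux (illustrative only; not taken from
# this submission's .ini) would be something like:
#   io500_clean_cache_cmd="sudo -n bash -c 'echo 3 > /proc/sys/vm/drop_caches'"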
io500_cleanup_workdir="$(get_ini_run_param cleanup)"
# Stonewalling timer: set to 300 for an official run; set to 0 if you never want to abort...
io500_stonewall_timer=$(get_ini_param debug stonewall-time 300)
# Choose "regular" for an official regular submission, or "scc" for a Student
# Cluster Competition submission, which executes the test cases for 30 seconds
# instead of 300 seconds.
io500_rules="regular"

# To run this benchmark, find and edit each of these functions.  Please also
# edit the 'extra_description' function to help us collect the required data.
function main {
  setup_directories
  setup_paths
  setup_ior_easy # required if you want a complete score
  setup_ior_hard # required if you want a complete score
  setup_mdt_easy # required if you want a complete score
  setup_mdt_hard # required if you want a complete score
  setup_find     # required if you want a complete score
  setup_mdreal   # optional

  run_benchmarks

  if [[ ! -s "system-information.txt" ]]; then
    echo "Warning: please create a system-information.txt description by"
    echo "copying the information from https://vi4io.org/io500-info-creator/"
  else
    cp "system-information.txt" $io500_result_dir
  fi

  create_tarball
}

function setup_ior_easy {
  local params

  io500_ior_easy_size=$(get_ini_param ior-easy blockSize 9920000m | tr -d m)
  val=$(get_ini_param ior-easy API POSIX)
  [ -n "$val" ] && params+=" -a $val"
  val="$(get_ini_param ior-easy transferSize)"
  [ -n "$val" ] && params+=" -t $val"
  val="$(get_ini_param ior-easy hintsFileName)"
  [ -n "$val" ] && params+=" -U $val"
  val="$(get_ini_param ior-easy posix.odirect)"
  [ "$val" = "True" ] && params+=" --posix.odirect"
  val="$(get_ini_param ior-easy verbosity)"
  if [ -n "$val" ]; then
    for i in $(seq $val); do
      params+=" -v"
    done
  fi
  io500_ior_easy_params="$params"
  echo -n ""
}

function setup_mdt_easy {
  io500_mdtest_easy_params="-u -L" # unique dir per thread, files only at leaves

  val=$(get_ini_param mdtest-easy n 1000000)
  [ -n "$val" ] && io500_mdtest_easy_files_per_proc="$val"
  val=$(get_ini_param mdtest-easy API POSIX)
  [ -n "$val" ] && io500_mdtest_easy_params+=" -a $val"
  val=$(get_ini_param mdtest-easy posix.odirect)
  [ "$val" = "True" ] && io500_mdtest_easy_params+=" --posix.odirect"
  echo -n ""
}

function setup_ior_hard {
  local params

  io500_ior_hard_api=$(get_ini_param ior-hard API POSIX)
  io500_ior_hard_writes_per_proc="$(get_ini_param ior-hard segmentCount 10000000)"
  val="$(get_ini_param ior-hard hintsFileName)"
  [ -n "$val" ] && params+=" -U $val"
  val="$(get_ini_param ior-hard posix.odirect)"
  [ "$val" = "True" ] && params+=" --posix.odirect"
  val="$(get_ini_param ior-easy verbosity)"
  if [ -n "$val" ]; then
    for i in $(seq $val); do
      params+=" -v"
    done
  fi
  io500_ior_hard_api_specific_options="$params"
  echo -n ""
}

function setup_mdt_hard {
  val=$(get_ini_param mdtest-hard n 1000000)
  [ -n "$val" ] && io500_mdtest_hard_files_per_proc="$val"
  io500_mdtest_hard_api="$(get_ini_param mdtest-hard API POSIX)"
  io500_mdtest_hard_api_specific_options=""
  echo -n ""
}

function setup_find {
  val="$(get_ini_param find external-script)"
  [ -z "$val" ] && io500_find_mpi="True" && io500_find_cmd="$PWD/bin/pfind" ||
    io500_find_cmd="$val"
  # uses stonewalling, run pfind
  io500_find_cmd_args="$(get_ini_param find external-extra-args)"
  echo -n ""
}

function setup_mdreal {
  echo -n ""
}

function run_benchmarks {
  local app_first=$((RANDOM % 100))
  local app_rc=0

  # run the app and C version in random order to try and avoid bias
  (( app_first >= 50 )) && $io500_mpirun $io500_mpiargs $PWD/io500 $io500_ini --timestamp $timestamp || app_rc=$?

  # Important: source the io500_fixed.sh script.  Do not change it. If you
  # discover a need to change it, please email the mailing list to discuss.
  source build/io500-dev/utilities/io500_fixed.sh 2>&1 |
    tee $io500_result_dir/io-500-summary.$timestamp.txt

  (( $app_first >= 50 )) && return $app_rc

  echo "The io500.sh was run"
  echo
  echo "Running the C version of the benchmark now"
  # run the app and C version in random order to try and avoid bias
  $io500_mpirun $io500_mpiargs $PWD/io500 $io500_ini --timestamp $timestamp
}

create_tarball() {
  local sourcedir=$(dirname $io500_result_dir)
  local fname=$(basename ${io500_result_dir%-scr})
  local tarball=$sourcedir/io500-$HOSTNAME-$fname.tgz

  cp -v $0 $io500_ini $io500_result_dir
  tar czf $tarball -C $sourcedir $fname-{app,scr}
  echo "Created result tarball $tarball"
}

# Information fields; these provide information about your system hardware
# Use https://vi4io.org/io500-info-creator/ to generate information about
# your hardware that you want to include publicly!
function extra_description {
  # UPDATE: Please add your information into "system-information.txt" pasting the output of the info-creator
  # EXAMPLE:
  # io500_info_system_name='xxx'
  # DO NOT ADD IT HERE
  :
}

main
ior_easy_read
IOR-3.3.0+dev: MPI Coordinated Test of Parallel I/O
Began               : Tue Jul 14 01:22:06 2020
Command line        : /home/vef/git/io500-app/bin/ior -r -R -a POSIX -t 64m -v -b 9920000m -F -i 1 -C -Q 1 -g -G 27 -k -e -o /tmp/vef_mountdir/io500-scr/ior_easy/ior_file_easy -O stoneWallingStatusFile=/tmp/vef_mountdir/io500-scr/ior_easy/stonewall
Machine             : Linux x0003.mogon
Start time skew across all tasks: 243197.02 sec
TestID              : 0
StartTime           : Tue Jul 14 01:22:06 2020
Path                : /tmp/vef_mountdir/io500-scr/ior_easy
FS                  : 83.2 TiB   Used FS: 25.8%   Inodes: 0.0 Mi   Used Inodes: -nan%
Participating tasks: 160
Using reorderTasks '-C' (useful to avoid read cache in client)

Options: 
api                 : POSIX
apiVersion          : 
test filename       : /tmp/vef_mountdir/io500-scr/ior_easy/ior_file_easy
access              : file-per-process
type                : independent
segments            : 1
ordering in a file  : sequential
ordering inter file : constant task offset
task offset         : 1
nodes               : 10
tasks               : 160
clients per node    : 16
repetitions         : 1
xfersize            : 64 MiB
blocksize           : 9.46 TiB
aggregate filesize  : 1513.67 TiB

Results: 

access    bw(MiB/s)  IOPS       Latency(s)  block(KiB) xfer(KiB)  open(s)    wr/rd(s)   close(s)   total(s)   iter
------    ---------  ----       ----------  ---------- ---------  --------   --------   --------   --------   ----
Commencing read performance test: Tue Jul 14 01:22:06 2020

WARNING: Expected aggregate file size       = 1664299827200000.
WARNING: Stat() of aggregate file size      = 22183506083840.
WARNING: Using actual aggregate bytes moved = 22183506083840.
read      23026      359.79     0.005928    10158080000 65536      0.004237   918.77     0.000246   918.77     0   
Max Read:  23026.26 MiB/sec (24144.79 MB/sec)

Summary of all tests:
Operation   Max(MiB)   Min(MiB)  Mean(MiB)     StdDev   Max(OPs)   Min(OPs)  Mean(OPs)     StdDev    Mean(s) Stonewall(s) Stonewall(MiB) Test# #Tasks tPN reps fPP reord reordoff reordrand seed segcnt   blksiz    xsize aggs(MiB)   API RefNum
read        23026.26   23026.26   23026.26       0.00     359.79     359.79     359.79       0.00  918.76998         NA            NA     0    160  16    1   1     1        1         0    0      1 10401873920000 67108864 21155840.0 POSIX      0
Finished            : Tue Jul 14 01:37:25 2020
ior_easy_write
IOR-3.3.0+dev: MPI Coordinated Test of Parallel I/O
Began               : Tue Jul 14 00:57:44 2020
Command line        : /home/vef/git/io500-app/bin/ior -w -a POSIX -t 64m -v -b 9920000m -F -i 1 -C -Q 1 -g -G 27 -k -e -o /tmp/vef_mountdir/io500-scr/ior_easy/ior_file_easy -O stoneWallingStatusFile=/tmp/vef_mountdir/io500-scr/ior_easy/stonewall -O stoneWallingWearOut=1 -D 300
Machine             : Linux x0003.mogon
Start time skew across all tasks: 243197.02 sec
TestID              : 0
StartTime           : Tue Jul 14 00:57:44 2020
Path                : /tmp/vef_mountdir/io500-scr/ior_easy
FS                  : 83.2 TiB   Used FS: 0.1%   Inodes: 0.0 Mi   Used Inodes: -nan%
Participating tasks: 160
Using reorderTasks '-C' (useful to avoid read cache in client)

Options: 
api                 : POSIX
apiVersion          : 
test filename       : /tmp/vef_mountdir/io500-scr/ior_easy/ior_file_easy
access              : file-per-process
type                : independent
segments            : 1
ordering in a file  : sequential
ordering inter file : constant task offset
task offset         : 1
nodes               : 10
tasks               : 160
clients per node    : 16
repetitions         : 1
xfersize            : 64 MiB
blocksize           : 9.46 TiB
aggregate filesize  : 1513.67 TiB
stonewallingTime    : 300
stoneWallingWearOut : 1

Results: 

access    bw(MiB/s)  IOPS       Latency(s)  block(KiB) xfer(KiB)  open(s)    wr/rd(s)   close(s)   total(s)   iter
------    ---------  ----       ----------  ---------- ---------  --------   --------   --------   --------   ----
Commencing write performance test: Tue Jul 14 00:57:45 2020
83: stonewalling pairs accessed: 1877
127: stonewalling pairs accessed: 1856
93: stonewalling pairs accessed: 2018
82: stonewalling pairs accessed: 1828
153: stonewalling pairs accessed: 1931
151: stonewalling pairs accessed: 1992
25: stonewalling pairs accessed: 1848
45: stonewalling pairs accessed: 1830
105: stonewalling pairs accessed: 1834
135: stonewalling pairs accessed: 1939
44: stonewalling pairs accessed: 1838
24: stonewalling pairs accessed: 1865
37: stonewalling pairs accessed: 1970
43: stonewalling pairs accessed: 1935
123: stonewalling pairs accessed: 1873
20: stonewalling pairs accessed: 1860
65: stonewalling pairs accessed: 1842
143: stonewalling pairs accessed: 1878
29: stonewalling pairs accessed: 1843
104: stonewalling pairs accessed: 1816
18: stonewalling pairs accessed: 2017
98: stonewalling pairs accessed: 1968
130: stonewalling pairs accessed: 1963
110: stonewalling pairs accessed: 1995
53: stonewalling pairs accessed: 1938
55: stonewalling pairs accessed: 2066
115: stonewalling pairs accessed: 1983
0: stonewalling pairs accessed: 1854
9: stonewalling pairs accessed: 1811
69: stonewalling pairs accessed: 1893
79: stonewalling pairs accessed: 2002
8: stonewalling pairs accessed: 1858
138: stonewalling pairs accessed: 1990
149: stonewalling pairs accessed: 1814
119: stonewalling pairs accessed: 2030
19: stonewalling pairs accessed: 2035
109: stonewalling pairs accessed: 1801
94: stonewalling pairs accessed: 1999
4: stonewalling pairs accessed: 1828
3: stonewalling pairs accessed: 1871
2: stonewalling pairs accessed: 1868
32: stonewalling pairs accessed: 2063
158: stonewalling pairs accessed: 1933
102: stonewalling pairs accessed: 1827
14: stonewalling pairs accessed: 2013
59: stonewalling pairs accessed: 1994
139: stonewalling pairs accessed: 2025
154: stonewalling pairs accessed: 1914
27: stonewalling pairs accessed: 1851
52: stonewalling pairs accessed: 1948
58: stonewalling pairs accessed: 2055
140: stonewalling pairs accessed: 1839
71: stonewalling pairs accessed: 2017
50: stonewalling pairs accessed: 1979
85: stonewalling pairs accessed: 1859
1: stonewalling pairs accessed: 1910
114: stonewalling pairs accessed: 1982
157: stonewalling pairs accessed: 1951
64: stonewalling pairs accessed: 1901
41: stonewalling pairs accessed: 1849
67: stonewalling pairs accessed: 1879
101: stonewalling pairs accessed: 1846
7: stonewalling pairs accessed: 1865
107: stonewalling pairs accessed: 1813
137: stonewalling pairs accessed: 1990
144: stonewalling pairs accessed: 1875
49: stonewalling pairs accessed: 1876
36: stonewalling pairs accessed: 1900
23: stonewalling pairs accessed: 1894
77: stonewalling pairs accessed: 2025
141: stonewalling pairs accessed: 1890
84: stonewalling pairs accessed: 1886
30: stonewalling pairs accessed: 1961
96: stonewalling pairs accessed: 1962
73: stonewalling pairs accessed: 1994
6: stonewalling pairs accessed: 1862
13: stonewalling pairs accessed: 2046
80: stonewalling pairs accessed: 1873
125: stonewalling pairs accessed: 1817
108: stonewalling pairs accessed: 1877
42: stonewalling pairs accessed: 1827
113: stonewalling pairs accessed: 1972
129: stonewalling pairs accessed: 1839
136: stonewalling pairs accessed: 1959
66: stonewalling pairs accessed: 1861
26: stonewalling pairs accessed: 1832
46: stonewalling pairs accessed: 1875
62: stonewalling pairs accessed: 1871
122: stonewalling pairs accessed: 1859
74: stonewalling pairs accessed: 1951
131: stonewalling pairs accessed: 1938
68: stonewalling pairs accessed: 1869
87: stonewalling pairs accessed: 1832
11: stonewalling pairs accessed: 1938
133: stonewalling pairs accessed: 1994
146: stonewalling pairs accessed: 1823
112: stonewalling pairs accessed: 1987
33: stonewalling pairs accessed: 1939
106: stonewalling pairs accessed: 1813
28: stonewalling pairs accessed: 1830
132: stonewalling pairs accessed: 1951
54: stonewalling pairs accessed: 1915
75: stonewalling pairs accessed: 1950
148: stonewalling pairs accessed: 1869
147: stonewalling pairs accessed: 1851
126: stonewalling pairs accessed: 1804
22: stonewalling pairs accessed: 1852
145: stonewalling pairs accessed: 1842
95: stonewalling pairs accessed: 1976
142: stonewalling pairs accessed: 1815
10: stonewalling pairs accessed: 1980
81: stonewalling pairs accessed: 1842
56: stonewalling pairs accessed: 2003
17: stonewalling pairs accessed: 1916
21: stonewalling pairs accessed: 1863
128: stonewalling pairs accessed: 1875
35: stonewalling pairs accessed: 1938
12: stonewalling pairs accessed: 1972
63: stonewalling pairs accessed: 1787
60: stonewalling pairs accessed: 1841
38: stonewalling pairs accessed: 2009
155: stonewalling pairs accessed: 1911
31: stonewalling pairs accessed: 2003
118: stonewalling pairs accessed: 1972
15: stonewalling pairs accessed: 1999
103: stonewalling pairs accessed: 1905
89: stonewalling pairs accessed: 1847
159: stonewalling pairs accessed: 1944
120: stonewalling pairs accessed: 1867
91: stonewalling pairs accessed: 2017
97: stonewalling pairs accessed: 1969
100: stonewalling pairs accessed: 1855
72: stonewalling pairs accessed: 2014
70: stonewalling pairs accessed: 2013
51: stonewalling pairs accessed: 1922
150: stonewalling pairs accessed: 1990
99: stonewalling pairs accessed: 1928
88: stonewalling pairs accessed: 1897
47: stonewalling pairs accessed: 1892
117: stonewalling pairs accessed: 1935
111: stonewalling pairs accessed: 1910
61: stonewalling pairs accessed: 1895
152: stonewalling pairs accessed: 2011
92: stonewalling pairs accessed: 2026
90: stonewalling pairs accessed: 1925
48: stonewalling pairs accessed: 1890
57: stonewalling pairs accessed: 2000
39: stonewalling pairs accessed: 1992
78: stonewalling pairs accessed: 1986
5: stonewalling pairs accessed: 1825
124: stonewalling pairs accessed: 1866
40: stonewalling pairs accessed: 1869
121: stonewalling pairs accessed: 1778
76: stonewalling pairs accessed: 1942
34: stonewalling pairs accessed: 1974
134: stonewalling pairs accessed: 1888
16: stonewalling pairs accessed: 1974
156: stonewalling pairs accessed: 1979
86: stonewalling pairs accessed: 1872
stonewalling pairs accessed min: 1778 max: 2066 -- min data: 111.1 GiB mean data: 119.7 GiB time: 300.2s
116: stonewalling pairs accessed: 1983
WARNING: Expected aggregate file size       = 1664299827200000.
WARNING: Stat() of aggregate file size      = 22183506083840.
WARNING: Using actual aggregate bytes moved = 22183506083840.
WARNING: maybe caused by deadlineForStonewalling
write     64626      1009.78    0.002085    10158080000 65536      0.000976   327.36     0.000209   327.36     0   
Max Write: 64625.68 MiB/sec (67764.94 MB/sec)

Summary of all tests:
Operation   Max(MiB)   Min(MiB)  Mean(MiB)     StdDev   Max(OPs)   Min(OPs)  Mean(OPs)     StdDev    Mean(s) Stonewall(s) Stonewall(MiB) Test# #Tasks tPN reps fPP reord reordoff reordrand seed segcnt   blksiz    xsize aggs(MiB)   API RefNum
write       64625.68   64625.68   64625.68       0.00    1009.78    1009.78    1009.78       0.00  327.35964     300.23      65320.22     0    160  16    1   1     1        1         0    0      1 10401873920000 67108864 21155840.0 POSIX      0
Finished            : Tue Jul 14 01:03:12 2020
ior_hard_read
IOR-3.3.0+dev: MPI Coordinated Test of Parallel I/O
Began               : Tue Jul 14 01:43:10 2020
Command line        : /home/vef/git/io500-app/bin/ior -r -R -s 10000000 -a POSIX -v -i 1 -C -Q 1 -g -G 27 -k -e -t 47008 -b 47008 -o /tmp/vef_mountdir/io500-scr/ior_hard/IOR_file -O stoneWallingStatusFile=/tmp/vef_mountdir/io500-scr/ior_hard/stonewall
Machine             : Linux x0003.mogon
Start time skew across all tasks: 243197.02 sec
TestID              : 0
StartTime           : Tue Jul 14 01:43:10 2020
Path                : /tmp/vef_mountdir/io500-scr/ior_hard
FS                  : 83.2 TiB   Used FS: 25.8%   Inodes: 0.0 Mi   Used Inodes: -nan%
Participating tasks: 160
Using reorderTasks '-C' (useful to avoid read cache in client)

Options: 
api                 : POSIX
apiVersion          : 
test filename       : /tmp/vef_mountdir/io500-scr/ior_hard/IOR_file
access              : single-shared-file
type                : independent
segments            : 10000000
ordering in a file  : sequential
ordering inter file : constant task offset
task offset         : 1
nodes               : 10
tasks               : 160
clients per node    : 16
repetitions         : 1
xfersize            : 47008 bytes
blocksize           : 47008 bytes
aggregate filesize  : 68.41 TiB

Results: 

access    bw(MiB/s)  IOPS       Latency(s)  block(KiB) xfer(KiB)  open(s)    wr/rd(s)   close(s)   total(s)   iter
------    ---------  ----       ----------  ---------- ---------  --------   --------   --------   --------   ----
Commencing read performance test: Tue Jul 14 01:43:10 2020

WARNING: Expected aggregate file size       = 75212800000000.
WARNING: Stat() of aggregate file size      = 924621035520.
WARNING: Using actual aggregate bytes moved = 924621035520.
read      4567       101984     192.87      45.91      45.91      0.202885   192.87     0.000255   193.07     0   
Max Read:  4567.19 MiB/sec (4789.05 MB/sec)

Summary of all tests:
Operation   Max(MiB)   Min(MiB)  Mean(MiB)     StdDev   Max(OPs)   Min(OPs)  Mean(OPs)     StdDev    Mean(s) Stonewall(s) Stonewall(MiB) Test# #Tasks tPN reps fPP reord reordoff reordrand seed segcnt   blksiz    xsize aggs(MiB)   API RefNum
read         4567.19    4567.19    4567.19       0.00  101877.27  101877.27  101877.27       0.00  193.06995         NA            NA     0    160  16    1   0     1        1         0    0 10000000    47008    47008  881787.3 POSIX      0
Finished            : Tue Jul 14 01:46:23 2020
ior_hard_write
IOR-3.3.0+dev: MPI Coordinated Test of Parallel I/O
Began               : Tue Jul 14 01:09:01 2020
Command line        : /home/vef/git/io500-app/bin/ior -w -s 10000000 -a POSIX -v -i 1 -C -Q 1 -g -G 27 -k -e -t 47008 -b 47008 -o /tmp/vef_mountdir/io500-scr/ior_hard/IOR_file -O stoneWallingStatusFile=/tmp/vef_mountdir/io500-scr/ior_hard/stonewall -O stoneWallingWearOut=1 -D 300
Machine             : Linux x0003.mogon
Start time skew across all tasks: 243197.02 sec
TestID              : 0
StartTime           : Tue Jul 14 01:09:01 2020
Path                : /tmp/vef_mountdir/io500-scr/ior_hard
FS                  : 83.2 TiB   Used FS: 24.3%   Inodes: 0.0 Mi   Used Inodes: -nan%
Participating tasks: 160
Using reorderTasks '-C' (useful to avoid read cache in client)

Options: 
api                 : POSIX
apiVersion          : 
test filename       : /tmp/vef_mountdir/io500-scr/ior_hard/IOR_file
access              : single-shared-file
type                : independent
segments            : 10000000
ordering in a file  : sequential
ordering inter file : constant task offset
task offset         : 1
nodes               : 10
tasks               : 160
clients per node    : 16
repetitions         : 1
xfersize            : 47008 bytes
blocksize           : 47008 bytes
aggregate filesize  : 68.41 TiB
stonewallingTime    : 300
stoneWallingWearOut : 1

Results: 

access    bw(MiB/s)  IOPS       Latency(s)  block(KiB) xfer(KiB)  open(s)    wr/rd(s)   close(s)   total(s)   iter
------    ---------  ----       ----------  ---------- ---------  --------   --------   --------   --------   ----
Commencing write performance test: Tue Jul 14 01:09:01 2020
132: stonewalling pairs accessed: 122923
139: stonewalling pairs accessed: 122923
35: stonewalling pairs accessed: 122921
115: stonewalling pairs accessed: 122924
92: stonewalling pairs accessed: 122924
118: stonewalling pairs accessed: 122924
98: stonewalling pairs accessed: 122924
50: stonewalling pairs accessed: 122873
10: stonewalling pairs accessed: 122923
94: stonewalling pairs accessed: 122924
99: stonewalling pairs accessed: 122924
47: stonewalling pairs accessed: 122873
12: stonewalling pairs accessed: 122923
38: stonewalling pairs accessed: 122923
74: stonewalling pairs accessed: 122924
75: stonewalling pairs accessed: 122924
157: stonewalling pairs accessed: 122923
79: stonewalling pairs accessed: 122925
130: stonewalling pairs accessed: 122924
70: stonewalling pairs accessed: 122924
78: stonewalling pairs accessed: 122925
152: stonewalling pairs accessed: 122923
25: stonewalling pairs accessed: 122922
150: stonewalling pairs accessed: 122923
59: stonewalling pairs accessed: 122924
72: stonewalling pairs accessed: 122924
159: stonewalling pairs accessed: 122923
65: stonewalling pairs accessed: 122924
158: stonewalling pairs accessed: 122923
76: stonewalling pairs accessed: 122924
154: stonewalling pairs accessed: 122923
26: stonewalling pairs accessed: 122922
4: stonewalling pairs accessed: 122924
15: stonewalling pairs accessed: 122923
66: stonewalling pairs accessed: 122924
14: stonewalling pairs accessed: 122923
155: stonewalling pairs accessed: 122923
58: stonewalling pairs accessed: 122924
1: stonewalling pairs accessed: 122924
114: stonewalling pairs accessed: 122925
52: stonewalling pairs accessed: 122924
151: stonewalling pairs accessed: 122923
134: stonewalling pairs accessed: 122924
13: stonewalling pairs accessed: 122923
116: stonewalling pairs accessed: 122925
21: stonewalling pairs accessed: 122922
23: stonewalling pairs accessed: 122922
55: stonewalling pairs accessed: 122924
18: stonewalling pairs accessed: 122923
84: stonewalling pairs accessed: 122925
30: stonewalling pairs accessed: 122922
22: stonewalling pairs accessed: 122922
135: stonewalling pairs accessed: 122924
28: stonewalling pairs accessed: 122922
51: stonewalling pairs accessed: 122924
29: stonewalling pairs accessed: 122922
16: stonewalling pairs accessed: 122923
7: stonewalling pairs accessed: 122924
39: stonewalling pairs accessed: 122874
113: stonewalling pairs accessed: 122925
6: stonewalling pairs accessed: 122924
17: stonewalling pairs accessed: 122923
110: stonewalling pairs accessed: 122925
34: stonewalling pairs accessed: 122922
83: stonewalling pairs accessed: 122925
112: stonewalling pairs accessed: 122925
36: stonewalling pairs accessed: 122922
27: stonewalling pairs accessed: 122922
140: stonewalling pairs accessed: 122924
119: stonewalling pairs accessed: 122925
32: stonewalling pairs accessed: 122922
136: stonewalling pairs accessed: 122924
143: stonewalling pairs accessed: 122924
82: stonewalling pairs accessed: 122925
138: stonewalling pairs accessed: 122924
149: stonewalling pairs accessed: 122924
146: stonewalling pairs accessed: 122924
107: stonewalling pairs accessed: 122925
33: stonewalling pairs accessed: 122922
142: stonewalling pairs accessed: 122924
85: stonewalling pairs accessed: 122925
148: stonewalling pairs accessed: 122924
144: stonewalling pairs accessed: 122924
109: stonewalling pairs accessed: 122925
147: stonewalling pairs accessed: 122924
31: stonewalling pairs accessed: 122922
9: stonewalling pairs accessed: 122924
41: stonewalling pairs accessed: 122874
44: stonewalling pairs accessed: 122874
108: stonewalling pairs accessed: 122925
81: stonewalling pairs accessed: 122925
100: stonewalling pairs accessed: 122925
104: stonewalling pairs accessed: 122925
90: stonewalling pairs accessed: 122925
19: stonewalling pairs accessed: 122923
20: stonewalling pairs accessed: 122923
93: stonewalling pairs accessed: 122925
86: stonewalling pairs accessed: 122925
111: stonewalling pairs accessed: 122925
120: stonewalling pairs accessed: 122925
24: stonewalling pairs accessed: 122923
89: stonewalling pairs accessed: 122925
103: stonewalling pairs accessed: 122925
102: stonewalling pairs accessed: 122925
80: stonewalling pairs accessed: 122926
88: stonewalling pairs accessed: 122925
106: stonewalling pairs accessed: 122925
87: stonewalling pairs accessed: 122925
0: stonewalling pairs accessed: 122925
141: stonewalling pairs accessed: 122924
54: stonewalling pairs accessed: 122924
49: stonewalling pairs accessed: 122874
40: stonewalling pairs accessed: 122875
153: stonewalling pairs accessed: 122924
42: stonewalling pairs accessed: 122874
48: stonewalling pairs accessed: 122874
156: stonewalling pairs accessed: 122924
97: stonewalling pairs accessed: 122925
11: stonewalling pairs accessed: 122924
124: stonewalling pairs accessed: 122925
69: stonewalling pairs accessed: 122925
60: stonewalling pairs accessed: 122925
133: stonewalling pairs accessed: 122925
122: stonewalling pairs accessed: 122925
68: stonewalling pairs accessed: 122925
96: stonewalling pairs accessed: 122925
137: stonewalling pairs accessed: 122925
91: stonewalling pairs accessed: 122925
64: stonewalling pairs accessed: 122925
129: stonewalling pairs accessed: 122925
43: stonewalling pairs accessed: 122874
2: stonewalling pairs accessed: 122925
8: stonewalling pairs accessed: 122925
46: stonewalling pairs accessed: 122874
77: stonewalling pairs accessed: 122926
101: stonewalling pairs accessed: 122925
73: stonewalling pairs accessed: 122925
128: stonewalling pairs accessed: 122925
126: stonewalling pairs accessed: 122925
67: stonewalling pairs accessed: 122925
71: stonewalling pairs accessed: 122925
3: stonewalling pairs accessed: 122925
127: stonewalling pairs accessed: 122925
121: stonewalling pairs accessed: 122925
123: stonewalling pairs accessed: 122925
117: stonewalling pairs accessed: 122926
63: stonewalling pairs accessed: 122925
57: stonewalling pairs accessed: 122925
131: stonewalling pairs accessed: 122925
61: stonewalling pairs accessed: 122925
95: stonewalling pairs accessed: 122925
105: stonewalling pairs accessed: 122925
125: stonewalling pairs accessed: 122925
145: stonewalling pairs accessed: 122925
5: stonewalling pairs accessed: 122925
53: stonewalling pairs accessed: 122925
45: stonewalling pairs accessed: 122874
62: stonewalling pairs accessed: 122925
56: stonewalling pairs accessed: 122934
37: stonewalling pairs accessed: 122933
stonewalling pairs accessed min: 122873 max: 122934 -- min data: 5.4 GiB mean data: 5.4 GiB time: 300.0s
WARNING: Expected aggregate file size       = 75212800000000.
WARNING: Stat() of aggregate file size      = 924621035520.
WARNING: Using actual aggregate bytes moved = 924621035520.
WARNING: maybe caused by deadlineForStonewalling
write     2936.24    65541      300.07      45.91      45.91      0.203654   300.11     0.000220   300.31     0   
Max Write: 2936.24 MiB/sec (3078.87 MB/sec)

Summary of all tests:
Operation   Max(MiB)   Min(MiB)  Mean(MiB)     StdDev   Max(OPs)   Min(OPs)  Mean(OPs)     StdDev    Mean(s) Stonewall(s) Stonewall(MiB) Test# #Tasks tPN reps fPP reord reordoff reordrand seed segcnt   blksiz    xsize aggs(MiB)   API RefNum
write        2936.24    2936.24    2936.24       0.00   65496.81   65496.81   65496.81       0.00  300.31141     300.01       2938.88     0    160  16    1   0     1        1         0    0 10000000    47008    47008  881787.3 POSIX      0
Finished            : Tue Jul 14 01:14:01 2020
mdtest_easy_delete
-- started at 07/14/2020 01:47:20 --

mdtest-3.3.0+dev was launched with 160 total task(s) on 10 node(s)
Command line used: /home/vef/git/io500-app/bin/mdtest '-r' '-F' '-P' '-d' '/tmp/vef_mountdir/io500-scr/mdt_easy' '-n' '5000000' '-u' '-L' '-a' 'POSIX' '-x' '/tmp/vef_mountdir/io500-scr/mdt_easy-stonewall' '-N' '1'
Path: /tmp/vef_mountdir/io500-scr
FS: 83.2 TiB   Used FS: 25.8%   Inodes: 0.0 Mi   Used Inodes: -nan%

Nodemap: 1000000000100000000010000000001000000000100000000010000000001000000000100000000010000000001000000000100000000010000000001000000000100000000010000000001000000000
V-0: Rank   0 Line  2166 Shifting ranks by 1 for each phase.
160 tasks, 800000000 files

SUMMARY rate: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation             :          0.000          0.000          0.000          0.000
   File stat                 :          0.000          0.000          0.000          0.000
   File read                 :          0.000          0.000          0.000          0.000
   File removal              :     782981.276     782981.099     782981.182          0.038
   Tree creation             :          0.000          0.000          0.000          0.000
   Tree removal              :          0.066          0.066          0.066          0.000

SUMMARY time: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation             :          0.000          0.000          0.000          0.000
   File stat                 :          0.000          0.000          0.000          0.000
   File read                 :          0.000          0.000          0.000          0.000
   File removal              :        633.920        633.919        633.920          0.000
   Tree creation             :          0.000          0.000          0.000          0.000
   Tree removal              :         15.156         15.156         15.156          0.000
-- finished at 07/14/2020 01:58:10 --

mdtest_easy_stat
-- started at 07/14/2020 01:37:27 --

mdtest-3.3.0+dev was launched with 160 total task(s) on 10 node(s)
Command line used: /home/vef/git/io500-app/bin/mdtest '-T' '-F' '-P' '-d' '/tmp/vef_mountdir/io500-scr/mdt_easy' '-n' '5000000' '-u' '-L' '-a' 'POSIX' '-x' '/tmp/vef_mountdir/io500-scr/mdt_easy-stonewall' '-N' '1'
Path: /tmp/vef_mountdir/io500-scr
FS: 83.2 TiB   Used FS: 25.8%   Inodes: 0.0 Mi   Used Inodes: -nan%

Nodemap: 1000000000100000000010000000001000000000100000000010000000001000000000100000000010000000001000000000100000000010000000001000000000100000000010000000001000000000
V-0: Rank   0 Line  2166 Shifting ranks by 1 for each phase.
160 tasks, 800000000 files

SUMMARY rate: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation             :          0.000          0.000          0.000          0.000
   File stat                 :    1454741.591    1454740.952    1454741.285          0.143
   File read                 :          0.000          0.000          0.000          0.000
   File removal              :          0.000          0.000          0.000          0.000
   Tree creation             :          0.000          0.000          0.000          0.000
   Tree removal              :          0.000          0.000          0.000          0.000

SUMMARY time: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation             :          0.000          0.000          0.000          0.000
   File stat                 :        341.193        341.193        341.193          0.000
   File read                 :          0.000          0.000          0.000          0.000
   File removal              :          0.000          0.000          0.000          0.000
   Tree creation             :          0.000          0.000          0.000          0.000
   Tree removal              :          0.000          0.000          0.000          0.000
-- finished at 07/14/2020 01:43:08 --

mdtest_easy_write
-- started at 07/14/2020 01:03:14 --

mdtest-3.3.0+dev was launched with 160 total task(s) on 10 node(s)
Command line used: /home/vef/git/io500-app/bin/mdtest '-Y' '-C' '-F' '-P' '-d' '/tmp/vef_mountdir/io500-scr/mdt_easy' '-n' '5000000' '-u' '-L' '-a' 'POSIX' '-x' '/tmp/vef_mountdir/io500-scr/mdt_easy-stonewall' '-N' '1' '-W' '300'
Path: /tmp/vef_mountdir/io500-scr
FS: 83.2 TiB   Used FS: 24.4%   Inodes: 0.0 Mi   Used Inodes: -nan%

Nodemap: 1000000000100000000010000000001000000000100000000010000000001000000000100000000010000000001000000000100000000010000000001000000000100000000010000000001000000000
V-0: Rank   0 Line  2166 Shifting ranks by 1 for each phase.
160 tasks, 800000000 files
Continue stonewall hit min: 2888421 max: 3102169 avg: 3022274.6 


SUMMARY rate: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation             :    1462020.867    1462020.474    1462020.662          0.100
   File stat                 :          0.000          0.000          0.000          0.000
   File read                 :          0.000          0.000          0.000          0.000
   File removal              :          0.000          0.000          0.000          0.000
   File create (stonewall)   :             NA             NA    1624333.428             NA
   Tree creation             :          0.435          0.435          0.435          0.000
   Tree removal              :          0.000          0.000          0.000          0.000

SUMMARY time: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation             :        339.494        339.494        339.494          0.000
   File stat                 :          0.000          0.000          0.000          0.000
   File read                 :          0.000          0.000          0.000          0.000
   File removal              :          0.000          0.000          0.000          0.000
   File create (stonewall)   :             NA             NA        297.700             NA
   Tree creation             :          2.300          2.300          2.300          0.000
   Tree removal              :          0.000          0.000          0.000          0.000
-- finished at 07/14/2020 01:08:58 --

mdtest_hard_delete
-- started at 07/14/2020 02:02:51 --

mdtest-3.3.0+dev was launched with 160 total task(s) on 10 node(s)
Command line used: /home/vef/git/io500-app/bin/mdtest '-r' '-t' '-F' '-P' '-w' '3901' '-e' '3901' '-d' '/tmp/vef_mountdir/io500-scr/mdt_hard' '-n' '1000000' '-x' '/tmp/vef_mountdir/io500-scr/mdt_hard-stonewall' '-a' 'POSIX' '-N' '1'
Path: /tmp/vef_mountdir/io500-scr
FS: 83.2 TiB   Used FS: 25.8%   Inodes: 0.0 Mi   Used Inodes: -nan%

Nodemap: 1000000000100000000010000000001000000000100000000010000000001000000000100000000010000000001000000000100000000010000000001000000000100000000010000000001000000000
V-0: Rank   0 Line  2166 Shifting ranks by 1 for each phase.
160 tasks, 160000000 files

SUMMARY rate: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation             :          0.000          0.000          0.000          0.000
   File stat                 :          0.000          0.000          0.000          0.000
   File read                 :          0.000          0.000          0.000          0.000
   File removal              :     273356.504     273356.369     273356.435          0.028
   Tree creation             :          0.000          0.000          0.000          0.000
   Tree removal              :          3.469          3.469          3.469          0.000

SUMMARY time: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation             :          0.000          0.000          0.000          0.000
   File stat                 :          0.000          0.000          0.000          0.000
   File read                 :          0.000          0.000          0.000          0.000
   File removal              :        288.185        288.185        288.185          0.000
   Tree creation             :          0.000          0.000          0.000          0.000
   Tree removal              :          0.288          0.288          0.288          0.000
-- finished at 07/14/2020 02:07:40 --

mdtest_hard_read
-- started at 07/14/2020 01:58:12 --

mdtest-3.3.0+dev was launched with 160 total task(s) on 10 node(s)
Command line used: /home/vef/git/io500-app/bin/mdtest '-X' '-E' '-t' '-F' '-P' '-w' '3901' '-e' '3901' '-d' '/tmp/vef_mountdir/io500-scr/mdt_hard' '-n' '1000000' '-x' '/tmp/vef_mountdir/io500-scr/mdt_hard-stonewall' '-a' 'POSIX' '-N' '1'
Path: /tmp/vef_mountdir/io500-scr
FS: 83.2 TiB   Used FS: 25.8%   Inodes: 0.0 Mi   Used Inodes: -nan%

Nodemap: 1000000000100000000010000000001000000000100000000010000000001000000000100000000010000000001000000000100000000010000000001000000000100000000010000000001000000000
V-0: Rank   0 Line  2166 Shifting ranks by 1 for each phase.
160 tasks, 160000000 files

SUMMARY rate: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation             :          0.000          0.000          0.000          0.000
   File stat                 :          0.000          0.000          0.000          0.000
   File read                 :     284090.092     284090.035     284090.065          0.014
   File removal              :          0.000          0.000          0.000          0.000
   Tree creation             :          0.000          0.000          0.000          0.000
   Tree removal              :          0.000          0.000          0.000          0.000

SUMMARY time: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation             :          0.000          0.000          0.000          0.000
   File stat                 :          0.000          0.000          0.000          0.000
   File read                 :        277.297        277.297        277.297          0.000
   File removal              :          0.000          0.000          0.000          0.000
   Tree creation             :          0.000          0.000          0.000          0.000
   Tree removal              :          0.000          0.000          0.000          0.000
-- finished at 07/14/2020 02:02:49 --

mdtest_hard_stat
-- started at 07/14/2020 01:46:25 --

mdtest-3.3.0+dev was launched with 160 total task(s) on 10 node(s)
Command line used: /home/vef/git/io500-app/bin/mdtest '-T' '-t' '-F' '-P' '-w' '3901' '-e' '3901' '-d' '/tmp/vef_mountdir/io500-scr/mdt_hard' '-n' '1000000' '-x' '/tmp/vef_mountdir/io500-scr/mdt_hard-stonewall' '-a' 'POSIX' '-N' '1'
Path: /tmp/vef_mountdir/io500-scr
FS: 83.2 TiB   Used FS: 25.8%   Inodes: 0.0 Mi   Used Inodes: -nan%

Nodemap: 1000000000100000000010000000001000000000100000000010000000001000000000100000000010000000001000000000100000000010000000001000000000100000000010000000001000000000
V-0: Rank   0 Line  2166 Shifting ranks by 1 for each phase.
160 tasks, 160000000 files

SUMMARY rate: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation             :          0.000          0.000          0.000          0.000
   File stat                 :    1493590.739    1493588.415    1493589.643          0.580
   File read                 :          0.000          0.000          0.000          0.000
   File removal              :          0.000          0.000          0.000          0.000
   Tree creation             :          0.000          0.000          0.000          0.000
   Tree removal              :          0.000          0.000          0.000          0.000

SUMMARY time: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation             :          0.000          0.000          0.000          0.000
   File stat                 :         52.744         52.744         52.744          0.000
   File read                 :          0.000          0.000          0.000          0.000
   File removal              :          0.000          0.000          0.000          0.000
   Tree creation             :          0.000          0.000          0.000          0.000
   Tree removal              :          0.000          0.000          0.000          0.000
-- finished at 07/14/2020 01:47:18 --

mdtest_hard_write
-- started at 07/14/2020 01:14:03 --

mdtest-3.3.0+dev was launched with 160 total task(s) on 10 node(s)
Command line used: /home/vef/git/io500-app/bin/mdtest '-Y' '-C' '-t' '-F' '-P' '-w' '3901' '-e' '3901' '-d' '/tmp/vef_mountdir/io500-scr/mdt_hard' '-n' '1000000' '-x' '/tmp/vef_mountdir/io500-scr/mdt_hard-stonewall' '-a' 'POSIX' '-N' '1' '-W' '300'
Path: /tmp/vef_mountdir/io500-scr
FS: 83.2 TiB   Used FS: 25.4%   Inodes: 0.0 Mi   Used Inodes: -nan%

Nodemap: 1000000000100000000010000000001000000000100000000010000000001000000000100000000010000000001000000000100000000010000000001000000000100000000010000000001000000000
V-0: Rank   0 Line  2166 Shifting ranks by 1 for each phase.
160 tasks, 160000000 files
Continue stonewall hit min: 477049 max: 492358 avg: 484043.5 


SUMMARY rate: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation             :     249105.382     249105.258     249105.301          0.035
   File stat                 :          0.000          0.000          0.000          0.000
   File read                 :          0.000          0.000          0.000          0.000
   File removal              :          0.000          0.000          0.000          0.000
   File create (stonewall)   :             NA             NA     258156.614             NA
   Tree creation             :       1949.835       1949.835       1949.835          0.000
   Tree removal              :          0.000          0.000          0.000          0.000

SUMMARY time: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation             :        316.241        316.241        316.241          0.000
   File stat                 :          0.000          0.000          0.000          0.000
   File read                 :          0.000          0.000          0.000          0.000
   File removal              :          0.000          0.000          0.000          0.000
   File create (stonewall)   :             NA             NA        300.000             NA
   Tree creation             :          0.001          0.001          0.001          0.000
   Tree removal              :          0.000          0.000          0.000          0.000
-- finished at 07/14/2020 01:19:22 --

result_summary
[RESULT] BW   phase 1            ior_easy_write               63.111 GiB/s : time 327.36 seconds
[RESULT] IOPS phase 1         mdtest_easy_write             1462.020 kiops : time 339.49 seconds
[RESULT] BW   phase 2            ior_hard_write                2.867 GiB/s : time 300.11 seconds
[RESULT] IOPS phase 2         mdtest_hard_write              249.105 kiops : time 316.24 seconds
[RESULT] IOPS phase 3                      find             3542.870 kiops : time 162.33 seconds
[RESULT] BW   phase 3             ior_easy_read               22.486 GiB/s : time 918.77 seconds
[RESULT] IOPS phase 4          mdtest_easy_stat             1454.740 kiops : time 341.19 seconds
[RESULT] BW   phase 4             ior_hard_read                4.460 GiB/s : time 192.87 seconds
[RESULT] IOPS phase 5          mdtest_hard_stat             1493.590 kiops : time  52.74 seconds
[RESULT] IOPS phase 6        mdtest_easy_delete              782.981 kiops : time 633.92 seconds
[RESULT] IOPS phase 7          mdtest_hard_read              284.090 kiops : time 277.30 seconds
[RESULT] IOPS phase 8        mdtest_hard_delete              273.357 kiops : time 290.65 seconds
[SCORE] Bandwidth 11.6068 GiB/s : IOPS 801.596 kiops : TOTAL 96.4571