Palmetto

Institution Clemson University
Client Procs Per Node
Client Operating System Oracle Linux
Client Operating System Version 7.5
Client Kernel Version 3.10.0-862.9.1.el7.x86_64

DATA SERVER

Storage Type HDD
Volatile Memory 256GB
Storage Interface SATA
Network InfiniBand FDR
Software Version 7.1
OS Version 7.5

INFORMATION

Client Nodes 32
Client Total Procs 256
Metadata Nodes 6
Metadata Storage Devices 2
Data Nodes 6
Data Storage Devices 10

METADATA

Easy Write 24.38 kIOP/s
Easy Stat 400.59 kIOP/s
Easy Delete 11.89 kIOP/s
Hard Write 4.98 kIOP/s
Hard Read 14.65 kIOP/s
Hard Stat 69.89 kIOP/s
Hard Delete 3.00 kIOP/s

Submitted Files

io500
#!/bin/bash
#
# INSTRUCTIONS:
# Edit this file as needed for your machine.
# This simplified version is just for running on a single node.
# It is a simplified version of the site-configs/sandia/startup.sh which includes SLURM directives.
# Most of the variables set in here are needed for io500_fixed.sh which gets sourced at the end of this.
# Please also edit 'extra_description' function.
#set -x

# Scale factor: number of processes per node; defaults to 1 when the
# script is invoked without an argument.
SCALE=${1:-1}

# Total MPI process count across the 16-node machinefile.
NP=$(( SCALE * 16 ))

echo "$SCALE processes per node for $NP processes."

set -euo pipefail  # abort on errors, unset variables, and pipeline failures

# Mount point on the BeeGFS scratch filesystem where all benchmark data lives.
export OFS_MOUNT=/scratch4/jburto2

# turn these to True successively while you debug and tune this benchmark.
# for each one that you turn to true, go and edit the appropriate function.
# to find the function name, see the 'main' function.
# These are listed in the order that they run.
io500_run_ior_easy="True" # does the write phase and enables the subsequent read
io500_run_md_easy="True"  # does the creat phase and enables the subsequent stat
io500_run_ior_hard="True" # does the write phase and enables the subsequent read
io500_run_md_hard="True"  # does the creat phase and enables the subsequent read
io500_run_find="True"     # the find phase over the files created above
io500_run_ior_easy_read="True"
io500_run_md_easy_stat="True"
io500_run_ior_hard_read="True"
io500_run_md_hard_stat="True"
io500_run_md_hard_read="True"
io500_run_md_easy_delete="True" # turn this off if you want to just run find by itself
io500_run_md_hard_delete="True" # turn this off if you want to just run find by itself
io500_run_mdreal="True"  # this one is optional
io500_cleanup_workdir="False"  # this flag is currently ignored. You'll need to clean up your data files manually if you want to.
io500_stonewall_timer=300 # Stonewalling timer, stop with wearout after 300s with default test, set to 0, if you never want to abort...


# to run this benchmark, find and edit each of these functions.
# please also edit 'extra_description' function to help us collect the required data.
function main {
  # Configure every phase, then hand off to the fixed benchmark driver.
  setup_directories  # timestamped work/result dirs plus BeeGFS striping
  setup_paths        # binary locations and MPI launch arguments
  setup_ior_easy # required if you want a complete score
  setup_ior_hard # required if you want a complete score
  setup_mdt_easy # required if you want a complete score
  setup_mdt_hard # required if you want a complete score
  setup_find     # required if you want a complete score
  setup_mdreal   # optional
  run_benchmarks     # sources io500_fixed.sh and tees its summary output
}

function setup_directories {
  # Create the per-run working directory (benchmark data) and result
  # directory (summaries), uniquified by a timestamp so runs never collide.
  # BeeGFS stripe tuning for each benchmark subdirectory also happens here.
  timestamp=$(date +%Y.%m.%d-%H.%M.%S)                      # create a uniquifier
  io500_workdir=$OFS_MOUNT/io500/datafiles/io500.$timestamp # where the data is stored
  io500_result_dir=$PWD/results/$timestamp                  # where the results are kept

  mkdir -p "$io500_workdir" "$io500_result_dir"
  mkdir -p "${io500_workdir}/ior_easy" "${io500_workdir}/ior_hard"
  mkdir -p "${io500_workdir}/mdt_easy" "${io500_workdir}/mdt_hard"

  # ior_easy: large chunks, as few targets as will still spread the files evenly.
  beegfs-ctl --setpattern --numtargets=3 --chunksize=4m --mount=/scratch4 "${io500_workdir}/ior_easy"
  # ior_hard: stripe across all 6 OSTs with a 64k chunk — the minimal
  # chunksize that fits one I/O, regardless of RAID stripe.
  beegfs-ctl --setpattern --numtargets=6 --chunksize=64k --mount=/scratch4 "${io500_workdir}/ior_hard"
  # mdtest: no striping, small chunks.
  beegfs-ctl --setpattern --numtargets=1 --chunksize=64k --mount=/scratch4 "${io500_workdir}/mdt_easy"
  beegfs-ctl --setpattern --numtargets=1 --chunksize=64k --mount=/scratch4 "${io500_workdir}/mdt_hard"
}

function setup_paths {
  # Locations of the benchmark binaries (placed in ./bin by
  # ./utilities/prepare.sh) and the MPI launcher configuration.
  io500_ior_cmd="$PWD/bin/ior"
  io500_mdtest_cmd="$PWD/bin/mdtest"
  io500_mdreal_cmd="$PWD/bin/md-real-io"

  io500_mpi_prefix="/usr/lib64/openmpi"
  #io500_mpi_prefix="/home/jburto2/openmpi/1.10.7"
  io500_mpirun="$io500_mpi_prefix/bin/mpirun"

  # Keep MPI traffic on ethernet so the IB fabric stays free for data;
  # the IO-500 phases do little interprocess communication anyway.
  # Mapping by node balances ranks across the machinefile hosts.
  io500_mpiargs="-np $NP --mca btl_tcp_if_exclude ib0 --mca btl ^openib --map-by node --machinefile /home/jburto2/hpccnodelistmpi --prefix $io500_mpi_prefix"
}

function setup_ior_easy {
  # 4 MiB transfers, file per process (-F), POSIX API.
  # 120 GiB per node divided among the $SCALE processes on that node.
  # (The previous comment claiming "2M writes, 128 GB per proc" was stale.)
  io500_ior_easy_size=$(( 120 * 1024 / SCALE ))  # per-process block size in MiB
  io500_ior_easy_params="-t 4m -b ${io500_ior_easy_size}m -F -a POSIX"
}

function setup_mdt_easy {
  # Unique directory per task (-u), files only at leaf level (-L).
  # BeeGFS has no distributed directories, so spreading the load over
  # more directories gives better metadata distribution.
  #  io500_mdtest_easy_params="-z 1 -b 6 -u -L"
  io500_mdtest_easy_params="-u -L"
  io500_mdtest_easy_files_per_proc=800000
}

function setup_ior_hard {
  # A single process per node buffers significantly better, so the full
  # per-process write count is used at SCALE=1 and divided down otherwise.
  if [ "$SCALE" == "1" ]; then
    io500_ior_hard_writes_per_proc=2200000
  else
    io500_ior_hard_writes_per_proc=$(( 2200000 / SCALE ))
  fi

  io500_ior_hard_other_options=" -a POSIX"
}

function setup_mdt_hard {
  # This phase is storage bound rather than metadata bound, so multiple
  # directories would help only marginally.
  io500_mdtest_hard_files_per_proc=$(( 150000 / SCALE ))
  io500_mdtest_hard_other_options=""
  # NOTE(review): the original also assigned io500_mdtest_files_per_proc
  # (no "_hard_") with the same value; that name appears to be a leftover
  # typo and has been dropped — confirm nothing sources it.
}

function setup_find {
  # Choose the implementation of the find phase — innovation is allowed
  # here.  The distribution offers: a serial sfind.sh (very slow), a
  # parallel Python version, a parallel C version (bin/pfind — build it
  # with build/pfind/prepare.sh + compile.sh and copy into ./bin), and a
  # bin/mmfind.sh wrapper recommended for GPFS systems.
  #
  # With io500_find_mpi="True" the command runs under the same MPI node
  # and rank count as the other phases.  To use a different (often
  # smaller) rank count, set io500_find_mpi="False" and point
  # io500_find_cmd at a wrapper script that launches MPI as desired.
  # If a custom approach is used, provide enough info to reproduce it.

  # Parallel C pfind; -s adds a stonewall timer (for a real run, turn it
  # off or set it to 300 or more).
  io500_find_mpi="True"
  io500_find_cmd="$PWD/bin/pfind"
  io500_find_cmd_args="-s 10000 -r $io500_result_dir/pfind_results"
}

function setup_mdreal {
  # Parameters for the optional md-real-io phase.
  io500_mdreal_params='-P=5000 -I=1000'
}

function run_benchmarks {
  # Source the fixed benchmark driver — do not change it; if a change
  # seems necessary, email the mailing list to discuss.  All output
  # (stdout and stderr) is mirrored into the per-run summary file.
  source ./bin/io500_fixed.sh 2>&1 | tee "$io500_result_dir/io-500-summary.$timestamp.txt"
}

# Add key/value pairs defining your system 
# Feel free to add extra ones if you'd like
function extra_description {
  # Key/value metadata describing the system, reported with the results.
  # top level info
  io500_info_system_name='Palmetto scratch4'      # e.g. Oakforest-PACS
  io500_info_institute_name='Clemson University'   # e.g. JCAHPC
  io500_info_storage_age_in_months='0' # not install date but age since last refresh
  io500_info_storage_install_date='01/15'  # MM/YY
  io500_info_storage_refresh_date='10/18'  # MM/YY
  # NOTE(review): key is spelled 'filesysem' (missing 't') — confirm what the
  # harness expects before renaming, since consumers may match this exact key.
  io500_info_filesysem='BeeGFS'     # e.g. BeeGFS, DataWarp, GPFS, IME, Lustre
  io500_info_filesystem_version='7.1'
  # client side info
  # NOTE(review): SCALE*16 is the same formula as the total process count NP,
  # not a node count — verify whether nodes or processes is intended here.
  io500_info_num_client_nodes="$(( ${SCALE} * 16 ))"
  # NOTE(review): hard-coded '1' disagrees with the $SCALE processes per node
  # used elsewhere in this script — confirm.
  io500_info_procs_per_node="1"
  # server side info
  io500_info_num_metadata_server_nodes='6'
  io500_info_num_data_server_nodes='6'
  io500_info_num_data_storage_devices='60'  # if you have 5 data servers, and each has 5 drives, then this number is 25
  io500_info_num_metadata_storage_devices='12'  # if you have 2 metadata servers, and each has 5 drives, then this number is 10
  io500_info_data_storage_type='HDD' # HDD, SSD, persistent memory, etc, feel free to put specific models
  io500_info_metadata_storage_type='SSD' # HDD, SSD, persistent memory, etc, feel free to put specific models
  io500_info_storage_network='infiniband' # infiniband, omnipath, ethernet, etc
  io500_info_storage_interface='SATA' # SAS, SATA, NVMe, etc
  # miscellaneous
  io500_info_whatever='infiniband'
}

# Run the whole benchmark suite.
main
ior_easy_read
IOR-3.2.0: MPI Coordinated Test of Parallel I/O
Began               : Thu Oct 25 11:10:13 2018
Command line        : /home/jburto2/io-500-dev/bin/ior -r -R -C -Q 1 -g -G 27 -k -e -t 4m -b 7680m -F -a POSIX -o /scratch4/jburto2/io500/datafiles/io500.2018.10.25-09.23.19/ior_easy/ior_file_easy -O stoneWallingStatusFile=/scratch4/jburto2/io500/datafiles/io500.2018.10.25-09.23.19/ior_easy/stonewall
Machine             : Linux ofstest008.ofsdev.clemson.edu
TestID              : 0
StartTime           : Thu Oct 25 11:10:13 2018
Path                : /scratch4/jburto2/io500/datafiles/io500.2018.10.25-09.23.19/ior_easy
FS                  : 174.6 TiB   Used FS: 29.1%   Inodes: 0.0 Mi   Used Inodes: -nan%

Options: 
api                 : POSIX
apiVersion          : 
test filename       : /scratch4/jburto2/io500/datafiles/io500.2018.10.25-09.23.19/ior_easy/ior_file_easy
access              : file-per-process
type                : independent
segments            : 1
ordering in a file  : sequential
ordering inter file : constant task offset
task offset         : 1
tasks               : 256
clients per node    : 7
repetitions         : 1
xfersize            : 4 MiB
blocksize           : 7.50 GiB
aggregate filesize  : 1.88 TiB

Results: 

access    bw(MiB/s)  block(KiB) xfer(KiB)  open(s)    wr/rd(s)   close(s)   total(s)   iter
------    ---------  ---------- ---------  --------   --------   --------   --------   ----
WARNING: Expected aggregate file size       = 2061584302080.
WARNING: Stat() of aggregate file size      = 2019708370944.
WARNING: Using actual aggregate bytes moved = 2019708370944.
read      2774.45    7864320    4096       0.032010   694.17     0.051578   694.24     0   
Max Read:  2774.45 MiB/sec (2909.23 MB/sec)

Summary of all tests:
Operation   Max(MiB)   Min(MiB)  Mean(MiB)     StdDev   Max(OPs)   Min(OPs)  Mean(OPs)     StdDev    Mean(s) Test# #Tasks tPN reps fPP reord reordoff reordrand seed segcnt   blksiz    xsize aggs(MiB)   API RefNum
read         2774.45    2774.45    2774.45       0.00     693.61     693.61     693.61       0.00  694.24233     0    256   7    1   1     1        1         0    0      1 8053063680  4194304 1926144.0 POSIX      0
Finished            : Thu Oct 25 11:21:47 2018
ior_easy_write
IOR-3.2.0: MPI Coordinated Test of Parallel I/O
Began               : Thu Oct 25 09:23:23 2018
Command line        : /home/jburto2/io-500-dev/bin/ior -w -C -Q 1 -g -G 27 -k -e -t 4m -b 7680m -F -a POSIX -o /scratch4/jburto2/io500/datafiles/io500.2018.10.25-09.23.19/ior_easy/ior_file_easy -O stoneWallingStatusFile=/scratch4/jburto2/io500/datafiles/io500.2018.10.25-09.23.19/ior_easy/stonewall -O stoneWallingWearOut=1 -D 300
Machine             : Linux ofstest008.ofsdev.clemson.edu
TestID              : 0
StartTime           : Thu Oct 25 09:23:23 2018
Path                : /scratch4/jburto2/io500/datafiles/io500.2018.10.25-09.23.19/ior_easy
FS                  : 174.6 TiB   Used FS: 27.8%   Inodes: 0.0 Mi   Used Inodes: -nan%

Options: 
api                 : POSIX
apiVersion          : 
test filename       : /scratch4/jburto2/io500/datafiles/io500.2018.10.25-09.23.19/ior_easy/ior_file_easy
access              : file-per-process
type                : independent
segments            : 1
ordering in a file  : sequential
ordering inter file : constant task offset
task offset         : 1
tasks               : 256
clients per node    : 7
repetitions         : 1
xfersize            : 4 MiB
blocksize           : 7.50 GiB
aggregate filesize  : 1.88 TiB
stonewallingTime    : 300
stoneWallingWearOut : 1

Results: 

access    bw(MiB/s)  block(KiB) xfer(KiB)  open(s)    wr/rd(s)   close(s)   total(s)   iter
------    ---------  ---------- ---------  --------   --------   --------   --------   ----
stonewalling pairs accessed min: 1324 max: 1881 -- min data: 5.2 GiB mean data: 6.4 GiB time: 301.2s
WARNING: Expected aggregate file size       = 2061584302080.
WARNING: Stat() of aggregate file size      = 2019708370944.
WARNING: Using actual aggregate bytes moved = 2019708370944.
WARNING: maybe caused by deadlineForStonewalling
write     3249.50    7864320    4096       0.386224   592.11     0.261091   592.75     0   
Max Write: 3249.50 MiB/sec (3407.34 MB/sec)

Summary of all tests:
Operation   Max(MiB)   Min(MiB)  Mean(MiB)     StdDev   Max(OPs)   Min(OPs)  Mean(OPs)     StdDev    Mean(s) Test# #Tasks tPN reps fPP reord reordoff reordrand seed segcnt   blksiz    xsize aggs(MiB)   API RefNum
write        3249.50    3249.50    3249.50       0.00     812.37     812.37     812.37       0.00  592.75175     0    256   7    1   1     1        1         0    0      1 8053063680  4194304 1926144.0 POSIX      0
Finished            : Thu Oct 25 09:33:18 2018
ior_hard_read
IOR-3.2.0: MPI Coordinated Test of Parallel I/O
Began               : Thu Oct 25 11:24:00 2018
Command line        : /home/jburto2/io-500-dev/bin/ior -r -R -C -Q 1 -g -G 27 -k -e -t 47008 -b 47008 -s 137500 -a POSIX -o /scratch4/jburto2/io500/datafiles/io500.2018.10.25-09.23.19/ior_hard/IOR_file -O stoneWallingStatusFile=/scratch4/jburto2/io500/datafiles/io500.2018.10.25-09.23.19/ior_hard/stonewall
Machine             : Linux ofstest008.ofsdev.clemson.edu
TestID              : 0
StartTime           : Thu Oct 25 11:24:00 2018
Path                : /scratch4/jburto2/io500/datafiles/io500.2018.10.25-09.23.19/ior_hard
FS                  : 174.6 TiB   Used FS: 29.1%   Inodes: 0.0 Mi   Used Inodes: -nan%

Options: 
api                 : POSIX
apiVersion          : 
test filename       : /scratch4/jburto2/io500/datafiles/io500.2018.10.25-09.23.19/ior_hard/IOR_file
access              : single-shared-file
type                : independent
segments            : 137500
ordering in a file  : sequential
ordering inter file : constant task offset
task offset         : 1
tasks               : 256
clients per node    : 7
repetitions         : 1
xfersize            : 47008 bytes
blocksize           : 47008 bytes
aggregate filesize  : 1.50 TiB

Results: 

access    bw(MiB/s)  block(KiB) xfer(KiB)  open(s)    wr/rd(s)   close(s)   total(s)   iter
------    ---------  ---------- ---------  --------   --------   --------   --------   ----
WARNING: Expected aggregate file size       = 1654681600000.
WARNING: Stat() of aggregate file size      = 564432953344.
WARNING: Using actual aggregate bytes moved = 564432953344.
read      906.34     45.91      45.91      0.097132   593.80     0.028676   593.91     0   
Max Read:  906.34 MiB/sec (950.36 MB/sec)

Summary of all tests:
Operation   Max(MiB)   Min(MiB)  Mean(MiB)     StdDev   Max(OPs)   Min(OPs)  Mean(OPs)     StdDev    Mean(s) Test# #Tasks tPN reps fPP reord reordoff reordrand seed segcnt   blksiz    xsize aggs(MiB)   API RefNum
read          906.34     906.34     906.34       0.00   20217.04   20217.04   20217.04       0.00  593.91327     0    256   7    1   0     1        1         0    0 137500    47008    47008  538285.2 POSIX      0
Finished            : Thu Oct 25 11:33:54 2018
ior_hard_write
IOR-3.2.0: MPI Coordinated Test of Parallel I/O
Began               : Thu Oct 25 10:07:51 2018
Command line        : /home/jburto2/io-500-dev/bin/ior -w -C -Q 1 -g -G 27 -k -e -t 47008 -b 47008 -s 137500 -a POSIX -o /scratch4/jburto2/io500/datafiles/io500.2018.10.25-09.23.19/ior_hard/IOR_file -O stoneWallingStatusFile=/scratch4/jburto2/io500/datafiles/io500.2018.10.25-09.23.19/ior_hard/stonewall -O stoneWallingWearOut=1 -D 300
Machine             : Linux ofstest008.ofsdev.clemson.edu
TestID              : 0
StartTime           : Thu Oct 25 10:07:51 2018
Path                : /scratch4/jburto2/io500/datafiles/io500.2018.10.25-09.23.19/ior_hard
FS                  : 174.6 TiB   Used FS: 28.8%   Inodes: 0.0 Mi   Used Inodes: -nan%

Options: 
api                 : POSIX
apiVersion          : 
test filename       : /scratch4/jburto2/io500/datafiles/io500.2018.10.25-09.23.19/ior_hard/IOR_file
access              : single-shared-file
type                : independent
segments            : 137500
ordering in a file  : sequential
ordering inter file : constant task offset
task offset         : 1
tasks               : 256
clients per node    : 7
repetitions         : 1
xfersize            : 47008 bytes
blocksize           : 47008 bytes
aggregate filesize  : 1.50 TiB
stonewallingTime    : 300
stoneWallingWearOut : 1

Results: 

access    bw(MiB/s)  block(KiB) xfer(KiB)  open(s)    wr/rd(s)   close(s)   total(s)   iter
------    ---------  ---------- ---------  --------   --------   --------   --------   ----
stonewalling pairs accessed min: 69 max: 46903 -- min data: 0.0 GiB mean data: 0.4 GiB time: 303.3s
WARNING: Expected aggregate file size       = 1654681600000.
WARNING: Stat() of aggregate file size      = 564432953344.
WARNING: Using actual aggregate bytes moved = 564432953344.
WARNING: maybe caused by deadlineForStonewalling
write     163.23     45.91      45.91      0.271099   3296.96    0.413501   3297.64    0   
Max Write: 163.23 MiB/sec (171.16 MB/sec)

Summary of all tests:
Operation   Max(MiB)   Min(MiB)  Mean(MiB)     StdDev   Max(OPs)   Min(OPs)  Mean(OPs)     StdDev    Mean(s) Test# #Tasks tPN reps fPP reord reordoff reordrand seed segcnt   blksiz    xsize aggs(MiB)   API RefNum
write         163.23     163.23     163.23       0.00    3641.14    3641.14    3641.14       0.00 3297.63526     0    256   7    1   0     1        1         0    0 137500    47008    47008  538285.2 POSIX      0
Finished            : Thu Oct 25 11:02:49 2018
mdtest_easy_delete
-- started at 10/25/2018 11:34:26 --

mdtest-1.9.3 was launched with 256 total task(s) on 36 node(s)
Command line used: /home/jburto2/io-500-dev/bin/mdtest "-r" "-F" "-d" "/scratch4/jburto2/io500/datafiles/io500.2018.10.25-09.23.19/mdt_easy" "-n" "800000" "-u" "-L" "-x" "/scratch4/jburto2/io500/datafiles/io500.2018.10.25-09.23.19/mdt_easy-stonewall"
Path: /scratch4/jburto2/io500/datafiles/io500.2018.10.25-09.23.19
FS: 174.6 TiB   Used FS: 29.1%   Inodes: 0.0 Mi   Used Inodes: -nan%

256 tasks, 204800000 files

SUMMARY rate: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation     :          0.000          0.000          0.000          0.000
   File stat         :          0.000          0.000          0.000          0.000
   File read         :          0.000          0.000          0.000          0.000
   File removal      :      11887.045      11887.045      11887.045          0.000
   Tree creation     :          0.000          0.000          0.000          0.000
   Tree removal      :          0.054          0.054          0.054          0.000

-- finished at 10/25/2018 12:45:22 --
mdtest_easy_stat
-- started at 10/25/2018 11:21:51 --

mdtest-1.9.3 was launched with 256 total task(s) on 36 node(s)
Command line used: /home/jburto2/io-500-dev/bin/mdtest "-T" "-F" "-d" "/scratch4/jburto2/io500/datafiles/io500.2018.10.25-09.23.19/mdt_easy" "-n" "800000" "-u" "-L" "-x" "/scratch4/jburto2/io500/datafiles/io500.2018.10.25-09.23.19/mdt_easy-stonewall"
Path: /scratch4/jburto2/io500/datafiles/io500.2018.10.25-09.23.19
FS: 174.6 TiB   Used FS: 29.1%   Inodes: 0.0 Mi   Used Inodes: -nan%

256 tasks, 204800000 files

SUMMARY rate: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation     :          0.000          0.000          0.000          0.000
   File stat         :     400587.011     400587.011     400587.011          0.000
   File read         :          0.000          0.000          0.000          0.000
   File removal      :          0.000          0.000          0.000          0.000
   Tree creation     :          0.000          0.000          0.000          0.000
   Tree removal      :          0.000          0.000          0.000          0.000

-- finished at 10/25/2018 11:23:57 --
mdtest_easy_write
-- started at 10/25/2018 09:33:22 --

mdtest-1.9.3 was launched with 256 total task(s) on 36 node(s)
Command line used: /home/jburto2/io-500-dev/bin/mdtest "-C" "-F" "-d" "/scratch4/jburto2/io500/datafiles/io500.2018.10.25-09.23.19/mdt_easy" "-n" "800000" "-u" "-L" "-x" "/scratch4/jburto2/io500/datafiles/io500.2018.10.25-09.23.19/mdt_easy-stonewall" "-W" "300"
Path: /scratch4/jburto2/io500/datafiles/io500.2018.10.25-09.23.19
FS: 174.6 TiB   Used FS: 28.8%   Inodes: 0.0 Mi   Used Inodes: -nan%

256 tasks, 204800000 files
stonewall rank 169: 111361 of 196773 
stonewall rank 243: 104705 of 196773 
stonewall rank 206: 71014 of 196773 
stonewall rank 95: 39322 of 196773 
stonewall rank 132: 65188 of 196773 
stonewall rank 58: 56682 of 196773 
stonewall rank 163: 62758 of 196773 
stonewall rank 1: 61676 of 196773 
stonewall rank 7: 64606 of 196773 
stonewall rank 75: 99754 of 196773 
stonewall rank 3: 120044 of 196773 
stonewall rank 111: 64171 of 196773 
stonewall rank 237: 60786 of 196773 
stonewall rank 77: 84466 of 196773 
stonewall rank 81: 99532 of 196773 
stonewall rank 149: 75099 of 196773 
stonewall rank 69: 59904 of 196773 
stonewall rank 185: 122326 of 196773 
stonewall rank 151: 120007 of 196773 
stonewall rank 155: 65029 of 196773 
stonewall rank 106: 56257 of 196773 
stonewall rank 37: 122900 of 196773 
stonewall rank 225: 126360 of 196773 
stonewall rank 165: 56742 of 196773 
stonewall rank 15: 56342 of 196773 
stonewall rank 143: 75977 of 196773 
stonewall rank 222: 63317 of 196773 
stonewall rank 167: 56098 of 196773 
stonewall rank 229: 99897 of 196773 
stonewall rank 186: 94730 of 196773 
stonewall rank 89: 62471 of 196773 
stonewall rank 217: 76456 of 196773 
stonewall rank 74: 39240 of 196773 
stonewall rank 73: 62349 of 196773 
stonewall rank 223: 74972 of 196773 
stonewall rank 254: 71194 of 196773 
stonewall rank 61: 55311 of 196773 
stonewall rank 148: 56803 of 196773 
stonewall rank 114: 39932 of 196773 
stonewall rank 38: 74844 of 196773 
stonewall rank 126: 60933 of 196773 
stonewall rank 32: 60134 of 196773 
stonewall rank 33: 62010 of 196773 
stonewall rank 188: 56135 of 196773 
stonewall rank 44: 99470 of 196773 
stonewall rank 45: 72938 of 196773 
stonewall rank 239: 76380 of 196773 
stonewall rank 180: 71861 of 196773 
stonewall rank 189: 57654 of 196773 
stonewall rank 40: 129643 of 196773 
stonewall rank 118: 63264 of 196773 
stonewall rank 9: 56187 of 196773 
stonewall rank 52: 38936 of 196773 
stonewall rank 85: 62195 of 196773 
stonewall rank 135: 55589 of 196773 
stonewall rank 187: 58657 of 196773 
stonewall rank 93: 100045 of 196773 
stonewall rank 71: 104405 of 196773 
stonewall rank 200: 159441 of 196773 
stonewall rank 193: 70551 of 196773 
stonewall rank 209: 58522 of 196773 
stonewall rank 226: 56708 of 196773 
stonewall rank 147: 62139 of 196773 
stonewall rank 83: 61361 of 196773 
stonewall rank 172: 62201 of 196773 
stonewall rank 39: 61652 of 196773 
stonewall rank 241: 55524 of 196773 
stonewall rank 246: 72015 of 196773 
stonewall rank 145: 88713 of 196773 
stonewall rank 221: 38907 of 196773 
stonewall rank 181: 74717 of 196773 
stonewall rank 19: 78263 of 196773 
stonewall rank 98: 71495 of 196773 
stonewall rank 17: 61504 of 196773 
stonewall rank 119: 59316 of 196773 
stonewall rank 110: 62502 of 196773 
stonewall rank 192: 62805 of 196773 
stonewall rank 41: 59263 of 196773 
stonewall rank 159: 58550 of 196773 
stonewall rank 157: 61358 of 196773 
stonewall rank 130: 59953 of 196773 
stonewall rank 24: 58932 of 196773 
stonewall rank 127: 39214 of 196773 
stonewall rank 182: 83850 of 196773 
stonewall rank 36: 62919 of 196773 
stonewall rank 255: 60682 of 196773 
stonewall rank 231: 57061 of 196773 
stonewall rank 233: 79265 of 196773 
stonewall rank 230: 70089 of 196773 
stonewall rank 219: 71287 of 196773 
stonewall rank 204: 55400 of 196773 
stonewall rank 115: 57050 of 196773 
stonewall rank 91: 62138 of 196773 
stonewall rank 201: 57160 of 196773 
stonewall rank 184: 57002 of 196773 
stonewall rank 70: 43203 of 196773 
stonewall rank 5: 42742 of 196773 
stonewall rank 11: 110278 of 196773 
stonewall rank 57: 93789 of 196773 
stonewall rank 112: 62989 of 196773 
stonewall rank 56: 59712 of 196773 
stonewall rank 113: 79054 of 196773 
stonewall rank 202: 60578 of 196773 
stonewall rank 53: 56771 of 196773 
stonewall rank 65: 56941 of 196773 
stonewall rank 42: 58321 of 196773 
stonewall rank 107: 56235 of 196773 
stonewall rank 122: 61832 of 196773 
stonewall rank 46: 60777 of 196773 
stonewall rank 196: 120458 of 196773 
stonewall rank 82: 75089 of 196773 
stonewall rank 34: 64248 of 196773 
stonewall rank 139: 56153 of 196773 
stonewall rank 99: 60206 of 196773 
stonewall rank 54: 60998 of 196773 
stonewall rank 4: 93346 of 196773 
stonewall rank 48: 56987 of 196773 
stonewall rank 79: 58540 of 196773 
stonewall rank 238: 39729 of 196773 
stonewall rank 213: 98478 of 196773 
stonewall rank 194: 78874 of 196773 
stonewall rank 218: 61412 of 196773 
stonewall rank 102: 72764 of 196773 
stonewall rank 108: 63030 of 196773 
Continue stonewall hit min: 38421 max: 196773 avg: 67288.2 
stonewall rank 0: 61876 of 196773 
stonewall rank 250: 72912 of 196773 
stonewall rank 153: 54767 of 196773 
stonewall rank 8: 59686 of 196773 
stonewall rank 176: 73020 of 196773 
stonewall rank 78: 93040 of 196773 
stonewall rank 63: 56909 of 196773 
stonewall rank 117: 38823 of 196773 
stonewall rank 28: 58833 of 196773 
stonewall rank 190: 55382 of 196773 
stonewall rank 90: 40103 of 196773 
stonewall rank 144: 73809 of 196773 
stonewall rank 120: 61861 of 196773 
stonewall rank 156: 57358 of 196773 
stonewall rank 227: 58330 of 196773 
stonewall rank 152: 70999 of 196773 
stonewall rank 137: 92495 of 196773 
stonewall rank 121: 61434 of 196773 
stonewall rank 161: 84720 of 196773 
stonewall rank 164: 57930 of 196773 
stonewall rank 173: 60254 of 196773 
stonewall rank 211: 38421 of 196773 
stonewall rank 55: 73192 of 196773 
stonewall rank 116: 55495 of 196773 
stonewall rank 16: 52360 of 196773 
stonewall rank 26: 38657 of 196773 
stonewall rank 247: 43160 of 196773 
stonewall rank 235: 58480 of 196773 
stonewall rank 2: 59011 of 196773 
stonewall rank 100: 83617 of 196773 
stonewall rank 210: 70242 of 196773 
stonewall rank 59: 74472 of 196773 
stonewall rank 150: 80289 of 196773 
stonewall rank 174: 57753 of 196773 
stonewall rank 25: 43251 of 196773 
stonewall rank 13: 80818 of 196773 
stonewall rank 131: 75763 of 196773 
stonewall rank 87: 58592 of 196773 
stonewall rank 62: 55866 of 196773 
stonewall rank 248: 63352 of 196773 
stonewall rank 123: 57028 of 196773 
stonewall rank 128: 60887 of 196773 
stonewall rank 136: 63746 of 196773 
stonewall rank 133: 55898 of 196773 
stonewall rank 205: 94351 of 196773 
stonewall rank 50: 39141 of 196773 
stonewall rank 191: 58688 of 196773 
stonewall rank 207: 55849 of 196773 
stonewall rank 168: 75908 of 196773 
stonewall rank 195: 56246 of 196773 
stonewall rank 198: 58227 of 196773 
stonewall rank 242: 58714 of 196773 
stonewall rank 22: 65560 of 196773 
stonewall rank 129: 62647 of 196773 
stonewall rank 67: 58411 of 196773 
stonewall rank 20: 76057 of 196773 
stonewall rank 170: 43802 of 196773 
stonewall rank 35: 41575 of 196773 
stonewall rank 124: 63808 of 196773 
stonewall rank 197: 64014 of 196773 
stonewall rank 141: 70506 of 196773 
stonewall rank 76: 81296 of 196773 
stonewall rank 215: 38844 of 196773 
stonewall rank 94: 42989 of 196773 
stonewall rank 244: 75514 of 196773 
stonewall rank 224: 81446 of 196773 
stonewall rank 30: 58976 of 196773 
stonewall rank 178: 39142 of 196773 
stonewall rank 234: 43867 of 196773 
stonewall rank 104: 61268 of 196773 
stonewall rank 43: 100005 of 196773 
stonewall rank 96: 62424 of 196773 
stonewall rank 49: 61531 of 196773 
stonewall rank 252: 61668 of 196773 
stonewall rank 47: 66493 of 196773 
stonewall rank 103: 60132 of 196773 
stonewall rank 203: 62862 of 196773 
stonewall rank 12: 63771 of 196773 
stonewall rank 109: 61471 of 196773 
stonewall rank 86: 63386 of 196773 
stonewall rank 166: 78993 of 196773 
stonewall rank 160: 43904 of 196773 
stonewall rank 146: 40942 of 196773 
stonewall rank 177: 73292 of 196773 
stonewall rank 23: 63152 of 196773 
stonewall rank 251: 73502 of 196773 
stonewall rank 10: 59740 of 196773 
stonewall rank 29: 84171 of 196773 
stonewall rank 6: 57074 of 196773 
stonewall rank 66: 59393 of 196773 
stonewall rank 158: 58290 of 196773 
stonewall rank 140: 38638 of 196773 
stonewall rank 18: 62412 of 196773 
stonewall rank 214: 63647 of 196773 
stonewall rank 183: 61461 of 196773 
stonewall rank 154: 60764 of 196773 
stonewall rank 232: 59520 of 196773 
stonewall rank 92: 43537 of 196773 
stonewall rank 72: 76154 of 196773 
stonewall rank 84: 103486 of 196773 
stonewall rank 240: 71014 of 196773 
stonewall rank 220: 71083 of 196773 
stonewall rank 228: 71207 of 196773 
stonewall rank 51: 62316 of 196773 
stonewall rank 101: 40552 of 196773 
stonewall rank 171: 60415 of 196773 
stonewall rank 175: 60618 of 196773 
stonewall rank 245: 82962 of 196773 
stonewall rank 249: 78242 of 196773 
stonewall rank 125: 57752 of 196773 
stonewall rank 27: 76609 of 196773 
stonewall rank 97: 82953 of 196773 
stonewall rank 138: 40615 of 196773 
stonewall rank 162: 70333 of 196773 
stonewall rank 212: 78215 of 196773 
stonewall rank 134: 87874 of 196773 
stonewall rank 64: 79029 of 196773 
stonewall rank 80: 99509 of 196773 
stonewall rank 199: 57939 of 196773 
stonewall rank 60: 87683 of 196773 
stonewall rank 105: 60879 of 196773 
stonewall rank 208: 56774 of 196773 
stonewall rank 236: 57834 of 196773 
stonewall rank 14: 62450 of 196773 
stonewall rank 88: 62483 of 196773 
stonewall rank 179: 61303 of 196773 
stonewall rank 253: 61204 of 196773 
stonewall rank 31: 61080 of 196773 
stonewall rank 142: 75922 of 196773 
stonewall rank 68: 60654 of 196773 
stonewall rank 216: 43852 of 196773 

SUMMARY rate: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation     :      99133.094      99133.094      99133.094          0.000
   File stat         :          0.000          0.000          0.000          0.000
   File read         :          0.000          0.000          0.000          0.000
   File removal      :          0.000          0.000          0.000          0.000
   Tree creation     :          2.728          2.728          2.728          0.000
   Tree removal      :          0.000          0.000          0.000          0.000

-- finished at 10/25/2018 10:07:48 --
mdtest_hard_delete
-- started at 10/25/2018 12:47:28 --

mdtest-1.9.3 was launched with 256 total task(s) on 36 node(s)
Command line used: /home/jburto2/io-500-dev/bin/mdtest "-r" "-t" "-F" "-w" "3901" "-e" "3901" "-d" "/scratch4/jburto2/io500/datafiles/io500.2018.10.25-09.23.19/mdt_hard" "-n" "9375" "-x" "/scratch4/jburto2/io500/datafiles/io500.2018.10.25-09.23.19/mdt_hard-stonewall"
Path: /scratch4/jburto2/io500/datafiles/io500.2018.10.25-09.23.19
FS: 174.6 TiB   Used FS: 29.1%   Inodes: 0.0 Mi   Used Inodes: -nan%

256 tasks, 2400000 files

SUMMARY rate: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation     :          0.000          0.000          0.000          0.000
   File stat         :          0.000          0.000          0.000          0.000
   File read         :          0.000          0.000          0.000          0.000
   File removal      :       3000.487       3000.487       3000.487          0.000
   Tree creation     :          0.000          0.000          0.000          0.000
   Tree removal      :          0.459          0.459          0.459          0.000

-- finished at 10/25/2018 12:57:13 --
mdtest_hard_read
-- started at 10/25/2018 12:45:26 --

mdtest-1.9.3 was launched with 256 total task(s) on 36 node(s)
Command line used: /home/jburto2/io-500-dev/bin/mdtest "-E" "-t" "-F" "-w" "3901" "-e" "3901" "-d" "/scratch4/jburto2/io500/datafiles/io500.2018.10.25-09.23.19/mdt_hard" "-n" "9375" "-x" "/scratch4/jburto2/io500/datafiles/io500.2018.10.25-09.23.19/mdt_hard-stonewall"
Path: /scratch4/jburto2/io500/datafiles/io500.2018.10.25-09.23.19
FS: 174.6 TiB   Used FS: 29.1%   Inodes: 0.0 Mi   Used Inodes: -nan%

256 tasks, 2400000 files

SUMMARY rate: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation     :          0.000          0.000          0.000          0.000
   File stat         :          0.000          0.000          0.000          0.000
   File read         :      14652.318      14652.318      14652.318          0.000
   File removal      :          0.000          0.000          0.000          0.000
   Tree creation     :          0.000          0.000          0.000          0.000
   Tree removal      :          0.000          0.000          0.000          0.000

-- finished at 10/25/2018 12:47:25 --
mdtest_hard_stat
-- started at 10/25/2018 11:33:58 --

mdtest-1.9.3 was launched with 256 total task(s) on 36 node(s)
Command line used: /home/jburto2/io-500-dev/bin/mdtest "-T" "-t" "-F" "-w" "3901" "-e" "3901" "-d" "/scratch4/jburto2/io500/datafiles/io500.2018.10.25-09.23.19/mdt_hard" "-n" "9375" "-x" "/scratch4/jburto2/io500/datafiles/io500.2018.10.25-09.23.19/mdt_hard-stonewall"
Path: /scratch4/jburto2/io500/datafiles/io500.2018.10.25-09.23.19
FS: 174.6 TiB   Used FS: 29.1%   Inodes: 0.0 Mi   Used Inodes: -nan%

256 tasks, 2400000 files

SUMMARY rate: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation     :          0.000          0.000          0.000          0.000
   File stat         :      69888.810      69888.810      69888.810          0.000
   File read         :          0.000          0.000          0.000          0.000
   File removal      :          0.000          0.000          0.000          0.000
   Tree creation     :          0.000          0.000          0.000          0.000
   Tree removal      :          0.000          0.000          0.000          0.000

-- finished at 10/25/2018 11:34:23 --
mdtest_hard_write
-- started at 10/25/2018 11:02:52 --

mdtest-1.9.3 was launched with 256 total task(s) on 36 node(s)
Command line used: /home/jburto2/io-500-dev/bin/mdtest "-C" "-t" "-F" "-w" "3901" "-e" "3901" "-d" "/scratch4/jburto2/io500/datafiles/io500.2018.10.25-09.23.19/mdt_hard" "-n" "9375" "-x" "/scratch4/jburto2/io500/datafiles/io500.2018.10.25-09.23.19/mdt_hard-stonewall" "-W" "300"
Path: /scratch4/jburto2/io500/datafiles/io500.2018.10.25-09.23.19
FS: 174.6 TiB   Used FS: 29.1%   Inodes: 0.0 Mi   Used Inodes: -nan%

256 tasks, 2400000 files
stonewall rank 95: 5794 of 6830 
stonewall rank 243: 5812 of 6830 
stonewall rank 58: 5830 of 6830 
stonewall rank 169: 5792 of 6830 
stonewall rank 206: 5828 of 6830 
stonewall rank 237: 5793 of 6830 
stonewall rank 21: 5800 of 6830 
stonewall rank 7: 5871 of 6830 
stonewall rank 77: 5883 of 6830 
stonewall rank 37: 5865 of 6830 
stonewall rank 132: 5790 of 6830 
stonewall rank 83: 5891 of 6830 
stonewall rank 69: 5836 of 6830 
stonewall rank 151: 5908 of 6830 
stonewall rank 1: 5884 of 6830 
stonewall rank 222: 5899 of 6830 
stonewall rank 143: 5852 of 6830 
stonewall rank 155: 5890 of 6830 
stonewall rank 89: 5758 of 6830 
stonewall rank 44: 5873 of 6830 
stonewall rank 19: 5751 of 6830 
stonewall rank 75: 5878 of 6830 
stonewall rank 39: 5913 of 6830 
stonewall rank 217: 5801 of 6830 
stonewall rank 81: 5891 of 6830 
stonewall rank 188: 5876 of 6830 
stonewall rank 126: 5845 of 6830 
stonewall rank 149: 5881 of 6830 
stonewall rank 115: 5887 of 6830 
stonewall rank 118: 5884 of 6830 
stonewall rank 225: 5909 of 6830 
stonewall rank 186: 5883 of 6830 
stonewall rank 119: 5904 of 6830 
stonewall rank 254: 5776 of 6830 
stonewall rank 229: 5897 of 6830 
stonewall rank 163: 5646 of 6830 
stonewall rank 106: 5826 of 6830 
stonewall rank 180: 5791 of 6830 
stonewall rank 3: 5923 of 6830 
stonewall rank 223: 5903 of 6830 
stonewall rank 74: 5897 of 6830 
stonewall rank 61: 5825 of 6830 
stonewall rank 40: 5894 of 6830 
stonewall rank 15: 5760 of 6830 
stonewall rank 227: 5913 of 6830 
stonewall rank 111: 5871 of 6830 
stonewall rank 32: 5806 of 6830 
stonewall rank 192: 5933 of 6830 
stonewall rank 167: 5751 of 6830 
stonewall rank 185: 5898 of 6830 
stonewall rank 200: 5781 of 6830 
stonewall rank 231: 5888 of 6830 
stonewall rank 187: 5907 of 6830 
stonewall rank 135: 5836 of 6830 
stonewall rank 159: 5896 of 6830 
stonewall rank 91: 5812 of 6830 
stonewall rank 52: 5794 of 6830 
stonewall rank 41: 5936 of 6830 
stonewall rank 98: 5811 of 6830 
stonewall rank 114: 5897 of 6830 
stonewall rank 71: 6780 of 6830 
stonewall rank 45: 5878 of 6830 
stonewall rank 172: 5769 of 6830 
stonewall rank 93: 5785 of 6830 
stonewall rank 38: 5870 of 6830 
stonewall rank 53: 5604 of 6830 
stonewall rank 209: 5868 of 6830 
stonewall rank 233: 5903 of 6830 
stonewall rank 112: 5886 of 6830 
stonewall rank 107: 5815 of 6830 
stonewall rank 246: 5787 of 6830 
stonewall rank 241: 5759 of 6830 
stonewall rank 239: 5802 of 6830 
stonewall rank 148: 5873 of 6830 
stonewall rank 24: 5802 of 6830 
stonewall rank 11: 5877 of 6830 
stonewall rank 219: 6739 of 6830 
stonewall rank 78: 5874 of 6830 
stonewall rank 56: 5793 of 6830 
stonewall rank 79: 5881 of 6830 
stonewall rank 99: 5833 of 6830 
stonewall rank 73: 6787 of 6830 
stonewall rank 90: 5666 of 6830 
stonewall rank 85: 5887 of 6830 
stonewall rank 9: 5880 of 6830 
stonewall rank 255: 5791 of 6830 
stonewall rank 153: 5872 of 6830 
stonewall rank 130: 5810 of 6830 
stonewall rank 113: 5903 of 6830 
stonewall rank 157: 5892 of 6830 
stonewall rank 221: 6795 of 6830 
stonewall rank 2: 5880 of 6830 
stonewall rank 165: 5765 of 6830 
stonewall rank 122: 5876 of 6830 
stonewall rank 189: 5878 of 6830 
stonewall rank 145: 6784 of 6830 
stonewall rank 204: 5775 of 6830 
stonewall rank 193: 5894 of 6830 
stonewall rank 190: 5882 of 6830 
stonewall rank 110: 6737 of 6830 
stonewall rank 127: 5720 of 6830 
stonewall rank 46: 5897 of 6830 
stonewall rank 196: 5889 of 6830 
stonewall rank 33: 5830 of 6830 
stonewall rank 202: 5822 of 6830 
Continue stonewall hit min: 5604 max: 6830 avg: 5906.2 
stonewall rank 0: 5888 of 6830 
stonewall rank 184: 6774 of 6830 
stonewall rank 182: 6741 of 6830 
stonewall rank 76: 5882 of 6830 
stonewall rank 139: 5833 of 6830 
stonewall rank 120: 5884 of 6830 
stonewall rank 226: 5905 of 6830 
stonewall rank 36: 6747 of 6830 
stonewall rank 17: 5798 of 6830 
stonewall rank 230: 5905 of 6830 
stonewall rank 48: 5904 of 6830 
stonewall rank 34: 6753 of 6830 
stonewall rank 201: 5725 of 6830 
stonewall rank 65: 5843 of 6830 
stonewall rank 5: 5854 of 6830 
stonewall rank 181: 5802 of 6830 
stonewall rank 102: 5808 of 6830 
stonewall rank 42: 5890 of 6830 
stonewall rank 150: 5894 of 6830 
stonewall rank 213: 5870 of 6830 
stonewall rank 194: 5867 of 6830 
stonewall rank 247: 5677 of 6830 
stonewall rank 250: 5783 of 6830 
stonewall rank 54: 5811 of 6830 
stonewall rank 238: 5703 of 6830 
stonewall rank 28: 5796 of 6830 
stonewall rank 108: 6745 of 6830 
stonewall rank 218: 5832 of 6830 
stonewall rank 176: 5773 of 6830 
stonewall rank 13: 5910 of 6830 
stonewall rank 55: 5852 of 6830 
stonewall rank 57: 5815 of 6830 
stonewall rank 63: 5864 of 6830 
stonewall rank 82: 5896 of 6830 
stonewall rank 128: 5849 of 6830 
stonewall rank 164: 5681 of 6830 
stonewall rank 116: 5881 of 6830 
stonewall rank 70: 5811 of 6830 
stonewall rank 87: 5935 of 6830 
stonewall rank 224: 5859 of 6830 
stonewall rank 235: 5905 of 6830 
stonewall rank 123: 5875 of 6830 
stonewall rank 211: 5869 of 6830 
stonewall rank 156: 5875 of 6830 
stonewall rank 16: 5715 of 6830 
stonewall rank 124: 5889 of 6830 
stonewall rank 144: 5861 of 6830 
stonewall rank 26: 5830 of 6830 
stonewall rank 203: 5800 of 6830 
stonewall rank 161: 5911 of 6830 
stonewall rank 131: 5830 of 6830 
stonewall rank 100: 5780 of 6830 
stonewall rank 8: 5901 of 6830 
stonewall rank 10: 5910 of 6830 
stonewall rank 50: 5887 of 6830 
stonewall rank 152: 5853 of 6830 
stonewall rank 137: 5830 of 6830 
stonewall rank 173: 5824 of 6830 
stonewall rank 191: 5892 of 6830 
stonewall rank 198: 5877 of 6830 
stonewall rank 4: 5868 of 6830 
stonewall rank 174: 5819 of 6830 
stonewall rank 25: 5842 of 6830 
stonewall rank 215: 5820 of 6830 
stonewall rank 35: 6722 of 6830 
stonewall rank 248: 5844 of 6830 
stonewall rank 197: 5896 of 6830 
stonewall rank 30: 5781 of 6830 
stonewall rank 62: 5875 of 6830 
stonewall rank 59: 5825 of 6830 
stonewall rank 205: 5791 of 6830 
stonewall rank 67: 5840 of 6830 
stonewall rank 210: 5869 of 6830 
stonewall rank 141: 5839 of 6830 
stonewall rank 43: 5936 of 6830 
stonewall rank 133: 5825 of 6830 
stonewall rank 12: 5887 of 6830 
stonewall rank 136: 5848 of 6830 
stonewall rank 252: 5813 of 6830 
stonewall rank 109: 6725 of 6830 
stonewall rank 92: 5780 of 6830 
stonewall rank 170: 5767 of 6830 
stonewall rank 94: 5774 of 6830 
stonewall rank 104: 5836 of 6830 
stonewall rank 47: 5916 of 6830 
stonewall rank 207: 5810 of 6830 
stonewall rank 49: 5886 of 6830 
stonewall rank 178: 5798 of 6830 
stonewall rank 129: 5804 of 6830 
stonewall rank 96: 5813 of 6830 
stonewall rank 168: 5780 of 6830 
stonewall rank 244: 5779 of 6830 
stonewall rank 121: 5887 of 6830 
stonewall rank 22: 5780 of 6830 
stonewall rank 86: 5885 of 6830 
stonewall rank 117: 5894 of 6830 
stonewall rank 242: 5796 of 6830 
stonewall rank 234: 5913 of 6830 
stonewall rank 20: 5804 of 6830 
stonewall rank 183: 6718 of 6830 
stonewall rank 29: 5797 of 6830 
stonewall rank 18: 5794 of 6830 
stonewall rank 160: 5878 of 6830 
stonewall rank 158: 5899 of 6830 
stonewall rank 103: 5804 of 6830 
stonewall rank 51: 5915 of 6830 
stonewall rank 251: 5814 of 6830 
stonewall rank 220: 6770 of 6830 
stonewall rank 66: 5819 of 6830 
stonewall rank 195: 5853 of 6830 
stonewall rank 140: 5821 of 6830 
stonewall rank 154: 5880 of 6830 
stonewall rank 177: 5788 of 6830 
stonewall rank 72: 6786 of 6830 
stonewall rank 214: 5853 of 6830 
stonewall rank 166: 5803 of 6830 
stonewall rank 240: 5786 of 6830 
stonewall rank 232: 5888 of 6830 
stonewall rank 146: 6759 of 6830 
stonewall rank 84: 5890 of 6830 
stonewall rank 228: 5916 of 6830 
stonewall rank 101: 5814 of 6830 
stonewall rank 125: 5879 of 6830 
stonewall rank 27: 5796 of 6830 
stonewall rank 6: 5873 of 6830 
stonewall rank 138: 5813 of 6830 
stonewall rank 80: 5888 of 6830 
stonewall rank 175: 5789 of 6830 
stonewall rank 199: 5906 of 6830 
stonewall rank 249: 5776 of 6830 
stonewall rank 97: 5790 of 6830 
stonewall rank 64: 5842 of 6830 
stonewall rank 88: 5912 of 6830 
stonewall rank 212: 5824 of 6830 
stonewall rank 171: 5788 of 6830 
stonewall rank 162: 5924 of 6830 
stonewall rank 245: 5815 of 6830 
stonewall rank 236: 5916 of 6830 
stonewall rank 23: 5761 of 6830 
stonewall rank 14: 5891 of 6830 
stonewall rank 60: 5829 of 6830 
stonewall rank 31: 5809 of 6830 
stonewall rank 134: 5844 of 6830 
stonewall rank 105: 5785 of 6830 
stonewall rank 208: 5850 of 6830 
stonewall rank 142: 5868 of 6830 
stonewall rank 179: 5807 of 6830 
stonewall rank 253: 5790 of 6830 
stonewall rank 68: 5851 of 6830 
stonewall rank 216: 5857 of 6830 

SUMMARY rate: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation     :       6834.189       6834.189       6834.189          0.000
   File stat         :          0.000          0.000          0.000          0.000
   File read         :          0.000          0.000          0.000          0.000
   File removal      :          0.000          0.000          0.000          0.000
   Tree creation     :         87.137         87.137         87.137          0.000
   Tree removal      :          0.000          0.000          0.000          0.000

-- finished at 10/25/2018 11:08:43 --
result_summary
[RESULT] BW   phase 1            ior_easy_write                3.173 GB/s : time 592.75 seconds
[RESULT] IOPS phase 1         mdtest_easy_write               99.133 kiops : time 2069.85 seconds
[RESULT] BW   phase 2            ior_hard_write                0.159 GB/s : time 3297.64 seconds
[RESULT] IOPS phase 2         mdtest_hard_write                6.834 kiops : time 354.48 seconds
[RESULT] IOPS phase 3                      find              606.340 kiops : time  85.96 seconds
[RESULT] BW   phase 3             ior_easy_read                2.709 GB/s : time 694.24 seconds
[RESULT] IOPS phase 4          mdtest_easy_stat              400.587 kiops : time 129.23 seconds
[RESULT] BW   phase 4             ior_hard_read                0.885 GB/s : time 593.91 seconds
[RESULT] IOPS phase 5          mdtest_hard_stat               69.889 kiops : time  28.78 seconds
[RESULT] IOPS phase 6        mdtest_easy_delete               11.887 kiops : time 4259.68 seconds
[RESULT] IOPS phase 7          mdtest_hard_read               14.652 kiops : time 122.57 seconds
[RESULT] IOPS phase 8        mdtest_hard_delete                3.000 kiops : time 588.21 seconds
[SCORE] Bandwidth 1.04947 GB/s : IOPS 39.5696 kiops : TOTAL 6.44415