Apocrita

Institution Queen Mary University of London
Client Procs Per Node 24
Client Operating System CentOS
Client Operating System Version 7.4
Client Kernel Version 3.10.0-693.11.6.el7.x86_64

DATA SERVER

Storage Type HDD
Volatile Memory
Storage Interface SAS
Network Ethernet 40Gbps
Software Version 4.2.3-8
OS Version CentOS 7.3

INFORMATION

Client Nodes 10
Client Total Procs 240
Metadata Nodes 4
Metadata Storage Devices 8
Data Nodes 4
Data Storage Devices 21

METADATA

Easy Write 10.60 kIOP/s
Easy Stat 68.56 kIOP/s
Easy Delete 21.92 kIOP/s
Hard Write 3.21 kIOP/s
Hard Read 4.50 kIOP/s
Hard Stat 5.89 kIOP/s
Hard Delete 3.49 kIOP/s

Submitted Files

io500
#!/bin/bash
# IO-500 driver script (QMUL Apocrita, SGE batch job).
#$ -cwd
#$ -pe parallel 240
#$ -l infiniband=sdv-i
#$ -l h_rt=4:0:0

#
# INSTRUCTIONS:
# Edit this file as needed for your machine.
# This simplified version is just for running on a single node.
# It is a simplified version of the site-configs/sandia/startup.sh which include SLURM directives.
# Most of the variables set in here are needed for io500_fixed.sh which gets sourced at the end of this.
# Please also edit 'extra_description' function.

# NOTE: 'module load' runs before strict mode so a missing modules system
# does not abort the script on login-style shells.
module load intelmpi
set -euo pipefail  # better error handling

# Phase toggles: turn these to True successively while you debug and tune this
# benchmark. For each one that you turn to true, go and edit the appropriate
# setup function (to find the function name, see 'main').
# These are listed in the order that they run.
io500_run_ior_easy="True" # does the write phase and enables the subsequent read
io500_run_md_easy="True"  # does the creat phase and enables the subsequent stat
io500_run_ior_hard="True" # does the write phase and enables the subsequent read
io500_run_md_hard="True"  # does the creat phase and enables the subsequent read
io500_run_find="True"
io500_run_ior_easy_read="True"
io500_run_md_easy_stat="True"
io500_run_ior_hard_read="True"
io500_run_md_hard_stat="True"
io500_run_md_hard_read="True"
io500_run_md_easy_delete="True" # turn this off if you want to just run find by itself
io500_run_md_hard_delete="True" # turn this off if you want to just run find by itself
io500_run_mdreal="True"  # this one is optional
io500_cleanup_workdir="False"  # this flag is currently ignored. You'll need to clean up your data files manually if you want to.
io500_stonewall_timer=300 # Stonewalling timer, stop with wearout after 300s with default test, set to 0, if you never want to abort...

# to run this benchmark, find and edit each of these functions.
# please also edit 'extra_description' function to help us collect the required data.
function main {
  # Run every setup phase in order; all but setup_mdreal are required for a
  # complete score. Each setup_* function only assigns io500_* variables.
  local phase
  for phase in setup_directories setup_paths setup_ior_easy setup_ior_hard \
               setup_mdt_easy setup_mdt_hard setup_find setup_mdreal; do
    "$phase"
  done
  # Finally hand control to the fixed benchmark harness.
  run_benchmarks
}

function setup_directories {
  # Create a per-run data directory and a results directory, both uniquified
  # with a timestamp so repeated runs never collide.
  # If you want to set up stripe tuning on your output directories or anything
  # similar, then this is a good place to do it.
  timestamp=$(date +%Y.%m.%d-%H.%M.%S)               # create a uniquifier
  io500_workdir=$PWD/datafiles.gs7k/io500.$timestamp # directory where the data will be stored
  io500_result_dir=$PWD/results/gs7k.$timestamp      # the directory where the output results will be kept
  mkdir -p -- "$io500_workdir" "$io500_result_dir"
}

function setup_paths {
  # Binaries live in ./bin/ after a successful ./utilities/prepare.sh run.
  local bindir="$PWD/bin"
  io500_ior_cmd="$bindir/ior"
  io500_mdtest_cmd="$bindir/mdtest"
  io500_mdreal_cmd="$bindir/md-real-io"
  # MPI launcher and its arguments; NSLOTS is provided by the SGE scheduler.
  io500_mpirun="mpirun"
  io500_mpiargs="-np $NSLOTS"
}

function setup_ior_easy {
  # Data written per rank in MiB; any value works as long as it scales the
  # IOR runtime via io500_ior_easy_params.
  local xfer="2048k"
  io500_ior_easy_size=4000
  # 2 MiB transfers, ~4 GB block per process, one file per process (-F).
  io500_ior_easy_params="-t ${xfer} -b ${io500_ior_easy_size}m -F"
}

function setup_mdt_easy {
  # Files created per rank in the easy metadata phase.
  io500_mdtest_easy_files_per_proc=25000
  # -u: unique directory per thread; -L: files only at leaf directories.
  io500_mdtest_easy_params="-u -L"
}

function setup_ior_hard {
  # Extra IOR flags, e.g. -E to keep precreated files (lfs setstripe) or
  # '-a MPIIO'; empty means plain POSIX defaults.
  io500_ior_hard_other_options=""
  # Number of small writes each rank issues into the shared file.
  io500_ior_hard_writes_per_proc=12500
}

function setup_mdt_hard {
  # Extra mdtest flags for the hard phase; none needed here.
  io500_mdtest_hard_other_options=""
  # Files created per rank in the hard metadata phase.
  io500_mdtest_hard_files_per_proc=10000
}

function setup_find {
  # Choose the 'find' implementation. Innovation is allowed here; the stock
  # options are a serial bin/sfind.sh (very slow), a parallel Python version,
  # and the parallel C 'pfind' used below. GPFS sites may instead prefer the
  # provided bin/mmfind.sh wrapper (with io500_find_mpi="False").
  #
  # To build pfind (after ./utilities/prepare.sh):
  #   cd build/pfind && ./prepare.sh && ./compile.sh && cp pfind ../../bin/
  #
  # With io500_find_mpi="True" pfind runs with the same MPI node/rank layout
  # as the other phases. To use a different (often smaller) rank count, set
  # io500_find_mpi="False" and point io500_find_cmd at a wrapper script that
  # sets up MPI itself. If a custom approach is used, please provide enough
  # info so others can reproduce.
  io500_find_cmd="$PWD/bin/pfind"
  io500_find_mpi="True"
  # -s enables stonewalling; -r names the per-run pfind results file.
  io500_find_cmd_args="-s $io500_stonewall_timer -r $io500_result_dir/pfind_results"
}

function setup_mdreal {
  # md-real-io parameters for the optional mdreal phase:
  # -P precreated files per set, -I iterations.
  io500_mdreal_params='-P=5000 -I=1000'
}

function run_benchmarks {
  # Important: source the io500_fixed.sh script.  Do not change it. If you discover
  # a need to change it, please email the mailing list to discuss
  # NOTE(review): the 'source' is the first stage of a pipeline, so it runs in
  # a subshell — any variables io500_fixed.sh sets are lost afterwards.
  # Presumably intentional since nothing runs after this; confirm if extending.
  # All output (stdout+stderr) is also captured into the per-run summary file.
  source ./utilities/io500_fixed.sh 2>&1 | tee $io500_result_dir/io-500-summary.$timestamp.txt
}

# Add key/value pairs defining your system
# Feel free to add extra ones if you'd like
function extra_description {
  # Key/value pairs describing this system for the IO-500 submission.
  # System identification
  io500_info_system_name='Apocrita'
  io500_info_institute_name='QMUL'
  io500_info_filesystem='GPFS'
  io500_info_filesystem_version='4.2.3-8'
  io500_info_filesystem_vendor='DDN'
  io500_info_storage_install_date='11/2016'  # MM/YY
  io500_info_storage_age_in_months='10'      # age since last refresh, not install date
  # Client side
  io500_info_num_client_nodes='10'
  io500_info_procs_per_node='24'
  # Server side
  # NOTE(review): the submission header lists 21 data / 8 metadata devices;
  # the counts below (210 / 16) disagree — confirm which figure is correct.
  io500_info_num_metadata_server_nodes='4'
  io500_info_num_data_server_nodes='4'
  io500_info_num_data_storage_devices='210'
  io500_info_num_metadata_storage_devices='16'
  io500_info_data_storage_type='HDD'         # HDD, SSD, persistent memory, etc
  io500_info_metadata_storage_type='SSD'     # HDD, SSD, persistent memory, etc
  io500_info_storage_network='ethernet'      # infiniband, omnipath, ethernet, etc
  io500_info_storage_interface='SAS'         # SAS, SATA, NVMe, etc
  # Miscellaneous
  io500_info_whatever='DDN, GS7K'
}

# Entry point: run all setup phases, then the fixed benchmark harness.
main
ior_easy_read
IOR-3.2.0: MPI Coordinated Test of Parallel I/O
Began               : Wed Nov  7 16:14:07 2018
Command line        : /data/autoScratch/weekly/aaw360/io-500/benchmarks/io-500/io-500-qmul/bin/ior -r -R -C -Q 1 -g -G 27 -k -e -t 2048k -b 4000m -F -o /data/autoScratch/weekly/aaw360/io-500/benchmarks/io-500/io-500-qmul/datafiles.gs7k/io500.2018.11.07-15.47.43/ior_easy/ior_file_easy -O stoneWallingStatusFile=/data/autoScratch/weekly/aaw360/io-500/benchmarks/io-500/io-500-qmul/datafiles.gs7k/io500.2018.11.07-15.47.43/ior_easy/stonewall
Machine             : Linux sdv21
TestID              : 0
StartTime           : Wed Nov  7 16:14:07 2018
Path                : /data/ITSR-Testing/io-500/datafiles.gs7k/io500.2018.11.07-15.47.43/ior_easy
FS                  : 11.0 TiB   Used FS: 58.2%   Inodes: 104.9 Mi   Used Inodes: 7.0%

Options: 
api                 : POSIX
apiVersion          : 
test filename       : /data/autoScratch/weekly/aaw360/io-500/benchmarks/io-500/io-500-qmul/datafiles.gs7k/io500.2018.11.07-15.47.43/ior_easy/ior_file_easy
access              : file-per-process
type                : independent
segments            : 1
ordering in a file  : sequential
ordering inter file : constant task offset
task offset         : 1
tasks               : 240
clients per node    : 24
repetitions         : 1
xfersize            : 2 MiB
blocksize           : 3.91 GiB
aggregate filesize  : 937.50 GiB

Results: 

access    bw(MiB/s)  block(KiB) xfer(KiB)  open(s)    wr/rd(s)   close(s)   total(s)   iter
------    ---------  ---------- ---------  --------   --------   --------   --------   ----
WARNING: Expected aggregate file size       = 1006632960000.
WARNING: Stat() of aggregate file size      = 906472980480.
WARNING: Using actual aggregate bytes moved = 906472980480.
read      2218.84    4096000    2048.00    0.029846   389.58     0.001495   389.61     0   
Max Read:  2218.84 MiB/sec (2326.62 MB/sec)

Summary of all tests:
Operation   Max(MiB)   Min(MiB)  Mean(MiB)     StdDev   Max(OPs)   Min(OPs)  Mean(OPs)     StdDev    Mean(s) Test# #Tasks tPN reps fPP reord reordoff reordrand seed segcnt   blksiz    xsize aggs(MiB)   API RefNum
read         2218.84    2218.84    2218.84       0.00    1109.42    1109.42    1109.42       0.00  389.60902     0    240  24    1   1     1        1         0    0      1 4194304000  2097152  864480.0 POSIX      0
Finished            : Wed Nov  7 16:20:37 2018
ior_easy_write
IOR-3.2.0: MPI Coordinated Test of Parallel I/O
Began               : Wed Nov  7 15:47:58 2018
Command line        : /data/autoScratch/weekly/aaw360/io-500/benchmarks/io-500/io-500-qmul/bin/ior -w -C -Q 1 -g -G 27 -k -e -t 2048k -b 4000m -F -o /data/autoScratch/weekly/aaw360/io-500/benchmarks/io-500/io-500-qmul/datafiles.gs7k/io500.2018.11.07-15.47.43/ior_easy/ior_file_easy -O stoneWallingStatusFile=/data/autoScratch/weekly/aaw360/io-500/benchmarks/io-500/io-500-qmul/datafiles.gs7k/io500.2018.11.07-15.47.43/ior_easy/stonewall -O stoneWallingWearOut=1 -D 300
Machine             : Linux sdv21
TestID              : 0
StartTime           : Wed Nov  7 15:47:58 2018
Path                : /data/ITSR-Testing/io-500/datafiles.gs7k/io500.2018.11.07-15.47.43/ior_easy
FS                  : 11.0 TiB   Used FS: 48.2%   Inodes: 104.9 Mi   Used Inodes: 2.9%

Options: 
api                 : POSIX
apiVersion          : 
test filename       : /data/autoScratch/weekly/aaw360/io-500/benchmarks/io-500/io-500-qmul/datafiles.gs7k/io500.2018.11.07-15.47.43/ior_easy/ior_file_easy
access              : file-per-process
type                : independent
segments            : 1
ordering in a file  : sequential
ordering inter file : constant task offset
task offset         : 1
tasks               : 240
clients per node    : 24
repetitions         : 1
xfersize            : 2 MiB
blocksize           : 3.91 GiB
aggregate filesize  : 937.50 GiB
stonewallingTime    : 300
stoneWallingWearOut : 1

Results: 

access    bw(MiB/s)  block(KiB) xfer(KiB)  open(s)    wr/rd(s)   close(s)   total(s)   iter
------    ---------  ---------- ---------  --------   --------   --------   --------   ----
stonewalling pairs accessed min: 1029 max: 1801 -- min data: 2.0 GiB mean data: 2.9 GiB time: 303.2s
WARNING: Expected aggregate file size       = 1006632960000.
WARNING: Stat() of aggregate file size      = 906472980480.
WARNING: Using actual aggregate bytes moved = 906472980480.
WARNING: maybe caused by deadlineForStonewalling
write     2297.80    4096000    2048.00    1.39       374.83     0.005066   376.22     0   
Max Write: 2297.80 MiB/sec (2409.42 MB/sec)

Summary of all tests:
Operation   Max(MiB)   Min(MiB)  Mean(MiB)     StdDev   Max(OPs)   Min(OPs)  Mean(OPs)     StdDev    Mean(s) Test# #Tasks tPN reps fPP reord reordoff reordrand seed segcnt   blksiz    xsize aggs(MiB)   API RefNum
write        2297.80    2297.80    2297.80       0.00    1148.90    1148.90    1148.90       0.00  376.22096     0    240  24    1   1     1        1         0    0      1 4194304000  2097152  864480.0 POSIX      0
Finished            : Wed Nov  7 15:54:15 2018
ior_hard_read
IOR-3.2.0: MPI Coordinated Test of Parallel I/O
Began               : Wed Nov  7 16:21:34 2018
Command line        : /data/autoScratch/weekly/aaw360/io-500/benchmarks/io-500/io-500-qmul/bin/ior -r -R -C -Q 1 -g -G 27 -k -e -t 47008 -b 47008 -s 12500 -o /data/autoScratch/weekly/aaw360/io-500/benchmarks/io-500/io-500-qmul/datafiles.gs7k/io500.2018.11.07-15.47.43/ior_hard/IOR_file -O stoneWallingStatusFile=/data/autoScratch/weekly/aaw360/io-500/benchmarks/io-500/io-500-qmul/datafiles.gs7k/io500.2018.11.07-15.47.43/ior_hard/stonewall
Machine             : Linux sdv21
TestID              : 0
StartTime           : Wed Nov  7 16:21:34 2018
Path                : /data/ITSR-Testing/io-500/datafiles.gs7k/io500.2018.11.07-15.47.43/ior_hard
FS                  : 11.0 TiB   Used FS: 58.2%   Inodes: 104.9 Mi   Used Inodes: 7.0%

Options: 
api                 : POSIX
apiVersion          : 
test filename       : /data/autoScratch/weekly/aaw360/io-500/benchmarks/io-500/io-500-qmul/datafiles.gs7k/io500.2018.11.07-15.47.43/ior_hard/IOR_file
access              : single-shared-file
type                : independent
segments            : 12500
ordering in a file  : sequential
ordering inter file : constant task offset
task offset         : 1
tasks               : 240
clients per node    : 24
repetitions         : 1
xfersize            : 47008 bytes
blocksize           : 47008 bytes
aggregate filesize  : 131.34 GiB

Results: 

access    bw(MiB/s)  block(KiB) xfer(KiB)  open(s)    wr/rd(s)   close(s)   total(s)   iter
------    ---------  ---------- ---------  --------   --------   --------   --------   ----
read      422.06     45.91      45.91      0.003059   318.63     0.020323   318.66     0   
Max Read:  422.06 MiB/sec (442.56 MB/sec)

Summary of all tests:
Operation   Max(MiB)   Min(MiB)  Mean(MiB)     StdDev   Max(OPs)   Min(OPs)  Mean(OPs)     StdDev    Mean(s) Test# #Tasks tPN reps fPP reord reordoff reordrand seed segcnt   blksiz    xsize aggs(MiB)   API RefNum
read          422.06     422.06     422.06       0.00    9414.55    9414.55    9414.55       0.00  318.65564     0    240  24    1   0     1        1         0    0  12500    47008    47008  134491.0 POSIX      0
Finished            : Wed Nov  7 16:26:52 2018
ior_hard_write
IOR-3.2.0: MPI Coordinated Test of Parallel I/O
Began               : Wed Nov  7 15:59:45 2018
Command line        : /data/autoScratch/weekly/aaw360/io-500/benchmarks/io-500/io-500-qmul/bin/ior -w -C -Q 1 -g -G 27 -k -e -t 47008 -b 47008 -s 12500 -o /data/autoScratch/weekly/aaw360/io-500/benchmarks/io-500/io-500-qmul/datafiles.gs7k/io500.2018.11.07-15.47.43/ior_hard/IOR_file -O stoneWallingStatusFile=/data/autoScratch/weekly/aaw360/io-500/benchmarks/io-500/io-500-qmul/datafiles.gs7k/io500.2018.11.07-15.47.43/ior_hard/stonewall -O stoneWallingWearOut=1 -D 300
Machine             : Linux sdv21
TestID              : 0
StartTime           : Wed Nov  7 15:59:45 2018
Path                : /data/ITSR-Testing/io-500/datafiles.gs7k/io500.2018.11.07-15.47.43/ior_hard
FS                  : 11.0 TiB   Used FS: 55.7%   Inodes: 104.9 Mi   Used Inodes: 5.9%

Options: 
api                 : POSIX
apiVersion          : 
test filename       : /data/autoScratch/weekly/aaw360/io-500/benchmarks/io-500/io-500-qmul/datafiles.gs7k/io500.2018.11.07-15.47.43/ior_hard/IOR_file
access              : single-shared-file
type                : independent
segments            : 12500
ordering in a file  : sequential
ordering inter file : constant task offset
task offset         : 1
tasks               : 240
clients per node    : 24
repetitions         : 1
xfersize            : 47008 bytes
blocksize           : 47008 bytes
aggregate filesize  : 131.34 GiB
stonewallingTime    : 300
stoneWallingWearOut : 1

Results: 

access    bw(MiB/s)  block(KiB) xfer(KiB)  open(s)    wr/rd(s)   close(s)   total(s)   iter
------    ---------  ---------- ---------  --------   --------   --------   --------   ----
stonewalling pairs accessed min: 7510 max: 12500 -- min data: 0.3 GiB mean data: 0.4 GiB time: 300.2s
write     295.26     45.91      45.91      0.009950   455.47     0.011884   455.50     0   
Max Write: 295.26 MiB/sec (309.61 MB/sec)

Summary of all tests:
Operation   Max(MiB)   Min(MiB)  Mean(MiB)     StdDev   Max(OPs)   Min(OPs)  Mean(OPs)     StdDev    Mean(s) Test# #Tasks tPN reps fPP reord reordoff reordrand seed segcnt   blksiz    xsize aggs(MiB)   API RefNum
write         295.26     295.26     295.26       0.00    6586.22    6586.22    6586.22       0.00  455.49621     0    240  24    1   0     1        1         0    0  12500    47008    47008  134491.0 POSIX      0
Finished            : Wed Nov  7 16:07:21 2018
mdtest_easy_delete
-- started at 11/07/2018 16:30:30 --

mdtest-1.9.3 was launched with 240 total task(s) on 10 node(s)
Command line used: /data/autoScratch/weekly/aaw360/io-500/benchmarks/io-500/io-500-qmul/bin/mdtest "-r" "-F" "-d" "/data/autoScratch/weekly/aaw360/io-500/benchmarks/io-500/io-500-qmul/datafiles.gs7k/io500.2018.11.07-15.47.43/mdt_easy" "-n" "25000" "-u" "-L" "-x" "/data/autoScratch/weekly/aaw360/io-500/benchmarks/io-500/io-500-qmul/datafiles.gs7k/io500.2018.11.07-15.47.43/mdt_easy-stonewall"
Path: /data/ITSR-Testing/io-500/datafiles.gs7k/io500.2018.11.07-15.47.43
FS: 11.0 TiB   Used FS: 58.2%   Inodes: 104.9 Mi   Used Inodes: 7.0%

240 tasks, 6000000 files

SUMMARY rate: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation     :          0.000          0.000          0.000          0.000
   File stat         :          0.000          0.000          0.000          0.000
   File read         :          0.000          0.000          0.000          0.000
   File removal      :      21921.654      21921.654      21921.654          0.000
   Tree creation     :          0.000          0.000          0.000          0.000
   Tree removal      :          0.443          0.443          0.443          0.000

-- finished at 11/07/2018 16:33:03 --
mdtest_easy_stat
-- started at 11/07/2018 16:20:41 --

mdtest-1.9.3 was launched with 240 total task(s) on 10 node(s)
Command line used: /data/autoScratch/weekly/aaw360/io-500/benchmarks/io-500/io-500-qmul/bin/mdtest "-T" "-F" "-d" "/data/autoScratch/weekly/aaw360/io-500/benchmarks/io-500/io-500-qmul/datafiles.gs7k/io500.2018.11.07-15.47.43/mdt_easy" "-n" "25000" "-u" "-L" "-x" "/data/autoScratch/weekly/aaw360/io-500/benchmarks/io-500/io-500-qmul/datafiles.gs7k/io500.2018.11.07-15.47.43/mdt_easy-stonewall"
Path: /data/ITSR-Testing/io-500/datafiles.gs7k/io500.2018.11.07-15.47.43
FS: 11.0 TiB   Used FS: 58.2%   Inodes: 104.9 Mi   Used Inodes: 7.0%

240 tasks, 6000000 files

SUMMARY rate: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation     :          0.000          0.000          0.000          0.000
   File stat         :      68554.850      68554.850      68554.850          0.000
   File read         :          0.000          0.000          0.000          0.000
   File removal      :          0.000          0.000          0.000          0.000
   Tree creation     :          0.000          0.000          0.000          0.000
   Tree removal      :          0.000          0.000          0.000          0.000

-- finished at 11/07/2018 16:21:29 --
mdtest_easy_write
-- started at 11/07/2018 15:54:24 --

mdtest-1.9.3 was launched with 240 total task(s) on 10 node(s)
Command line used: /data/autoScratch/weekly/aaw360/io-500/benchmarks/io-500/io-500-qmul/bin/mdtest "-C" "-F" "-d" "/data/autoScratch/weekly/aaw360/io-500/benchmarks/io-500/io-500-qmul/datafiles.gs7k/io500.2018.11.07-15.47.43/mdt_easy" "-n" "25000" "-u" "-L" "-x" "/data/autoScratch/weekly/aaw360/io-500/benchmarks/io-500/io-500-qmul/datafiles.gs7k/io500.2018.11.07-15.47.43/mdt_easy-stonewall" "-W" "300"
Path: /data/ITSR-Testing/io-500/datafiles.gs7k/io500.2018.11.07-15.47.43
FS: 11.0 TiB   Used FS: 55.7%   Inodes: 104.9 Mi   Used Inodes: 2.9%

240 tasks, 6000000 files
stonewall rank 144: 13377 of 13696 
stonewall rank 97: 13368 of 13696 
stonewall rank 145: 13316 of 13696 
stonewall rank 72: 13184 of 13696 
stonewall rank 24: 13383 of 13696 
stonewall rank 1: 13238 of 13696 
stonewall rank 192: 13141 of 13696 
stonewall rank 98: 13397 of 13696 
stonewall rank 168: 13253 of 13696 
stonewall rank 120: 13181 of 13696 
stonewall rank 216: 13187 of 13696 
stonewall rank 48: 13213 of 13696 
stonewall rank 146: 13303 of 13696 
stonewall rank 73: 13088 of 13696 
stonewall rank 25: 13406 of 13696 
stonewall rank 2: 13063 of 13696 
stonewall rank 193: 13250 of 13696 
stonewall rank 99: 13438 of 13696 
stonewall rank 169: 13203 of 13696 
stonewall rank 121: 13169 of 13696 
stonewall rank 217: 13115 of 13696 
stonewall rank 49: 13099 of 13696 
stonewall rank 147: 13310 of 13696 
stonewall rank 74: 13293 of 13696 
stonewall rank 26: 13375 of 13696 
stonewall rank 3: 13107 of 13696 
stonewall rank 194: 13241 of 13696 
stonewall rank 100: 13553 of 13696 
stonewall rank 170: 13485 of 13696 
stonewall rank 122: 13218 of 13696 
stonewall rank 218: 13130 of 13696 
stonewall rank 50: 13160 of 13696 
stonewall rank 148: 13194 of 13696 
stonewall rank 75: 13123 of 13696 
stonewall rank 27: 13226 of 13696 
stonewall rank 4: 13248 of 13696 
stonewall rank 195: 13219 of 13696 
stonewall rank 101: 13672 of 13696 
stonewall rank 171: 13333 of 13696 
stonewall rank 123: 13440 of 13696 
stonewall rank 219: 13090 of 13696 
stonewall rank 51: 13160 of 13696 
stonewall rank 149: 13518 of 13696 
stonewall rank 76: 13300 of 13696 
stonewall rank 28: 13240 of 13696 
stonewall rank 5: 13088 of 13696 
stonewall rank 196: 13160 of 13696 
stonewall rank 102: 13491 of 13696 
stonewall rank 172: 13313 of 13696 
stonewall rank 124: 13217 of 13696 
stonewall rank 220: 13400 of 13696 
stonewall rank 52: 13390 of 13696 
stonewall rank 150: 13141 of 13696 
stonewall rank 77: 13239 of 13696 
stonewall rank 29: 13251 of 13696 
stonewall rank 6: 12996 of 13696 
stonewall rank 197: 13409 of 13696 
stonewall rank 103: 13597 of 13696 
stonewall rank 173: 13365 of 13696 
stonewall rank 125: 13216 of 13696 
stonewall rank 221: 13325 of 13696 
stonewall rank 53: 13168 of 13696 
stonewall rank 151: 13430 of 13696 
stonewall rank 78: 13260 of 13696 
stonewall rank 30: 13360 of 13696 
stonewall rank 7: 13098 of 13696 
stonewall rank 198: 12978 of 13696 
stonewall rank 104: 13602 of 13696 
stonewall rank 174: 13388 of 13696 
stonewall rank 126: 13306 of 13696 
stonewall rank 222: 13313 of 13696 
stonewall rank 54: 13193 of 13696 
stonewall rank 152: 13369 of 13696 
stonewall rank 79: 13176 of 13696 
stonewall rank 31: 13250 of 13696 
stonewall rank 8: 13006 of 13696 
stonewall rank 199: 13178 of 13696 
stonewall rank 105: 13600 of 13696 
stonewall rank 175: 13282 of 13696 
stonewall rank 127: 13264 of 13696 
stonewall rank 223: 13314 of 13696 
stonewall rank 55: 13259 of 13696 
stonewall rank 153: 13277 of 13696 
stonewall rank 80: 13466 of 13696 
stonewall rank 32: 13253 of 13696 
stonewall rank 9: 13115 of 13696 
stonewall rank 200: 13013 of 13696 
stonewall rank 106: 13411 of 13696 
stonewall rank 176: 13448 of 13696 
stonewall rank 128: 13311 of 13696 
stonewall rank 224: 13405 of 13696 
stonewall rank 56: 13275 of 13696 
stonewall rank 154: 13239 of 13696 
stonewall rank 81: 13384 of 13696 
stonewall rank 33: 13308 of 13696 
stonewall rank 10: 13072 of 13696 
stonewall rank 201: 13218 of 13696 
stonewall rank 107: 13357 of 13696 
stonewall rank 177: 13342 of 13696 
stonewall rank 129: 13212 of 13696 
stonewall rank 225: 13315 of 13696 
stonewall rank 57: 13122 of 13696 
stonewall rank 155: 13332 of 13696 
stonewall rank 82: 13236 of 13696 
stonewall rank 34: 13408 of 13696 
stonewall rank 11: 13219 of 13696 
stonewall rank 202: 13006 of 13696 
stonewall rank 109: 13556 of 13696 
stonewall rank 178: 13519 of 13696 
stonewall rank 130: 13264 of 13696 
stonewall rank 226: 13114 of 13696 
stonewall rank 58: 13195 of 13696 
stonewall rank 156: 13396 of 13696 
stonewall rank 83: 13310 of 13696 
stonewall rank 35: 13436 of 13696 
stonewall rank 12: 13068 of 13696 
stonewall rank 203: 13121 of 13696 
stonewall rank 110: 13536 of 13696 
stonewall rank 179: 13395 of 13696 
stonewall rank 131: 13219 of 13696 
stonewall rank 227: 13393 of 13696 
stonewall rank 59: 13040 of 13696 
stonewall rank 157: 13357 of 13696 
stonewall rank 84: 13386 of 13696 
stonewall rank 36: 13418 of 13696 
stonewall rank 13: 13329 of 13696 
stonewall rank 204: 13237 of 13696 
stonewall rank 111: 13662 of 13696 
stonewall rank 180: 13494 of 13696 
stonewall rank 132: 13516 of 13696 
stonewall rank 228: 13557 of 13696 
stonewall rank 60: 13240 of 13696 
stonewall rank 158: 13381 of 13696 
stonewall rank 85: 13410 of 13696 
stonewall rank 37: 13483 of 13696 
stonewall rank 14: 13196 of 13696 
stonewall rank 205: 13345 of 13696 
stonewall rank 112: 13502 of 13696 
stonewall rank 181: 13573 of 13696 
stonewall rank 133: 13413 of 13696 
stonewall rank 229: 13275 of 13696 
stonewall rank 61: 13334 of 13696 
stonewall rank 159: 13487 of 13696 
stonewall rank 86: 13096 of 13696 
stonewall rank 38: 13261 of 13696 
stonewall rank 15: 13259 of 13696 
stonewall rank 206: 13496 of 13696 
stonewall rank 113: 13641 of 13696 
stonewall rank 182: 13131 of 13696 
stonewall rank 134: 13266 of 13696 
stonewall rank 230: 13404 of 13696 
stonewall rank 62: 13388 of 13696 
stonewall rank 160: 13192 of 13696 
stonewall rank 87: 13457 of 13696 
stonewall rank 39: 13454 of 13696 
stonewall rank 16: 13283 of 13696 
stonewall rank 207: 13157 of 13696 
stonewall rank 114: 13589 of 13696 
stonewall rank 183: 13485 of 13696 
stonewall rank 135: 13363 of 13696 
stonewall rank 231: 13286 of 13696 
stonewall rank 63: 13309 of 13696 
stonewall rank 161: 13467 of 13696 
stonewall rank 88: 13414 of 13696 
stonewall rank 40: 13346 of 13696 
stonewall rank 17: 13344 of 13696 
stonewall rank 208: 13269 of 13696 
stonewall rank 115: 13645 of 13696 
stonewall rank 184: 13460 of 13696 
stonewall rank 136: 13240 of 13696 
stonewall rank 232: 13271 of 13696 
stonewall rank 64: 13502 of 13696 
stonewall rank 162: 13454 of 13696 
stonewall rank 89: 13350 of 13696 
stonewall rank 41: 13395 of 13696 
stonewall rank 18: 13187 of 13696 
stonewall rank 209: 13220 of 13696 
stonewall rank 116: 13645 of 13696 
stonewall rank 185: 13332 of 13696 
stonewall rank 137: 13271 of 13696 
stonewall rank 233: 13446 of 13696 
stonewall rank 65: 13412 of 13696 
stonewall rank 163: 13294 of 13696 
stonewall rank 90: 13562 of 13696 
stonewall rank 42: 13381 of 13696 
stonewall rank 19: 13269 of 13696 
stonewall rank 210: 13053 of 13696 
stonewall rank 117: 13634 of 13696 
stonewall rank 186: 13398 of 13696 
stonewall rank 138: 13612 of 13696 
stonewall rank 234: 13247 of 13696 
stonewall rank 66: 13538 of 13696 
stonewall rank 164: 13439 of 13696 
stonewall rank 91: 13360 of 13696 
stonewall rank 43: 13394 of 13696 
stonewall rank 20: 13182 of 13696 
stonewall rank 211: 13138 of 13696 
stonewall rank 118: 13510 of 13696 
stonewall rank 187: 13462 of 13696 
stonewall rank 139: 13350 of 13696 
stonewall rank 235: 13322 of 13696 
stonewall rank 67: 13459 of 13696 
stonewall rank 165: 13476 of 13696 
stonewall rank 92: 13293 of 13696 
stonewall rank 44: 13490 of 13696 
stonewall rank 21: 13230 of 13696 
stonewall rank 212: 13055 of 13696 
stonewall rank 119: 13561 of 13696 
stonewall rank 188: 13478 of 13696 
stonewall rank 140: 13233 of 13696 
stonewall rank 236: 13337 of 13696 
stonewall rank 68: 13282 of 13696 
stonewall rank 166: 13475 of 13696 
stonewall rank 93: 13461 of 13696 
stonewall rank 45: 13234 of 13696 
stonewall rank 22: 13254 of 13696 
stonewall rank 213: 13311 of 13696 
stonewall rank 96: 13405 of 13696 
stonewall rank 189: 13374 of 13696 
stonewall rank 141: 13327 of 13696 
stonewall rank 237: 13562 of 13696 
stonewall rank 69: 13285 of 13696 
stonewall rank 167: 13356 of 13696 
stonewall rank 94: 13375 of 13696 
stonewall rank 46: 13421 of 13696 
stonewall rank 23: 13154 of 13696 
stonewall rank 214: 13271 of 13696 
stonewall rank 190: 13229 of 13696 
stonewall rank 142: 13340 of 13696 
stonewall rank 238: 13338 of 13696 
stonewall rank 70: 13496 of 13696 
stonewall rank 95: 13354 of 13696 
stonewall rank 47: 13502 of 13696 
Continue stonewall hit min: 12978 max: 13696 avg: 13323.2 
stonewall rank 0: 13153 of 13696 
stonewall rank 215: 13243 of 13696 
stonewall rank 191: 13346 of 13696 
stonewall rank 143: 13252 of 13696 
stonewall rank 239: 13456 of 13696 
stonewall rank 71: 13378 of 13696 

SUMMARY rate: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation     :      19534.077      19534.077      19534.077          0.000
   File stat         :          0.000          0.000          0.000          0.000
   File read         :          0.000          0.000          0.000          0.000
   File removal      :          0.000          0.000          0.000          0.000
   Tree creation     :          0.394          0.394          0.394          0.000
   Tree removal      :          0.000          0.000          0.000          0.000

-- finished at 11/07/2018 15:59:34 --
mdtest_hard_delete
-- started at 11/07/2018 16:37:42 --

mdtest-1.9.3 was launched with 240 total task(s) on 10 node(s)
Command line used: /data/autoScratch/weekly/aaw360/io-500/benchmarks/io-500/io-500-qmul/bin/mdtest "-r" "-t" "-F" "-w" "3901" "-e" "3901" "-d" "/data/autoScratch/weekly/aaw360/io-500/benchmarks/io-500/io-500-qmul/datafiles.gs7k/io500.2018.11.07-15.47.43/mdt_hard" "-n" "10000" "-x" "/data/autoScratch/weekly/aaw360/io-500/benchmarks/io-500/io-500-qmul/datafiles.gs7k/io500.2018.11.07-15.47.43/mdt_hard-stonewall"
Path: /data/ITSR-Testing/io-500/datafiles.gs7k/io500.2018.11.07-15.47.43
FS: 11.0 TiB   Used FS: 58.2%   Inodes: 104.9 Mi   Used Inodes: 4.8%

240 tasks, 2400000 files

SUMMARY rate: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation     :          0.000          0.000          0.000          0.000
   File stat         :          0.000          0.000          0.000          0.000
   File read         :          0.000          0.000          0.000          0.000
   File removal      :       3488.128       3488.128       3488.128          0.000
   Tree creation     :          0.000          0.000          0.000          0.000
   Tree removal      :          0.942          0.942          0.942          0.000

-- finished at 11/07/2018 16:43:31 --
mdtest_hard_read
-- started at 11/07/2018 16:33:08 --

mdtest-1.9.3 was launched with 240 total task(s) on 10 node(s)
Command line used: /data/autoScratch/weekly/aaw360/io-500/benchmarks/io-500/io-500-qmul/bin/mdtest "-E" "-t" "-F" "-w" "3901" "-e" "3901" "-d" "/data/autoScratch/weekly/aaw360/io-500/benchmarks/io-500/io-500-qmul/datafiles.gs7k/io500.2018.11.07-15.47.43/mdt_hard" "-n" "10000" "-x" "/data/autoScratch/weekly/aaw360/io-500/benchmarks/io-500/io-500-qmul/datafiles.gs7k/io500.2018.11.07-15.47.43/mdt_hard-stonewall"
Path: /data/ITSR-Testing/io-500/datafiles.gs7k/io500.2018.11.07-15.47.43
FS: 11.0 TiB   Used FS: 58.2%   Inodes: 104.9 Mi   Used Inodes: 4.9%

240 tasks, 2400000 files

SUMMARY rate: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation     :          0.000          0.000          0.000          0.000
   File stat         :          0.000          0.000          0.000          0.000
   File read         :       4501.393       4501.393       4501.393          0.000
   File removal      :          0.000          0.000          0.000          0.000
   Tree creation     :          0.000          0.000          0.000          0.000
   Tree removal      :          0.000          0.000          0.000          0.000

-- finished at 11/07/2018 16:37:38 --
mdtest_hard_stat
-- started at 11/07/2018 16:27:00 --

mdtest-1.9.3 was launched with 240 total task(s) on 10 node(s)
Command line used: /data/autoScratch/weekly/aaw360/io-500/benchmarks/io-500/io-500-qmul/bin/mdtest "-T" "-t" "-F" "-w" "3901" "-e" "3901" "-d" "/data/autoScratch/weekly/aaw360/io-500/benchmarks/io-500/io-500-qmul/datafiles.gs7k/io500.2018.11.07-15.47.43/mdt_hard" "-n" "10000" "-x" "/data/autoScratch/weekly/aaw360/io-500/benchmarks/io-500/io-500-qmul/datafiles.gs7k/io500.2018.11.07-15.47.43/mdt_hard-stonewall"
Path: /data/ITSR-Testing/io-500/datafiles.gs7k/io500.2018.11.07-15.47.43
FS: 11.0 TiB   Used FS: 58.2%   Inodes: 104.9 Mi   Used Inodes: 7.0%

240 tasks, 2400000 files

SUMMARY rate: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation     :          0.000          0.000          0.000          0.000
   File stat         :       5894.158       5894.158       5894.158          0.000
   File read         :          0.000          0.000          0.000          0.000
   File removal      :          0.000          0.000          0.000          0.000
   Tree creation     :          0.000          0.000          0.000          0.000
   Tree removal      :          0.000          0.000          0.000          0.000

-- finished at 11/07/2018 16:30:26 --
mdtest_hard_write
-- started at 11/07/2018 16:07:25 --

mdtest-1.9.3 was launched with 240 total task(s) on 10 node(s)
Command line used: /data/autoScratch/weekly/aaw360/io-500/benchmarks/io-500/io-500-qmul/bin/mdtest "-C" "-t" "-F" "-w" "3901" "-e" "3901" "-d" "/data/autoScratch/weekly/aaw360/io-500/benchmarks/io-500/io-500-qmul/datafiles.gs7k/io500.2018.11.07-15.47.43/mdt_hard" "-n" "10000" "-x" "/data/autoScratch/weekly/aaw360/io-500/benchmarks/io-500/io-500-qmul/datafiles.gs7k/io500.2018.11.07-15.47.43/mdt_hard-stonewall" "-W" "300"
Path: /data/ITSR-Testing/io-500/datafiles.gs7k/io500.2018.11.07-15.47.43
FS: 11.0 TiB   Used FS: 56.8%   Inodes: 104.9 Mi   Used Inodes: 5.9%

240 tasks, 2400000 files
stonewall rank 193: 5041 of 5055 
stonewall rank 168: 3789 of 5055 
stonewall rank 120: 3842 of 5055 
stonewall rank 144: 3905 of 5055 
stonewall rank 72: 4348 of 5055 
stonewall rank 24: 4356 of 5055 
stonewall rank 96: 4332 of 5055 
stonewall rank 194: 5040 of 5055 
stonewall rank 169: 3788 of 5055 
stonewall rank 216: 3928 of 5055 
stonewall rank 145: 3896 of 5055 
stonewall rank 25: 4354 of 5055 
stonewall rank 48: 4414 of 5055 
stonewall rank 97: 4330 of 5055 
stonewall rank 196: 5051 of 5055 
stonewall rank 170: 3788 of 5055 
stonewall rank 121: 3842 of 5055 
stonewall rank 217: 3954 of 5055 
stonewall rank 146: 3895 of 5055 
stonewall rank 73: 4325 of 5055 
stonewall rank 26: 4369 of 5055 
stonewall rank 49: 4407 of 5055 
stonewall rank 98: 4337 of 5055 
stonewall rank 197: 5045 of 5055 
stonewall rank 171: 3790 of 5055 
stonewall rank 122: 3852 of 5055 
stonewall rank 218: 3926 of 5055 
stonewall rank 147: 3898 of 5055 
stonewall rank 74: 4333 of 5055 
stonewall rank 27: 4352 of 5055 
stonewall rank 50: 4408 of 5055 
stonewall rank 99: 4332 of 5055 
stonewall rank 198: 5045 of 5055 
stonewall rank 172: 3787 of 5055 
stonewall rank 123: 3841 of 5055 
stonewall rank 219: 3927 of 5055 
stonewall rank 148: 3911 of 5055 
stonewall rank 75: 4326 of 5055 
stonewall rank 28: 4351 of 5055 
stonewall rank 51: 4409 of 5055 
stonewall rank 100: 4327 of 5055 
stonewall rank 199: 5033 of 5055 
stonewall rank 173: 3786 of 5055 
stonewall rank 124: 3848 of 5055 
stonewall rank 220: 3927 of 5055 
stonewall rank 149: 3902 of 5055 
stonewall rank 76: 4325 of 5055 
stonewall rank 29: 4355 of 5055 
stonewall rank 52: 4421 of 5055 
stonewall rank 101: 4326 of 5055 
stonewall rank 200: 5040 of 5055 
stonewall rank 174: 3789 of 5055 
stonewall rank 125: 3854 of 5055 
stonewall rank 221: 3933 of 5055 
stonewall rank 150: 3895 of 5055 
stonewall rank 77: 4334 of 5055 
stonewall rank 30: 4356 of 5055 
stonewall rank 53: 4414 of 5055 
stonewall rank 102: 4339 of 5055 
stonewall rank 201: 5037 of 5055 
stonewall rank 175: 3794 of 5055 
stonewall rank 126: 3844 of 5055 
stonewall rank 222: 3943 of 5055 
stonewall rank 151: 3920 of 5055 
stonewall rank 78: 4332 of 5055 
stonewall rank 31: 4350 of 5055 
stonewall rank 54: 4414 of 5055 
stonewall rank 103: 4330 of 5055 
stonewall rank 202: 5038 of 5055 
stonewall rank 176: 3800 of 5055 
stonewall rank 127: 3840 of 5055 
stonewall rank 223: 3933 of 5055 
stonewall rank 152: 3907 of 5055 
stonewall rank 79: 4329 of 5055 
stonewall rank 32: 4351 of 5055 
stonewall rank 55: 4409 of 5055 
stonewall rank 104: 4337 of 5055 
stonewall rank 203: 5035 of 5055 
stonewall rank 177: 3793 of 5055 
stonewall rank 128: 3846 of 5055 
stonewall rank 224: 3927 of 5055 
stonewall rank 153: 3897 of 5055 
stonewall rank 80: 4331 of 5055 
stonewall rank 33: 4353 of 5055 
stonewall rank 56: 4410 of 5055 
stonewall rank 105: 4327 of 5055 
stonewall rank 204: 5048 of 5055 
stonewall rank 178: 3788 of 5055 
stonewall rank 129: 3845 of 5055 
stonewall rank 225: 3925 of 5055 
stonewall rank 154: 3906 of 5055 
stonewall rank 81: 4328 of 5055 
stonewall rank 34: 4353 of 5055 
stonewall rank 57: 4410 of 5055 
stonewall rank 106: 4337 of 5055 
stonewall rank 205: 5031 of 5055 
stonewall rank 179: 3787 of 5055 
stonewall rank 130: 3852 of 5055 
stonewall rank 226: 3925 of 5055 
stonewall rank 155: 3899 of 5055 
stonewall rank 82: 4328 of 5055 
stonewall rank 35: 4356 of 5055 
stonewall rank 58: 4408 of 5055 
stonewall rank 107: 4328 of 5055 
stonewall rank 206: 5051 of 5055 
stonewall rank 180: 3801 of 5055 
stonewall rank 131: 3842 of 5055 
stonewall rank 227: 3926 of 5055 
stonewall rank 156: 3895 of 5055 
stonewall rank 83: 4330 of 5055 
stonewall rank 36: 4352 of 5055 
stonewall rank 59: 4407 of 5055 
stonewall rank 108: 4326 of 5055 
stonewall rank 207: 5038 of 5055 
stonewall rank 181: 3800 of 5055 
stonewall rank 132: 3848 of 5055 
stonewall rank 228: 3928 of 5055 
stonewall rank 157: 3895 of 5055 
stonewall rank 84: 4326 of 5055 
stonewall rank 37: 4352 of 5055 
stonewall rank 60: 4407 of 5055 
stonewall rank 109: 4328 of 5055 
stonewall rank 208: 5033 of 5055 
stonewall rank 182: 3791 of 5055 
stonewall rank 133: 3842 of 5055 
stonewall rank 229: 3933 of 5055 
stonewall rank 158: 3895 of 5055 
stonewall rank 85: 4325 of 5055 
stonewall rank 38: 4350 of 5055 
stonewall rank 61: 4416 of 5055 
stonewall rank 110: 4334 of 5055 
stonewall rank 209: 5032 of 5055 
stonewall rank 183: 3786 of 5055 
stonewall rank 134: 3842 of 5055 
stonewall rank 230: 3926 of 5055 
stonewall rank 159: 3898 of 5055 
stonewall rank 86: 4326 of 5055 
stonewall rank 39: 4350 of 5055 
stonewall rank 62: 4411 of 5055 
stonewall rank 111: 4333 of 5055 
stonewall rank 210: 5048 of 5055 
stonewall rank 184: 3790 of 5055 
stonewall rank 135: 3850 of 5055 
stonewall rank 231: 3929 of 5055 
stonewall rank 160: 3896 of 5055 
stonewall rank 87: 4325 of 5055 
stonewall rank 40: 4353 of 5055 
stonewall rank 63: 4417 of 5055 
stonewall rank 112: 4330 of 5055 
stonewall rank 211: 5031 of 5055 
stonewall rank 185: 3801 of 5055 
stonewall rank 136: 3844 of 5055 
stonewall rank 232: 3930 of 5055 
stonewall rank 161: 3896 of 5055 
stonewall rank 88: 4333 of 5055 
stonewall rank 41: 4373 of 5055 
stonewall rank 64: 4421 of 5055 
stonewall rank 113: 4333 of 5055 
stonewall rank 212: 5030 of 5055 
stonewall rank 186: 3791 of 5055 
stonewall rank 137: 3840 of 5055 
stonewall rank 233: 3926 of 5055 
stonewall rank 162: 3899 of 5055 
stonewall rank 89: 4332 of 5055 
stonewall rank 42: 4352 of 5055 
stonewall rank 65: 4408 of 5055 
stonewall rank 114: 4331 of 5055 
stonewall rank 213: 5034 of 5055 
stonewall rank 187: 3786 of 5055 
stonewall rank 138: 3843 of 5055 
stonewall rank 234: 3927 of 5055 
stonewall rank 163: 3896 of 5055 
stonewall rank 90: 4344 of 5055 
stonewall rank 43: 4350 of 5055 
stonewall rank 66: 4418 of 5055 
stonewall rank 115: 4328 of 5055 
stonewall rank 214: 5030 of 5055 
stonewall rank 188: 3794 of 5055 
stonewall rank 139: 3864 of 5055 
stonewall rank 235: 3930 of 5055 
stonewall rank 164: 3901 of 5055 
stonewall rank 91: 4340 of 5055 
stonewall rank 44: 4350 of 5055 
stonewall rank 67: 4409 of 5055 
stonewall rank 116: 4332 of 5055 
stonewall rank 215: 5039 of 5055 
stonewall rank 189: 3795 of 5055 
stonewall rank 140: 3843 of 5055 
stonewall rank 236: 3941 of 5055 
stonewall rank 165: 3896 of 5055 
stonewall rank 92: 4325 of 5055 
stonewall rank 45: 4352 of 5055 
stonewall rank 68: 4414 of 5055 
stonewall rank 117: 4331 of 5055 
stonewall rank 192: 5042 of 5055 
stonewall rank 190: 3787 of 5055 
stonewall rank 141: 3847 of 5055 
stonewall rank 237: 3925 of 5055 
stonewall rank 166: 3895 of 5055 
stonewall rank 93: 4337 of 5055 
stonewall rank 46: 4352 of 5055 
stonewall rank 69: 4406 of 5055 
stonewall rank 118: 4329 of 5055 
stonewall rank 191: 3792 of 5055 
stonewall rank 142: 3841 of 5055 
stonewall rank 238: 3933 of 5055 
stonewall rank 167: 3905 of 5055 
stonewall rank 94: 4329 of 5055 
stonewall rank 47: 4356 of 5055 
stonewall rank 70: 4430 of 5055 
stonewall rank 119: 4342 of 5055 
stonewall rank 143: 3845 of 5055 
stonewall rank 239: 3929 of 5055 
stonewall rank 95: 4326 of 5055 
stonewall rank 71: 4412 of 5055 
Continue stonewall hit min: 3786 max: 5055 avg: 4187.5 
stonewall rank 1: 3937 of 5055 
stonewall rank 2: 3948 of 5055 
stonewall rank 3: 3933 of 5055 
stonewall rank 4: 3938 of 5055 
stonewall rank 5: 3934 of 5055 
stonewall rank 6: 3935 of 5055 
stonewall rank 7: 3934 of 5055 
stonewall rank 8: 3934 of 5055 
stonewall rank 9: 3937 of 5055 
stonewall rank 10: 3940 of 5055 
stonewall rank 11: 3937 of 5055 
stonewall rank 12: 3948 of 5055 
stonewall rank 13: 3934 of 5055 
stonewall rank 14: 3941 of 5055 
stonewall rank 15: 3935 of 5055 
stonewall rank 16: 3939 of 5055 
stonewall rank 17: 3934 of 5055 
stonewall rank 18: 3934 of 5055 
stonewall rank 19: 3950 of 5055 
stonewall rank 20: 3949 of 5055 
stonewall rank 21: 3944 of 5055 
stonewall rank 22: 3943 of 5055 
stonewall rank 23: 3936 of 5055 
stonewall rank 0: 3943 of 5055 

SUMMARY rate: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation     :       6352.963       6352.963       6352.963          0.000
   File stat         :          0.000          0.000          0.000          0.000
   File read         :          0.000          0.000          0.000          0.000
   File removal      :          0.000          0.000          0.000          0.000
   Tree creation     :       1631.390       1631.390       1631.390          0.000
   Tree removal      :          0.000          0.000          0.000          0.000

-- finished at 11/07/2018 16:13:43 --
result_summary
[RESULT] BW   phase 1            ior_easy_write                2.244 GB/s : time 376.22 seconds
[RESULT] IOPS phase 1         mdtest_easy_write               19.534 kiops : time 317.57 seconds
[RESULT] BW   phase 2            ior_hard_write                0.288 GB/s : time 455.50 seconds
[RESULT] IOPS phase 2         mdtest_hard_write                6.353 kiops : time 383.03 seconds
[RESULT] IOPS phase 3                      find              241.800 kiops : time  18.61 seconds
[RESULT] BW   phase 3             ior_easy_read                2.167 GB/s : time 389.61 seconds
[RESULT] IOPS phase 4          mdtest_easy_stat               68.555 kiops : time  52.30 seconds
[RESULT] BW   phase 4             ior_hard_read                0.412 GB/s : time 318.66 seconds
[RESULT] IOPS phase 5          mdtest_hard_stat                5.894 kiops : time 214.35 seconds
[RESULT] IOPS phase 6        mdtest_easy_delete               21.922 kiops : time 157.00 seconds
[RESULT] IOPS phase 7          mdtest_hard_read                4.501 kiops : time 274.83 seconds
[RESULT] IOPS phase 8        mdtest_hard_delete                3.488 kiops : time 352.55 seconds
[SCORE] Bandwidth 0.871876 GB/s : IOPS 15.9427 kiops : TOTAL 3.72828