Apocrita

Institution Queen Mary, University of London
Client Procs Per Node 24
Client Operating System CentOS
Client Operating System Version 7.4
Client Kernel Version 3.10.0-693.11.6.el7.x86_64

DATA SERVER

Storage Type SSD,NVMe
Volatile Memory 188g
Storage Interface NVMe
Network Ethernet
Software Version GPFS
OS Version CentOS 7.4

INFORMATION

Client Nodes 10
Client Total Procs 240
Metadata Nodes 3
Metadata Storage Devices 6
Data Nodes 3
Data Storage Devices 6

METADATA

Easy Write 15.39 kIOP/s
Easy Stat 67.24 kIOP/s
Easy Delete 50.48 kIOP/s
Hard Write 5.95 kIOP/s
Hard Read 8.38 kIOP/s
Hard Stat 8.79 kIOP/s
Hard Delete 4.71 kIOP/s

Submitted Files

io500
#!/bin/bash
# SGE batch directives: run from submit dir, 240-way parallel env,
# the sdv-ii InfiniBand host group, 4-hour wallclock limit.
#$ -cwd
#$ -pe parallel 240
#$ -l infiniband=sdv-ii
#$ -l h_rt=4:0:0

#
# INSTRUCTIONS:
# Edit this file as needed for your machine.
# This simplified version is just for running on a single node.
# It is a simplified version of the site-configs/sandia/startup.sh which include SLURM directives.
# Most of the variables set in here are needed for io500_fixed.sh which gets sourced at the end of this.
# Please also edit 'extra_description' function.

module load intelmpi
set -euo pipefail  # better error handling

# turn these to True successively while you debug and tune this benchmark.
# for each one that you turn to true, go and edit the appropriate function.
# to find the function name, see the 'main' function.
# These are listed in the order that they run.
io500_run_ior_easy="True" # does the write phase and enables the subsequent read
io500_run_md_easy="True"  # does the creat phase and enables the subsequent stat
io500_run_ior_hard="True" # does the write phase and enables the subsequent read
io500_run_md_hard="True"  # does the creat phase and enables the subsequent read
io500_run_find="True"
io500_run_ior_easy_read="True"
io500_run_md_easy_stat="True"
io500_run_ior_hard_read="True"
io500_run_md_hard_stat="True"
io500_run_md_hard_read="True"
io500_run_md_easy_delete="True" # turn this off if you want to just run find by itself
io500_run_md_hard_delete="True" # turn this off if you want to just run find by itself
io500_run_mdreal="True"  # this one is optional
io500_cleanup_workdir="False"  # this flag is currently ignored. You'll need to clean up your data files manually if you want to.
io500_stonewall_timer=300 # Stonewalling timer, stop with wearout after 300s with default test, set to 0, if you never want to abort...

# to run this benchmark, find and edit each of these functions.
# please also edit 'extra_description' function to help us collect the required data.
function main {
  # Drive the full run: prepare directories and binary paths, configure
  # every benchmark phase (all of setup_ior_*/setup_mdt_*/setup_find are
  # required for a complete score; setup_mdreal is optional), then hand
  # off to the fixed IO-500 driver.  Order matters and must not change.
  local phase
  for phase in \
      setup_directories \
      setup_paths \
      setup_ior_easy \
      setup_ior_hard \
      setup_mdt_easy \
      setup_mdt_hard \
      setup_find \
      setup_mdreal \
      run_benchmarks; do
    "$phase"
  done
}

function setup_directories {
  # Create the per-run working directory (benchmark data files) and the
  # results directory, both stamped with a unique timestamp so repeated
  # runs never collide.  This is also the place to add stripe tuning or
  # similar per-directory settings if your filesystem needs them.
  timestamp=$(date +%Y.%m.%d-%H.%M.%S)             # uniquifier for this run
  io500_workdir=$PWD/datafiles.e8/io500.$timestamp # where the benchmark data goes
  io500_result_dir=$PWD/results/e8.$timestamp      # where the output results are kept
  # quote the paths and stop option parsing with -- so odd $PWD values are safe
  mkdir -p -- "$io500_workdir" "$io500_result_dir"
}

function setup_paths {
  # Locate the benchmark binaries (placed in ./bin/ by a successful
  # ./utilities/prepare.sh) and configure the MPI launcher.  $NSLOTS is
  # the slot count provided by the SGE parallel environment.
  local bindir="$PWD/bin"
  io500_ior_cmd="$bindir/ior"
  io500_mdtest_cmd="$bindir/mdtest"
  io500_mdreal_cmd="$bindir/md-real-io"
  io500_mpirun="mpirun"
  io500_mpiargs="-np $NSLOTS"
}

function setup_ior_easy {
  # IOR "easy" phase.  io500_ior_easy_size is the amount of data written
  # per rank in MiB; it feeds the -b block size below and therefore
  # scales the phase runtime.
  io500_ior_easy_size=15000
  # 2 MiB transfers, ~15 GB block per process, one file per process (-F)
  local xfer_size="2048k"
  io500_ior_easy_params="-t $xfer_size -b ${io500_ior_easy_size}m -F"
}

function setup_mdt_easy {
  # mdtest "easy" phase: 25000 files per MPI process, a unique directory
  # per thread (-u), files only at the leaf level (-L).
  io500_mdtest_easy_files_per_proc=25000
  io500_mdtest_easy_params="-u -L"
}

function setup_ior_hard {
  # IOR "hard" phase: writes per rank into the single shared file.
  # No extra IOR options are used here (candidates include -E to keep
  # precreated files with lfs setstripe, or -a MPIIO).
  io500_ior_hard_other_options=""
  io500_ior_hard_writes_per_proc=40000
}

function setup_mdt_hard {
  # mdtest "hard" phase: 10000 files per MPI process, no extra options.
  io500_mdtest_hard_other_options=""
  io500_mdtest_hard_files_per_proc=10000
}

function setup_find {
  # Configure the "find" phase.  Innovation is allowed here; io-500 ships
  # three stock options:
  #   * bin/sfind.sh  - serial find; runs anywhere but is very slow
  #                     (io500_find_mpi="False", no args)
  #   * bin/pfind     - parallel C version; build it with
  #                     cd build/pfind && ./prepare.sh && ./compile.sh
  #                     then cp pfind ../../bin/
  #   * bin/mmfind.sh - GPFS mmfind wrapper installed by
  #                     ./utilities/prepare.sh; recommended on GPFS
  #                     (io500_find_mpi="False", no args)
  # If you use a custom approach instead, document it well enough that
  # others can reproduce it.
  #
  # This run uses the parallel C pfind.  With io500_find_mpi="True" it is
  # launched with the same MPI node/rank layout as the other phases; if
  # you want a different (often smaller) layout, set it to "False" and
  # point io500_find_cmd at a wrapper script that sets up MPI itself.
  io500_find_mpi="True"
  io500_find_cmd="$PWD/bin/pfind"
  # -s enables stonewalling (use 300+ or disable for a real run);
  # -r writes the pfind results under the results directory.
  io500_find_cmd_args="-s $io500_stonewall_timer -r $io500_result_dir/pfind_results"
}

function setup_mdreal {
  # Options for the optional md-real-io phase (see md-real-io --help for
  # the exact -P / -I semantics).
  local p_val=5000 i_val=1000
  io500_mdreal_params="-P=$p_val -I=$i_val"
}

function run_benchmarks {
  # Important: source the io500_fixed.sh script.  Do not change it.  If
  # you discover a need to change it, email the mailing list to discuss.
  # Both stdout and stderr are captured into the per-run summary file.
  local summary_file="$io500_result_dir/io-500-summary.$timestamp.txt"
  source ./utilities/io500_fixed.sh 2>&1 | tee "$summary_file"
}

# Add key/value pairs defining your system
# Feel free to add extra ones if you'd like
function extra_description {
  # Key/value pairs describing the system for the IO-500 submission.
  # Feel free to add extra ones.  Values here should agree with the
  # cluster metadata (QMUL Apocrita, 10 clients x 24 procs, GPFS on E8).
  # top level info
  io500_info_system_name='Apocrita'      # e.g. Oakforest-PACS
  io500_info_institute_name='QMUL'   # e.g. JCAHPC
  io500_info_storage_age_in_months='1' # not install date but age since last refresh
  io500_info_storage_install_date='11/2018'  # MM/YY
  io500_info_filesystem='GPFS'     # e.g. BeeGFS, DataWarp, GPFS, IME, Lustre
  io500_info_filesystem_version='4.2.3-8'
  io500_info_filesystem_vendor='E8'
  # client side info
  io500_info_num_client_nodes='10'
  io500_info_procs_per_node='24'
  # server side info
  io500_info_num_metadata_server_nodes='3'
  io500_info_num_data_server_nodes='3'
  io500_info_num_data_storage_devices='1'  # if you have 5 data servers, and each has 5 drives, then this number is 25
  io500_info_num_metadata_storage_devices='1'  # if you have 2 metadata servers, and each has 5 drives, then this number is 10
  # spelling fixed: 'SSD/NvMe' -> 'SSD/NVMe', matching the metadata field below
  io500_info_data_storage_type='SSD/NVMe' # HDD, SSD, persistent memory, etc, feel free to put specific models
  io500_info_metadata_storage_type='SSD/NVMe' # HDD, SSD, persistent memory, etc, feel free to put specific models
  io500_info_storage_network='ethernet' # infiniband, omnipath, ethernet, etc
  # spelling fixed: 'NvMe/Infiband' -> 'NVMe/Infiniband'
  io500_info_storage_interface='NVMe/Infiniband' # SAS, SATA, NVMe, etc
  # miscellaneous
  io500_info_whatever='GPFS on a E8 D24'
}

# Entry point: run the whole IO-500 sequence configured above.
main
ior_easy_read
IOR-3.2.0: MPI Coordinated Test of Parallel I/O
Began               : Fri Nov  9 09:24:42 2018
Command line        : /data/autoScratch/weekly/aaw360/io-500/benchmarks/io-500/io-500-qmul/bin/ior -r -R -C -Q 1 -g -G 27 -k -e -t 2048k -b 15000m -F -o /data/autoScratch/weekly/aaw360/io-500/benchmarks/io-500/io-500-qmul/datafiles.e8/io500.2018.11.09-08.59.58/ior_easy/ior_file_easy -O stoneWallingStatusFile=/data/autoScratch/weekly/aaw360/io-500/benchmarks/io-500/io-500-qmul/datafiles.e8/io500.2018.11.09-08.59.58/ior_easy/stonewall
Machine             : Linux sdv42
TestID              : 0
StartTime           : Fri Nov  9 09:24:42 2018
Path                : /gpfs/e8Scratch/ITSR-Testing/io-500/datafiles/io500.2018.11.09-08.59.58/ior_easy
FS                  : 124.2 TiB   Used FS: 8.8%   Inodes: 133.7 Mi   Used Inodes: 6.1%

Options: 
api                 : POSIX
apiVersion          : 
test filename       : /data/autoScratch/weekly/aaw360/io-500/benchmarks/io-500/io-500-qmul/datafiles.e8/io500.2018.11.09-08.59.58/ior_easy/ior_file_easy
access              : file-per-process
type                : independent
segments            : 1
ordering in a file  : sequential
ordering inter file : constant task offset
task offset         : 1
tasks               : 240
clients per node    : 24
repetitions         : 1
xfersize            : 2 MiB
blocksize           : 14.65 GiB
aggregate filesize  : 3.43 TiB

Results: 

access    bw(MiB/s)  block(KiB) xfer(KiB)  open(s)    wr/rd(s)   close(s)   total(s)   iter
------    ---------  ---------- ---------  --------   --------   --------   --------   ----
read      8965       15360000   2048.00    0.019797   401.52     0.001083   401.54     0   
Max Read:  8965.39 MiB/sec (9400.89 MB/sec)

Summary of all tests:
Operation   Max(MiB)   Min(MiB)  Mean(MiB)     StdDev   Max(OPs)   Min(OPs)  Mean(OPs)     StdDev    Mean(s) Test# #Tasks tPN reps fPP reord reordoff reordrand seed segcnt   blksiz    xsize aggs(MiB)   API RefNum
read         8965.39    8965.39    8965.39       0.00    4482.70    4482.70    4482.70       0.00  401.54414     0    240  24    1   1     1        1         0    0      1 15728640000  2097152 3600000.0 POSIX      0
Finished            : Fri Nov  9 09:31:23 2018
ior_easy_write
IOR-3.2.0: MPI Coordinated Test of Parallel I/O
Began               : Fri Nov  9 09:00:04 2018
Command line        : /data/autoScratch/weekly/aaw360/io-500/benchmarks/io-500/io-500-qmul/bin/ior -w -C -Q 1 -g -G 27 -k -e -t 2048k -b 15000m -F -o /data/autoScratch/weekly/aaw360/io-500/benchmarks/io-500/io-500-qmul/datafiles.e8/io500.2018.11.09-08.59.58/ior_easy/ior_file_easy -O stoneWallingStatusFile=/data/autoScratch/weekly/aaw360/io-500/benchmarks/io-500/io-500-qmul/datafiles.e8/io500.2018.11.09-08.59.58/ior_easy/stonewall -O stoneWallingWearOut=1 -D 300
Machine             : Linux sdv42
TestID              : 0
StartTime           : Fri Nov  9 09:00:04 2018
Path                : /gpfs/e8Scratch/ITSR-Testing/io-500/datafiles/io500.2018.11.09-08.59.58/ior_easy
FS                  : 124.2 TiB   Used FS: 5.7%   Inodes: 133.7 Mi   Used Inodes: 1.2%

Options: 
api                 : POSIX
apiVersion          : 
test filename       : /data/autoScratch/weekly/aaw360/io-500/benchmarks/io-500/io-500-qmul/datafiles.e8/io500.2018.11.09-08.59.58/ior_easy/ior_file_easy
access              : file-per-process
type                : independent
segments            : 1
ordering in a file  : sequential
ordering inter file : constant task offset
task offset         : 1
tasks               : 240
clients per node    : 24
repetitions         : 1
xfersize            : 2 MiB
blocksize           : 14.65 GiB
aggregate filesize  : 3.43 TiB
stonewallingTime    : 300
stoneWallingWearOut : 1

Results: 

access    bw(MiB/s)  block(KiB) xfer(KiB)  open(s)    wr/rd(s)   close(s)   total(s)   iter
------    ---------  ---------- ---------  --------   --------   --------   --------   ----
stonewalling pairs accessed min: 3397 max: 7500 -- min data: 6.6 GiB mean data: 11.5 GiB time: 300.5s
write     9072       15360000   2048.00    1.12       395.71     0.010117   396.82     0   
Max Write: 9072.05 MiB/sec (9512.74 MB/sec)

Summary of all tests:
Operation   Max(MiB)   Min(MiB)  Mean(MiB)     StdDev   Max(OPs)   Min(OPs)  Mean(OPs)     StdDev    Mean(s) Test# #Tasks tPN reps fPP reord reordoff reordrand seed segcnt   blksiz    xsize aggs(MiB)   API RefNum
write        9072.05    9072.05    9072.05       0.00    4536.03    4536.03    4536.03       0.00  396.82308     0    240  24    1   1     1        1         0    0      1 15728640000  2097152 3600000.0 POSIX      0
Finished            : Fri Nov  9 09:06:41 2018
ior_hard_read
IOR-3.2.0: MPI Coordinated Test of Parallel I/O
Began               : Fri Nov  9 09:32:44 2018
Command line        : /data/autoScratch/weekly/aaw360/io-500/benchmarks/io-500/io-500-qmul/bin/ior -r -R -C -Q 1 -g -G 27 -k -e -t 47008 -b 47008 -s 40000 -o /data/autoScratch/weekly/aaw360/io-500/benchmarks/io-500/io-500-qmul/datafiles.e8/io500.2018.11.09-08.59.58/ior_hard/IOR_file -O stoneWallingStatusFile=/data/autoScratch/weekly/aaw360/io-500/benchmarks/io-500/io-500-qmul/datafiles.e8/io500.2018.11.09-08.59.58/ior_hard/stonewall
Machine             : Linux sdv42
TestID              : 0
StartTime           : Fri Nov  9 09:32:44 2018
Path                : /gpfs/e8Scratch/ITSR-Testing/io-500/datafiles/io500.2018.11.09-08.59.58/ior_hard
FS                  : 124.2 TiB   Used FS: 8.8%   Inodes: 133.7 Mi   Used Inodes: 6.3%

Options: 
api                 : POSIX
apiVersion          : 
test filename       : /data/autoScratch/weekly/aaw360/io-500/benchmarks/io-500/io-500-qmul/datafiles.e8/io500.2018.11.09-08.59.58/ior_hard/IOR_file
access              : single-shared-file
type                : independent
segments            : 40000
ordering in a file  : sequential
ordering inter file : constant task offset
task offset         : 1
tasks               : 240
clients per node    : 24
repetitions         : 1
xfersize            : 47008 bytes
blocksize           : 47008 bytes
aggregate filesize  : 420.28 GiB

Results: 

access    bw(MiB/s)  block(KiB) xfer(KiB)  open(s)    wr/rd(s)   close(s)   total(s)   iter
------    ---------  ---------- ---------  --------   --------   --------   --------   ----
read      3641       45.91      45.91      0.002928   118.14     0.061013   118.20     0   
Max Read:  3641.00 MiB/sec (3817.86 MB/sec)

Summary of all tests:
Operation   Max(MiB)   Min(MiB)  Mean(MiB)     StdDev   Max(OPs)   Min(OPs)  Mean(OPs)     StdDev    Mean(s) Test# #Tasks tPN reps fPP reord reordoff reordrand seed segcnt   blksiz    xsize aggs(MiB)   API RefNum
read         3641.00    3641.00    3641.00       0.00   81217.26   81217.26   81217.26       0.00  118.20147     0    240  24    1   0     1        1         0    0  40000    47008    47008  430371.1 POSIX      0
Finished            : Fri Nov  9 09:34:42 2018
ior_hard_write
IOR-3.2.0: MPI Coordinated Test of Parallel I/O
Began               : Fri Nov  9 09:12:02 2018
Command line        : /data/autoScratch/weekly/aaw360/io-500/benchmarks/io-500/io-500-qmul/bin/ior -w -C -Q 1 -g -G 27 -k -e -t 47008 -b 47008 -s 40000 -o /data/autoScratch/weekly/aaw360/io-500/benchmarks/io-500/io-500-qmul/datafiles.e8/io500.2018.11.09-08.59.58/ior_hard/IOR_file -O stoneWallingStatusFile=/data/autoScratch/weekly/aaw360/io-500/benchmarks/io-500/io-500-qmul/datafiles.e8/io500.2018.11.09-08.59.58/ior_hard/stonewall -O stoneWallingWearOut=1 -D 300
Machine             : Linux sdv42
TestID              : 0
StartTime           : Fri Nov  9 09:12:02 2018
Path                : /gpfs/e8Scratch/ITSR-Testing/io-500/datafiles/io500.2018.11.09-08.59.58/ior_hard
FS                  : 124.2 TiB   Used FS: 8.5%   Inodes: 133.7 Mi   Used Inodes: 4.2%

Options: 
api                 : POSIX
apiVersion          : 
test filename       : /data/autoScratch/weekly/aaw360/io-500/benchmarks/io-500/io-500-qmul/datafiles.e8/io500.2018.11.09-08.59.58/ior_hard/IOR_file
access              : single-shared-file
type                : independent
segments            : 40000
ordering in a file  : sequential
ordering inter file : constant task offset
task offset         : 1
tasks               : 240
clients per node    : 24
repetitions         : 1
xfersize            : 47008 bytes
blocksize           : 47008 bytes
aggregate filesize  : 420.28 GiB
stonewallingTime    : 300
stoneWallingWearOut : 1

Results: 

access    bw(MiB/s)  block(KiB) xfer(KiB)  open(s)    wr/rd(s)   close(s)   total(s)   iter
------    ---------  ---------- ---------  --------   --------   --------   --------   ----
stonewalling pairs accessed min: 33320 max: 40000 -- min data: 1.5 GiB mean data: 1.6 GiB time: 300.2s
write     1291.57    45.91      45.91      0.007086   333.19     0.015236   333.21     0   
Max Write: 1291.57 MiB/sec (1354.31 MB/sec)

Summary of all tests:
Operation   Max(MiB)   Min(MiB)  Mean(MiB)     StdDev   Max(OPs)   Min(OPs)  Mean(OPs)     StdDev    Mean(s) Test# #Tasks tPN reps fPP reord reordoff reordrand seed segcnt   blksiz    xsize aggs(MiB)   API RefNum
write        1291.57    1291.57    1291.57       0.00   28810.26   28810.26   28810.26       0.00  333.21459     0    240  24    1   0     1        1         0    0  40000    47008    47008  430371.1 POSIX      0
Finished            : Fri Nov  9 09:17:35 2018
mdtest_easy_delete
-- started at 11/09/2018 09:39:23 --

mdtest-1.9.3 was launched with 240 total task(s) on 10 node(s)
Command line used: /data/autoScratch/weekly/aaw360/io-500/benchmarks/io-500/io-500-qmul/bin/mdtest "-r" "-F" "-d" "/data/autoScratch/weekly/aaw360/io-500/benchmarks/io-500/io-500-qmul/datafiles.e8/io500.2018.11.09-08.59.58/mdt_easy" "-n" "25000" "-u" "-L" "-x" "/data/autoScratch/weekly/aaw360/io-500/benchmarks/io-500/io-500-qmul/datafiles.e8/io500.2018.11.09-08.59.58/mdt_easy-stonewall"
Path: /gpfs/e8Scratch/ITSR-Testing/io-500/datafiles/io500.2018.11.09-08.59.58
FS: 124.2 TiB   Used FS: 8.8%   Inodes: 133.7 Mi   Used Inodes: 6.3%

240 tasks, 6000000 files

SUMMARY rate: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation     :          0.000          0.000          0.000          0.000
   File stat         :          0.000          0.000          0.000          0.000
   File read         :          0.000          0.000          0.000          0.000
   File removal      :      50478.029      50478.029      50478.029          0.000
   Tree creation     :          0.000          0.000          0.000          0.000
   Tree removal      :          0.557          0.557          0.557          0.000

-- finished at 11/09/2018 09:40:59 --
mdtest_easy_stat
-- started at 11/09/2018 09:31:28 --

mdtest-1.9.3 was launched with 240 total task(s) on 10 node(s)
Command line used: /data/autoScratch/weekly/aaw360/io-500/benchmarks/io-500/io-500-qmul/bin/mdtest "-T" "-F" "-d" "/data/autoScratch/weekly/aaw360/io-500/benchmarks/io-500/io-500-qmul/datafiles.e8/io500.2018.11.09-08.59.58/mdt_easy" "-n" "25000" "-u" "-L" "-x" "/data/autoScratch/weekly/aaw360/io-500/benchmarks/io-500/io-500-qmul/datafiles.e8/io500.2018.11.09-08.59.58/mdt_easy-stonewall"
Path: /gpfs/e8Scratch/ITSR-Testing/io-500/datafiles/io500.2018.11.09-08.59.58
FS: 124.2 TiB   Used FS: 8.8%   Inodes: 133.7 Mi   Used Inodes: 6.3%

240 tasks, 6000000 files

SUMMARY rate: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation     :          0.000          0.000          0.000          0.000
   File stat         :      67238.393      67238.393      67238.393          0.000
   File read         :          0.000          0.000          0.000          0.000
   File removal      :          0.000          0.000          0.000          0.000
   Tree creation     :          0.000          0.000          0.000          0.000
   Tree removal      :          0.000          0.000          0.000          0.000

-- finished at 11/09/2018 09:32:39 --
mdtest_easy_write
-- started at 11/09/2018 09:06:46 --

mdtest-1.9.3 was launched with 240 total task(s) on 10 node(s)
Command line used: /data/autoScratch/weekly/aaw360/io-500/benchmarks/io-500/io-500-qmul/bin/mdtest "-C" "-F" "-d" "/data/autoScratch/weekly/aaw360/io-500/benchmarks/io-500/io-500-qmul/datafiles.e8/io500.2018.11.09-08.59.58/mdt_easy" "-n" "25000" "-u" "-L" "-x" "/data/autoScratch/weekly/aaw360/io-500/benchmarks/io-500/io-500-qmul/datafiles.e8/io500.2018.11.09-08.59.58/mdt_easy-stonewall" "-W" "300"
Path: /gpfs/e8Scratch/ITSR-Testing/io-500/datafiles/io500.2018.11.09-08.59.58
FS: 124.2 TiB   Used FS: 8.1%   Inodes: 133.7 Mi   Used Inodes: 1.2%

240 tasks, 6000000 files
stonewall rank 1: 19362 of 19886 
stonewall rank 168: 19390 of 19886 
stonewall rank 72: 19326 of 19886 
stonewall rank 97: 19371 of 19886 
stonewall rank 216: 18929 of 19886 
stonewall rank 120: 18988 of 19886 
stonewall rank 2: 19251 of 19886 
stonewall rank 48: 18845 of 19886 
stonewall rank 24: 19123 of 19886 
stonewall rank 144: 19167 of 19886 
stonewall rank 192: 19140 of 19886 
stonewall rank 169: 19343 of 19886 
stonewall rank 73: 19542 of 19886 
stonewall rank 98: 19510 of 19886 
stonewall rank 217: 19048 of 19886 
stonewall rank 121: 19300 of 19886 
stonewall rank 3: 19529 of 19886 
stonewall rank 49: 19325 of 19886 
stonewall rank 25: 19223 of 19886 
stonewall rank 145: 19219 of 19886 
stonewall rank 193: 19235 of 19886 
stonewall rank 170: 19586 of 19886 
stonewall rank 74: 19450 of 19886 
stonewall rank 99: 19172 of 19886 
stonewall rank 218: 18910 of 19886 
stonewall rank 122: 19091 of 19886 
stonewall rank 4: 19330 of 19886 
stonewall rank 50: 19008 of 19886 
stonewall rank 26: 19115 of 19886 
stonewall rank 146: 19092 of 19886 
stonewall rank 194: 19236 of 19886 
stonewall rank 171: 19528 of 19886 
stonewall rank 75: 19161 of 19886 
stonewall rank 100: 19452 of 19886 
stonewall rank 219: 19195 of 19886 
stonewall rank 123: 19231 of 19886 
stonewall rank 5: 19333 of 19886 
stonewall rank 51: 19377 of 19886 
stonewall rank 27: 19459 of 19886 
stonewall rank 147: 19085 of 19886 
stonewall rank 195: 19346 of 19886 
stonewall rank 172: 19474 of 19886 
stonewall rank 76: 19367 of 19886 
stonewall rank 101: 19371 of 19886 
stonewall rank 220: 19066 of 19886 
stonewall rank 124: 19261 of 19886 
stonewall rank 6: 19372 of 19886 
stonewall rank 52: 19175 of 19886 
stonewall rank 28: 19085 of 19886 
stonewall rank 148: 19019 of 19886 
stonewall rank 196: 19228 of 19886 
stonewall rank 173: 19472 of 19886 
stonewall rank 77: 19133 of 19886 
stonewall rank 102: 19411 of 19886 
stonewall rank 221: 19226 of 19886 
stonewall rank 125: 19283 of 19886 
stonewall rank 7: 19320 of 19886 
stonewall rank 53: 19254 of 19886 
stonewall rank 29: 19455 of 19886 
stonewall rank 149: 19453 of 19886 
stonewall rank 197: 19251 of 19886 
stonewall rank 174: 19452 of 19886 
stonewall rank 78: 19196 of 19886 
stonewall rank 103: 19462 of 19886 
stonewall rank 222: 19115 of 19886 
stonewall rank 126: 19130 of 19886 
stonewall rank 8: 19402 of 19886 
stonewall rank 54: 18902 of 19886 
stonewall rank 30: 19311 of 19886 
stonewall rank 150: 18863 of 19886 
stonewall rank 198: 19401 of 19886 
stonewall rank 175: 19458 of 19886 
stonewall rank 79: 19491 of 19886 
stonewall rank 104: 19377 of 19886 
stonewall rank 223: 18835 of 19886 
stonewall rank 127: 19419 of 19886 
stonewall rank 9: 19331 of 19886 
stonewall rank 55: 18936 of 19886 
stonewall rank 31: 19358 of 19886 
stonewall rank 151: 19187 of 19886 
stonewall rank 199: 19332 of 19886 
stonewall rank 176: 19329 of 19886 
stonewall rank 80: 19158 of 19886 
stonewall rank 105: 19320 of 19886 
stonewall rank 224: 19021 of 19886 
stonewall rank 128: 19171 of 19886 
stonewall rank 10: 19117 of 19886 
stonewall rank 56: 19112 of 19886 
stonewall rank 32: 19310 of 19886 
stonewall rank 152: 19557 of 19886 
stonewall rank 200: 19210 of 19886 
stonewall rank 177: 19471 of 19886 
stonewall rank 81: 19270 of 19886 
stonewall rank 106: 19424 of 19886 
stonewall rank 225: 19064 of 19886 
stonewall rank 129: 19246 of 19886 
stonewall rank 11: 19619 of 19886 
stonewall rank 57: 19219 of 19886 
stonewall rank 33: 19126 of 19886 
stonewall rank 153: 19295 of 19886 
stonewall rank 201: 19261 of 19886 
stonewall rank 178: 19475 of 19886 
stonewall rank 82: 19450 of 19886 
stonewall rank 107: 19403 of 19886 
stonewall rank 226: 19025 of 19886 
stonewall rank 130: 19193 of 19886 
stonewall rank 12: 19561 of 19886 
stonewall rank 58: 18923 of 19886 
stonewall rank 34: 19210 of 19886 
stonewall rank 154: 19252 of 19886 
stonewall rank 202: 19401 of 19886 
stonewall rank 179: 19422 of 19886 
stonewall rank 83: 19251 of 19886 
stonewall rank 108: 19628 of 19886 
stonewall rank 227: 19010 of 19886 
stonewall rank 131: 19365 of 19886 
stonewall rank 13: 19697 of 19886 
stonewall rank 59: 19093 of 19886 
stonewall rank 35: 19181 of 19886 
stonewall rank 155: 19270 of 19886 
stonewall rank 203: 19363 of 19886 
stonewall rank 180: 19684 of 19886 
stonewall rank 84: 19536 of 19886 
stonewall rank 109: 19587 of 19886 
stonewall rank 228: 19519 of 19886 
stonewall rank 132: 19435 of 19886 
stonewall rank 14: 19581 of 19886 
stonewall rank 60: 19022 of 19886 
stonewall rank 36: 19337 of 19886 
stonewall rank 156: 19217 of 19886 
stonewall rank 204: 19162 of 19886 
stonewall rank 181: 19403 of 19886 
stonewall rank 85: 19505 of 19886 
stonewall rank 110: 19579 of 19886 
stonewall rank 229: 19276 of 19886 
stonewall rank 133: 19382 of 19886 
stonewall rank 15: 19451 of 19886 
stonewall rank 61: 19182 of 19886 
stonewall rank 37: 19369 of 19886 
stonewall rank 157: 19102 of 19886 
stonewall rank 205: 19212 of 19886 
stonewall rank 182: 19471 of 19886 
stonewall rank 86: 19511 of 19886 
stonewall rank 111: 19599 of 19886 
stonewall rank 230: 18966 of 19886 
stonewall rank 134: 19463 of 19886 
stonewall rank 16: 19518 of 19886 
stonewall rank 62: 19167 of 19886 
stonewall rank 38: 19301 of 19886 
stonewall rank 158: 19294 of 19886 
stonewall rank 206: 19144 of 19886 
stonewall rank 183: 19605 of 19886 
stonewall rank 87: 19695 of 19886 
stonewall rank 112: 19251 of 19886 
stonewall rank 231: 19250 of 19886 
stonewall rank 135: 19230 of 19886 
stonewall rank 17: 19575 of 19886 
stonewall rank 63: 19320 of 19886 
stonewall rank 39: 19398 of 19886 
stonewall rank 159: 19193 of 19886 
stonewall rank 207: 19074 of 19886 
stonewall rank 184: 19446 of 19886 
stonewall rank 88: 19302 of 19886 
stonewall rank 114: 19866 of 19886 
stonewall rank 232: 18919 of 19886 
stonewall rank 136: 19618 of 19886 
stonewall rank 18: 19432 of 19886 
stonewall rank 64: 19057 of 19886 
stonewall rank 40: 19487 of 19886 
stonewall rank 160: 19414 of 19886 
stonewall rank 208: 19225 of 19886 
stonewall rank 185: 19552 of 19886 
stonewall rank 89: 19518 of 19886 
stonewall rank 115: 19648 of 19886 
stonewall rank 233: 19085 of 19886 
stonewall rank 137: 19191 of 19886 
stonewall rank 19: 19637 of 19886 
stonewall rank 65: 19201 of 19886 
stonewall rank 41: 19416 of 19886 
stonewall rank 161: 19255 of 19886 
stonewall rank 209: 18999 of 19886 
stonewall rank 186: 19653 of 19886 
stonewall rank 90: 19521 of 19886 
stonewall rank 116: 19688 of 19886 
stonewall rank 234: 19181 of 19886 
stonewall rank 138: 19312 of 19886 
stonewall rank 20: 19600 of 19886 
stonewall rank 66: 19216 of 19886 
stonewall rank 42: 19241 of 19886 
stonewall rank 162: 19198 of 19886 
stonewall rank 210: 19254 of 19886 
stonewall rank 187: 19463 of 19886 
stonewall rank 91: 19398 of 19886 
stonewall rank 117: 19490 of 19886 
stonewall rank 235: 18986 of 19886 
stonewall rank 139: 19313 of 19886 
stonewall rank 21: 19434 of 19886 
stonewall rank 67: 19125 of 19886 
stonewall rank 43: 19303 of 19886 
stonewall rank 163: 19407 of 19886 
stonewall rank 211: 19099 of 19886 
stonewall rank 188: 19400 of 19886 
stonewall rank 92: 19274 of 19886 
stonewall rank 118: 19222 of 19886 
stonewall rank 236: 19167 of 19886 
stonewall rank 140: 19195 of 19886 
stonewall rank 22: 19634 of 19886 
stonewall rank 68: 19166 of 19886 
stonewall rank 44: 19182 of 19886 
stonewall rank 164: 19404 of 19886 
stonewall rank 212: 19292 of 19886 
stonewall rank 189: 19453 of 19886 
stonewall rank 93: 19635 of 19886 
stonewall rank 119: 19725 of 19886 
stonewall rank 237: 19081 of 19886 
stonewall rank 141: 19124 of 19886 
stonewall rank 23: 19464 of 19886 
stonewall rank 69: 19025 of 19886 
stonewall rank 45: 19287 of 19886 
stonewall rank 165: 19187 of 19886 
stonewall rank 213: 19140 of 19886 
stonewall rank 190: 19322 of 19886 
stonewall rank 94: 19339 of 19886 
stonewall rank 96: 19382 of 19886 
stonewall rank 238: 19294 of 19886 
stonewall rank 142: 19192 of 19886 
Continue stonewall hit min: 18835 max: 19886 avg: 19307.5 
stonewall rank 0: 19294 of 19886 
stonewall rank 70: 19450 of 19886 
stonewall rank 46: 19133 of 19886 
stonewall rank 166: 19099 of 19886 
stonewall rank 214: 19319 of 19886 
stonewall rank 191: 19349 of 19886 
stonewall rank 95: 19311 of 19886 
stonewall rank 239: 19135 of 19886 
stonewall rank 143: 19326 of 19886 
stonewall rank 71: 19464 of 19886 
stonewall rank 47: 19437 of 19886 
stonewall rank 167: 19425 of 19886 
stonewall rank 215: 19431 of 19886 

SUMMARY rate: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation     :      19420.855      19420.855      19420.855          0.000
   File stat         :          0.000          0.000          0.000          0.000
   File read         :          0.000          0.000          0.000          0.000
   File removal      :          0.000          0.000          0.000          0.000
   Tree creation     :          0.775          0.775          0.775          0.000
   Tree removal      :          0.000          0.000          0.000          0.000

-- finished at 11/09/2018 09:11:56 --
mdtest_hard_delete
-- started at 11/09/2018 09:45:51 --

mdtest-1.9.3 was launched with 240 total task(s) on 10 node(s)
Command line used: /data/autoScratch/weekly/aaw360/io-500/benchmarks/io-500/io-500-qmul/bin/mdtest "-r" "-t" "-F" "-w" "3901" "-e" "3901" "-d" "/data/autoScratch/weekly/aaw360/io-500/benchmarks/io-500/io-500-qmul/datafiles.e8/io500.2018.11.09-08.59.58/mdt_hard" "-n" "10000" "-x" "/data/autoScratch/weekly/aaw360/io-500/benchmarks/io-500/io-500-qmul/datafiles.e8/io500.2018.11.09-08.59.58/mdt_hard-stonewall"
Path: /gpfs/e8Scratch/ITSR-Testing/io-500/datafiles/io500.2018.11.09-08.59.58
FS: 124.2 TiB   Used FS: 8.8%   Inodes: 133.7 Mi   Used Inodes: 2.9%

240 tasks, 2400000 files

SUMMARY rate: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation     :          0.000          0.000          0.000          0.000
   File stat         :          0.000          0.000          0.000          0.000
   File read         :          0.000          0.000          0.000          0.000
   File removal      :       4709.266       4709.266       4709.266          0.000
   Tree creation     :          0.000          0.000          0.000          0.000
   Tree removal      :          0.800          0.800          0.800          0.000

-- finished at 11/09/2018 09:54:18 --
mdtest_hard_read
-- started at 11/09/2018 09:41:03 --

mdtest-1.9.3 was launched with 240 total task(s) on 10 node(s)
Command line used: /data/autoScratch/weekly/aaw360/io-500/benchmarks/io-500/io-500-qmul/bin/mdtest "-E" "-t" "-F" "-w" "3901" "-e" "3901" "-d" "/data/autoScratch/weekly/aaw360/io-500/benchmarks/io-500/io-500-qmul/datafiles.e8/io500.2018.11.09-08.59.58/mdt_hard" "-n" "10000" "-x" "/data/autoScratch/weekly/aaw360/io-500/benchmarks/io-500/io-500-qmul/datafiles.e8/io500.2018.11.09-08.59.58/mdt_hard-stonewall"
Path: /gpfs/e8Scratch/ITSR-Testing/io-500/datafiles/io500.2018.11.09-08.59.58
FS: 124.2 TiB   Used FS: 8.8%   Inodes: 133.7 Mi   Used Inodes: 5.4%

240 tasks, 2400000 files

SUMMARY rate: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation     :          0.000          0.000          0.000          0.000
   File stat         :          0.000          0.000          0.000          0.000
   File read         :       8376.114       8376.114       8376.114          0.000
   File removal      :          0.000          0.000          0.000          0.000
   Tree creation     :          0.000          0.000          0.000          0.000
   Tree removal      :          0.000          0.000          0.000          0.000

-- finished at 11/09/2018 09:45:47 --
mdtest_hard_stat
-- started at 11/09/2018 09:34:47 --

mdtest-1.9.3 was launched with 240 total task(s) on 10 node(s)
Command line used: /data/autoScratch/weekly/aaw360/io-500/benchmarks/io-500/io-500-qmul/bin/mdtest "-T" "-t" "-F" "-w" "3901" "-e" "3901" "-d" "/data/autoScratch/weekly/aaw360/io-500/benchmarks/io-500/io-500-qmul/datafiles.e8/io500.2018.11.09-08.59.58/mdt_hard" "-n" "10000" "-x" "/data/autoScratch/weekly/aaw360/io-500/benchmarks/io-500/io-500-qmul/datafiles.e8/io500.2018.11.09-08.59.58/mdt_hard-stonewall"
Path: /gpfs/e8Scratch/ITSR-Testing/io-500/datafiles/io500.2018.11.09-08.59.58
FS: 124.2 TiB   Used FS: 8.8%   Inodes: 133.7 Mi   Used Inodes: 6.3%

240 tasks, 2400000 files

SUMMARY rate: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation     :          0.000          0.000          0.000          0.000
   File stat         :       8787.921       8787.921       8787.921          0.000
   File read         :          0.000          0.000          0.000          0.000
   File removal      :          0.000          0.000          0.000          0.000
   Tree creation     :          0.000          0.000          0.000          0.000
   Tree removal      :          0.000          0.000          0.000          0.000

-- finished at 11/09/2018 09:39:18 --
mdtest_hard_write
-- started at 11/09/2018 09:17:41 --

mdtest-1.9.3 was launched with 240 total task(s) on 10 node(s)
Command line used: /data/autoScratch/weekly/aaw360/io-500/benchmarks/io-500/io-500-qmul/bin/mdtest "-C" "-t" "-F" "-w" "3901" "-e" "3901" "-d" "/data/autoScratch/weekly/aaw360/io-500/benchmarks/io-500/io-500-qmul/datafiles.e8/io500.2018.11.09-08.59.58/mdt_hard" "-n" "10000" "-x" "/data/autoScratch/weekly/aaw360/io-500/benchmarks/io-500/io-500-qmul/datafiles.e8/io500.2018.11.09-08.59.58/mdt_hard-stonewall" "-W" "300"
Path: /gpfs/e8Scratch/ITSR-Testing/io-500/datafiles/io500.2018.11.09-08.59.58
FS: 124.2 TiB   Used FS: 8.8%   Inodes: 133.7 Mi   Used Inodes: 4.5%

240 tasks, 2400000 files
stonewall rank 120: 7703 of 9918 
stonewall rank 48: 7672 of 9918 
stonewall rank 97: 9870 of 9918 
stonewall rank 121: 7655 of 9918 
stonewall rank 49: 7669 of 9918 
stonewall rank 144: 7666 of 9918 
stonewall rank 98: 9891 of 9918 
stonewall rank 168: 7634 of 9918 
stonewall rank 24: 7526 of 9918 
stonewall rank 216: 7629 of 9918 
stonewall rank 192: 7470 of 9918 
stonewall rank 122: 7664 of 9918 
stonewall rank 50: 7669 of 9918 
stonewall rank 145: 7648 of 9918 
stonewall rank 99: 9874 of 9918 
stonewall rank 169: 7636 of 9918 
stonewall rank 25: 7583 of 9918 
stonewall rank 217: 7604 of 9918 
stonewall rank 193: 7559 of 9918 
stonewall rank 123: 7654 of 9918 
stonewall rank 51: 7668 of 9918 
stonewall rank 146: 7641 of 9918 
stonewall rank 100: 9870 of 9918 
stonewall rank 170: 7627 of 9918 
stonewall rank 26: 7587 of 9918 
stonewall rank 218: 7596 of 9918 
stonewall rank 194: 7498 of 9918 
stonewall rank 124: 7660 of 9918 
stonewall rank 52: 7675 of 9918 
stonewall rank 147: 7666 of 9918 
stonewall rank 101: 9909 of 9918 
stonewall rank 171: 7622 of 9918 
stonewall rank 27: 7527 of 9918 
stonewall rank 219: 7613 of 9918 
stonewall rank 195: 7480 of 9918 
stonewall rank 125: 7676 of 9918 
stonewall rank 53: 7668 of 9918 
stonewall rank 148: 7644 of 9918 
stonewall rank 102: 9887 of 9918 
stonewall rank 172: 7633 of 9918 
stonewall rank 28: 7539 of 9918 
stonewall rank 220: 7623 of 9918 
stonewall rank 196: 7505 of 9918 
stonewall rank 126: 7655 of 9918 
stonewall rank 54: 7665 of 9918 
stonewall rank 149: 7660 of 9918 
stonewall rank 103: 9881 of 9918 
stonewall rank 173: 7639 of 9918 
stonewall rank 29: 7566 of 9918 
stonewall rank 221: 7599 of 9918 
stonewall rank 197: 7467 of 9918 
stonewall rank 127: 7661 of 9918 
stonewall rank 55: 7707 of 9918 
stonewall rank 150: 7642 of 9918 
stonewall rank 104: 9878 of 9918 
stonewall rank 174: 7628 of 9918 
stonewall rank 30: 7534 of 9918 
stonewall rank 222: 7640 of 9918 
stonewall rank 198: 7469 of 9918 
stonewall rank 128: 7689 of 9918 
stonewall rank 56: 7706 of 9918 
stonewall rank 151: 7643 of 9918 
stonewall rank 105: 9859 of 9918 
stonewall rank 175: 7636 of 9918 
stonewall rank 31: 7525 of 9918 
stonewall rank 223: 7598 of 9918 
stonewall rank 199: 7474 of 9918 
stonewall rank 129: 7667 of 9918 
stonewall rank 57: 7714 of 9918 
stonewall rank 152: 7643 of 9918 
stonewall rank 106: 9861 of 9918 
stonewall rank 176: 7707 of 9918 
stonewall rank 32: 7533 of 9918 
stonewall rank 224: 7620 of 9918 
stonewall rank 200: 7475 of 9918 
stonewall rank 130: 7661 of 9918 
stonewall rank 58: 7667 of 9918 
stonewall rank 153: 7640 of 9918 
stonewall rank 107: 9871 of 9918 
stonewall rank 177: 7636 of 9918 
stonewall rank 33: 7525 of 9918 
stonewall rank 225: 7630 of 9918 
stonewall rank 201: 7471 of 9918 
stonewall rank 131: 7654 of 9918 
stonewall rank 59: 7671 of 9918 
stonewall rank 154: 7640 of 9918 
stonewall rank 108: 9879 of 9918 
stonewall rank 178: 7637 of 9918 
stonewall rank 34: 7531 of 9918 
stonewall rank 226: 7605 of 9918 
stonewall rank 202: 7470 of 9918 
stonewall rank 132: 7715 of 9918 
stonewall rank 60: 7678 of 9918 
stonewall rank 155: 7676 of 9918 
stonewall rank 109: 9903 of 9918 
stonewall rank 179: 7626 of 9918 
stonewall rank 35: 7545 of 9918 
stonewall rank 227: 7620 of 9918 
stonewall rank 203: 7479 of 9918 
stonewall rank 133: 7654 of 9918 
stonewall rank 61: 7669 of 9918 
stonewall rank 156: 7642 of 9918 
stonewall rank 110: 9895 of 9918 
stonewall rank 180: 7630 of 9918 
stonewall rank 36: 7526 of 9918 
stonewall rank 228: 7618 of 9918 
stonewall rank 204: 7489 of 9918 
stonewall rank 134: 7672 of 9918 
stonewall rank 62: 7700 of 9918 
stonewall rank 157: 7678 of 9918 
stonewall rank 112: 9871 of 9918 
stonewall rank 181: 7657 of 9918 
stonewall rank 37: 7525 of 9918 
stonewall rank 229: 7602 of 9918 
stonewall rank 205: 7478 of 9918 
stonewall rank 135: 7654 of 9918 
stonewall rank 63: 7667 of 9918 
stonewall rank 158: 7642 of 9918 
stonewall rank 113: 9917 of 9918 
stonewall rank 182: 7637 of 9918 
stonewall rank 38: 7531 of 9918 
stonewall rank 230: 7600 of 9918 
stonewall rank 206: 7467 of 9918 
stonewall rank 136: 7692 of 9918 
stonewall rank 64: 7666 of 9918 
stonewall rank 159: 7668 of 9918 
stonewall rank 114: 9862 of 9918 
stonewall rank 183: 7626 of 9918 
stonewall rank 39: 7542 of 9918 
stonewall rank 231: 7597 of 9918 
stonewall rank 207: 7481 of 9918 
stonewall rank 137: 7654 of 9918 
stonewall rank 65: 7666 of 9918 
stonewall rank 160: 7640 of 9918 
stonewall rank 115: 9871 of 9918 
stonewall rank 184: 7643 of 9918 
stonewall rank 40: 7533 of 9918 
stonewall rank 232: 7630 of 9918 
stonewall rank 208: 7472 of 9918 
stonewall rank 138: 7691 of 9918 
stonewall rank 66: 7692 of 9918 
stonewall rank 161: 7665 of 9918 
stonewall rank 116: 9867 of 9918 
stonewall rank 185: 7636 of 9918 
stonewall rank 41: 7549 of 9918 
stonewall rank 233: 7675 of 9918 
stonewall rank 209: 7482 of 9918 
stonewall rank 139: 7655 of 9918 
stonewall rank 67: 7673 of 9918 
stonewall rank 162: 7690 of 9918 
stonewall rank 117: 9861 of 9918 
stonewall rank 186: 7638 of 9918 
stonewall rank 42: 7530 of 9918 
stonewall rank 234: 7600 of 9918 
stonewall rank 210: 7470 of 9918 
stonewall rank 140: 7655 of 9918 
stonewall rank 68: 7710 of 9918 
stonewall rank 163: 7642 of 9918 
stonewall rank 118: 9872 of 9918 
stonewall rank 187: 7625 of 9918 
stonewall rank 43: 7530 of 9918 
stonewall rank 235: 7605 of 9918 
stonewall rank 211: 7475 of 9918 
stonewall rank 141: 7664 of 9918 
stonewall rank 69: 7669 of 9918 
stonewall rank 164: 7684 of 9918 
stonewall rank 119: 9858 of 9918 
stonewall rank 188: 7634 of 9918 
stonewall rank 44: 7539 of 9918 
stonewall rank 236: 7605 of 9918 
stonewall rank 212: 7469 of 9918 
stonewall rank 142: 7674 of 9918 
stonewall rank 70: 7669 of 9918 
stonewall rank 165: 7639 of 9918 
stonewall rank 96: 9882 of 9918 
stonewall rank 189: 7624 of 9918 
stonewall rank 45: 7528 of 9918 
stonewall rank 237: 7603 of 9918 
stonewall rank 213: 7468 of 9918 
stonewall rank 143: 7664 of 9918 
stonewall rank 71: 7665 of 9918 
stonewall rank 166: 7652 of 9918 
stonewall rank 190: 7645 of 9918 
stonewall rank 46: 7525 of 9918 
stonewall rank 238: 7596 of 9918 
stonewall rank 214: 7491 of 9918 
stonewall rank 167: 7648 of 9918 
stonewall rank 191: 7623 of 9918 
stonewall rank 47: 7541 of 9918 
stonewall rank 239: 7635 of 9918 
stonewall rank 215: 7467 of 9918 
stonewall rank 1: 7613 of 9918 
stonewall rank 2: 7625 of 9918 
stonewall rank 3: 7586 of 9918 
stonewall rank 4: 7599 of 9918 
stonewall rank 5: 7628 of 9918 
stonewall rank 6: 7602 of 9918 
stonewall rank 7: 7589 of 9918 
stonewall rank 8: 7595 of 9918 
stonewall rank 9: 7586 of 9918 
stonewall rank 10: 7599 of 9918 
stonewall rank 11: 7642 of 9918 
stonewall rank 12: 7588 of 9918 
stonewall rank 13: 7587 of 9918 
stonewall rank 14: 7594 of 9918 
stonewall rank 15: 7612 of 9918 
stonewall rank 16: 7598 of 9918 
stonewall rank 17: 7586 of 9918 
stonewall rank 18: 7591 of 9918 
stonewall rank 19: 7597 of 9918 
stonewall rank 20: 7587 of 9918 
stonewall rank 21: 7585 of 9918 
stonewall rank 72: 7225 of 9918 
stonewall rank 22: 7590 of 9918 
stonewall rank 73: 7220 of 9918 
stonewall rank 23: 7598 of 9918 
stonewall rank 74: 7225 of 9918 
Continue stonewall hit min: 7218 max: 9918 avg: 7798.0 
stonewall rank 0: 7587 of 9918 
stonewall rank 75: 7338 of 9918 
stonewall rank 77: 7222 of 9918 
stonewall rank 78: 7235 of 9918 
stonewall rank 79: 7245 of 9918 
stonewall rank 80: 7228 of 9918 
stonewall rank 81: 7229 of 9918 
stonewall rank 82: 7228 of 9918 
stonewall rank 83: 7221 of 9918 
stonewall rank 84: 7218 of 9918 
stonewall rank 85: 7221 of 9918 
stonewall rank 86: 7246 of 9918 
stonewall rank 87: 7218 of 9918 
stonewall rank 88: 7218 of 9918 
stonewall rank 89: 7233 of 9918 
stonewall rank 90: 7220 of 9918 
stonewall rank 91: 7218 of 9918 
stonewall rank 92: 7241 of 9918 
stonewall rank 93: 7230 of 9918 
stonewall rank 94: 7227 of 9918 
stonewall rank 95: 7228 of 9918 
stonewall rank 76: 7229 of 9918 

SUMMARY rate: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation     :       6000.163       6000.163       6000.163          0.000
   File stat         :          0.000          0.000          0.000          0.000
   File read         :          0.000          0.000          0.000          0.000
   File removal      :          0.000          0.000          0.000          0.000
   Tree creation     :       1369.792       1369.792       1369.792          0.000
   Tree removal      :          0.000          0.000          0.000          0.000

-- finished at 11/09/2018 09:24:21 --
result_summary
[RESULT] BW   phase 1            ior_easy_write                8.859 GB/s : time 396.82 seconds
[RESULT] IOPS phase 1         mdtest_easy_write               19.421 kiops : time 315.15 seconds
[RESULT] BW   phase 2            ior_hard_write                1.261 GB/s : time 333.21 seconds
[RESULT] IOPS phase 2         mdtest_hard_write                6.000 kiops : time 405.27 seconds
[RESULT] IOPS phase 3                      find              431.600 kiops : time  16.57 seconds
[RESULT] BW   phase 3             ior_easy_read                8.755 GB/s : time 401.54 seconds
[RESULT] IOPS phase 4          mdtest_easy_stat               67.238 kiops : time  75.64 seconds
[RESULT] BW   phase 4             ior_hard_read                3.556 GB/s : time 118.20 seconds
[RESULT] IOPS phase 5          mdtest_hard_stat                8.788 kiops : time 275.81 seconds
[RESULT] IOPS phase 6        mdtest_easy_delete               50.478 kiops : time 100.92 seconds
[RESULT] IOPS phase 7          mdtest_hard_read                8.376 kiops : time 287.87 seconds
[RESULT] IOPS phase 8        mdtest_hard_delete                4.709 kiops : time 511.74 seconds
[SCORE] Bandwidth 4.31865 GB/s : IOPS 22.2083 kiops : TOTAL 9.79336