ofsdev

Institution Clemson University
Client Procs Per Node 8
Client Operating System Oracle Linux
Client Operating System Version 7.5
Client Kernel Version 3.10.0-862.14.4.el7.x86_64

DATA SERVER

Storage Type HDD
Volatile Memory 16GB
Storage Interface SAS
Network InfiniBand FDR
Software Version 2.10.5
OS Version 7.5

INFORMATION

Client Nodes 16
Client Total Procs 128
Metadata Nodes 16
Metadata Storage Devices 2
Data Nodes 16
Data Storage Devices 12

METADATA

Easy Write 19.81 kIOP/s
Easy Stat 99.65 kIOP/s
Easy Delete 21.10 kIOP/s
Hard Write 15.16 kIOP/s
Hard Read 30.90 kIOP/s
Hard Stat 38.66 kIOP/s
Hard Delete 9.86 kIOP/s

Submitted Files

io500
#!/bin/bash
#
# INSTRUCTIONS:
# Edit this file as needed for your machine.
# This script started from the simplified single-node template (a cut-down
# version of site-configs/sandia/startup.sh, which includes SLURM directives)
# and has been adapted here for multi-node runs.
# Most of the variables set here are needed by io500_fixed.sh, which gets sourced at the end of this script.
# Please also edit the 'extra_description' function.
#set -x

if [ "$1" == "" ]
then
	SCALE=1
else
	SCALE=$1
fi


NP=$(( $SCALE * 16 ))

echo "$SCALE processes per node for $NP processes."

set -euo pipefail  # better error handling

export OFS_MOUNT=/mnt/lustre/jburto2

# turn these to True successively while you debug and tune this benchmark.
# for each one that you turn to true, go and edit the appropriate function.
# to find the function name, see the 'main' function.
# These are listed in the order that they run.
io500_run_ior_easy="True" # does the write phase and enables the subsequent read
io500_run_md_easy="True"  # does the creat phase and enables the subsequent stat
io500_run_ior_hard="True" # does the write phase and enables the subsequent read
io500_run_md_hard="True"  # does the creat phase and enables the subsequent read
io500_run_find="True"     
io500_run_ior_easy_read="True"
io500_run_md_easy_stat="True"
io500_run_ior_hard_read="True"
io500_run_md_hard_stat="True"
io500_run_md_hard_read="True"  
io500_run_md_easy_delete="True" # turn this off if you want to just run find by itself
io500_run_md_hard_delete="True" # turn this off if you want to just run find by itself
io500_run_mdreal="True"  # this one is optional
io500_cleanup_workdir="False"  # this flag is currently ignored. You'll need to clean up your data files manually if you want to.
io500_stonewall_timer=300 # Stonewalling timer: stop (with wearout) after 300s on the default test; set to 0 if you never want to abort early.


# to run this benchmark, find and edit each of these functions.
# please also edit the 'extra_description' function to help us collect the required data.
function main {
  setup_directories
  setup_paths    
  setup_ior_easy # required if you want a complete score
  setup_ior_hard # required if you want a complete score
  setup_mdt_easy # required if you want a complete score
  setup_mdt_hard # required if you want a complete score
  setup_find     # required if you want a complete score
  setup_mdreal   # optional
  run_benchmarks
}

function setup_directories {
  # Set the directories where the benchmark files are created and where the results will go.
  # If you want to set up stripe tuning on your output directories or anything similar, this is a good place to do it.
  timestamp=$(date +%Y.%m.%d-%H.%M.%S)          # create a uniquifier
  io500_workdir=$OFS_MOUNT/io500/datafiles/io500.$timestamp # directory where the data will be stored
  io500_result_dir=$PWD/results/$timestamp      # the directory where the output results will be kept

  mkdir -p $io500_workdir $io500_result_dir
  mkdir -p ${io500_workdir}/ior_easy ${io500_workdir}/ior_hard 
  #mkdir -p ${io500_workdir}/mdt_easy ${io500_workdir}/mdt_hard

# For ior_easy: large chunks, and as few targets as will still let the files spread evenly.
  lfs setstripe -c 1 ${io500_workdir}/ior_easy  # one OST per file, i.e. no striping within a file
# Stripe across all OSTs for ior_hard, 256k chunk size (a 64k variant is kept commented out below).
  lfs setstripe -c -1 -S 256k ${io500_workdir}/ior_hard
#  lfs setstripe -c -1 -S 64k ${io500_workdir}/ior_hard
# Enable DNE2
# https://lustre.ornl.gov/ecosystem-2016/documents/papers/LustreEco2016-Simmons-DNE.pdf 
  lfs setdirstripe -c 16 ${io500_workdir}/mdt_easy
  lfs setdirstripe -c 16 ${io500_workdir}/mdt_hard
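  # A quick way to confirm the layouts took effect (read-only; kept commented out here):
  #   lfs getstripe -c ${io500_workdir}/ior_easy    # expect stripe count 1
  #   lfs getstripe ${io500_workdir}/ior_hard       # expect count -1, 256k stripe size
  #   lfs getdirstripe ${io500_workdir}/mdt_easy    # expect 16 MDT stripes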
  echo "Stripes set"
}

function setup_paths {
  # Set the paths to the binaries. If you ran ./utilities/prepare.sh successfully, the binaries are in ./bin/.
  io500_ior_cmd=$PWD/bin/ior
  io500_mdtest_cmd=$PWD/bin/mdtest
  io500_mdreal_cmd=$PWD/bin/md-real-io
  io500_mpi_prefix="/usr/lib64/openmpi"
  #io500_mpi_prefix="/home/jburto2/openmpi/1.10.7"
  io500_mpirun="$io500_mpi_prefix/bin/mpirun"

  # Run OpenMPI over ethernet (TCP with ib0 excluded, openib disabled) to keep the
  # InfiniBand network clear for storage traffic. Map by node to balance processes;
  # the IO-500 benchmarks are not heavy on interprocess communication.
  io500_mpiargs="-np $NP --mca btl_tcp_if_exclude ib0 --mca btl ^openib --map-by node --machinefile /home/jburto2/pvfsnodelistmpi --prefix $io500_mpi_prefix"
}

function setup_ior_easy {
# 4 MiB transfers, file per process; 300 GiB per client node, split across $SCALE procs.
  io500_ior_easy_size=$((300 * 1024 / $SCALE))
  io500_ior_easy_params="-t 4m -b ${io500_ior_easy_size}m -F -a MPIIO"
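  # With SCALE=8 this yields "-t 4m -b 38400m" (37.5 GiB per rank), matching the
  # ior_easy command lines in the logs below.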
   
}

function setup_mdt_easy {
# -u gives each task a unique working directory; -L puts files only at leaf level.
# (The commented-out variant below built a shallow directory tree instead; it is a
# leftover from a BeeGFS config, where more directories mean better distribution.)
#  io500_mdtest_easy_params="-z 1 -b 6 -u -L"
  io500_mdtest_easy_params="-u -L"
  io500_mdtest_easy_files_per_proc=$((1280000 / $SCALE ))
}

function setup_ior_hard {
  if [ "$SCALE" == "1" ] 
  then
  	io500_ior_hard_writes_per_proc=128000
  else	
  	io500_ior_hard_writes_per_proc=$(( 128000 / $SCALE ))
  fi
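  # With SCALE=8 this gives 16000 writes per proc, matching "-s 16000" (47008-byte
  # transfers into a single shared file) in the ior_hard command lines below.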

  io500_ior_hard_other_options=" -a MPIIO"

}

function setup_mdt_hard {
  io500_mdtest_hard_files_per_proc="$(( 500000 / $SCALE ))"
  io500_mdtest_hard_other_options=""
}

function setup_find {
  #
  # Set up the find command. This is an area where innovation is allowed.
  #    Three default options are provided: a serial find, a parallel Python
  #    version, and a parallel C version. The current default is the serial
  #    one, but it is very slow; we recommend either customizing it or using
  #    the C parallel version. For GPFS, we recommend the provided mmfind
  #    wrapper described below. Instructions follow.
  #    If a custom approach is used, please provide enough info so others can reproduce it.

  # the serial version that should run (SLOWLY) without modification
  #io500_find_mpi="False"
  #io500_find_cmd=$PWD/bin/sfind.sh
  #io500_find_cmd_args=""

  # a parallel version in C; the -s option adds a stonewall
  #   for a real run, turn -s (stonewall) off or set it to 300 or more
  #   to prepare this (assuming you've run ./utilities/prepare.sh already):
  #   > cd build/pfind
  #   > ./prepare.sh
  #   > ./compile.sh
  #   > cp pfind ../../bin/
  #   If you use io500_find_mpi="True", this will run with the same number of
  #   MPI nodes and ranks as the other phases. If you prefer another number
  #   (and fewer might be better here), set io500_find_mpi to "False" and
  #   write a wrapper script that sets up MPI as you would like, then change
  #   io500_find_cmd to point to your wrapper script.
  io500_find_mpi="True"
  io500_find_cmd="$PWD/bin/pfind"
  io500_find_cmd_args="-s 10000 -r $io500_result_dir/pfind_results"
  
  # for GPFS systems, you should probably use the provided mmfind wrapper 
  # if you used ./utilities/prepare.sh, you'll find this wrapper in ./bin/mmfind.sh
  #io500_find_mpi="False"
  #io500_find_cmd="$PWD/bin/mmfind.sh"
  #io500_find_cmd_args=""
}

function setup_mdreal {
  io500_mdreal_params="-P=5000 -I=1000"
}

function run_benchmarks {
  # Important: source the io500_fixed.sh script.  Do not change it. If you discover
  # a need to change it, please email the mailing list to discuss
  source ./bin/io500_fixed.sh 2>&1 | tee $io500_result_dir/io-500-summary.$timestamp.txt
}

# Add key/value pairs defining your system 
# Feel free to add extra ones if you'd like
function extra_description {
  # top level info
  io500_info_system_name='Palmetto ofstest'      # e.g. Oakforest-PACS
  io500_info_institute_name='Clemson University'   # e.g. JCAHPC
  io500_info_storage_age_in_months='0' # not install date but age since last refresh
  io500_info_storage_install_date='4/12'  # MM/YY
  io500_info_filesystem='Lustre'     # e.g. BeeGFS, DataWarp, GPFS, IME, Lustre
  io500_info_filesystem_version='2.10.5'
  # client side info
  io500_info_num_client_nodes='16'
  io500_info_procs_per_node="$SCALE"
  # server side info
  io500_info_num_metadata_server_nodes='16'
  io500_info_num_data_server_nodes='16'
  io500_info_num_data_storage_devices='160'  # if you have 5 data servers, and each has 5 drives, then this number is 25
  io500_info_num_metadata_storage_devices='32'  # if you have 2 metadata servers, and each has 5 drives, then this number is 10
  io500_info_data_storage_type='HDD' # HDD, SSD, persistent memory, etc, feel free to put specific models
  io500_info_metadata_storage_type='SSD' # HDD, SSD, persistent memory, etc, feel free to put specific models
  io500_info_storage_network='infiniband' # infiniband, omnipath, ethernet, etc
  io500_info_storage_interface='SAS' # SAS, SATA, NVMe, etc
  # miscellaneous
  io500_info_whatever='infiniband'
}

main
ior_easy_read
IOR-3.2.0: MPI Coordinated Test of Parallel I/O
Began               : Thu Nov  8 16:15:31 2018
Command line        : /home/jburto2/io-500-dev/bin/ior -r -R -C -Q 1 -g -G 27 -k -e -t 4m -b 38400m -F -a MPIIO -o /mnt/lustre/jburto2/io500/datafiles/io500.2018.11.08-15.34.45/ior_easy/ior_file_easy -O stoneWallingStatusFile=/mnt/lustre/jburto2/io500/datafiles/io500.2018.11.08-15.34.45/ior_easy/stonewall
Machine             : Linux pvfs017-ib0.palmetto.clemson.edu
TestID              : 0
StartTime           : Thu Nov  8 16:15:31 2018
Path                : /mnt/lustre/jburto2/io500/datafiles/io500.2018.11.08-15.34.45/ior_easy
FS                  : 144.2 TiB   Used FS: 15.5%   Inodes: 290.9 Mi   Used Inodes: 4.5%

Options: 
api                 : MPIIO
apiVersion          : (3.0)
test filename       : /mnt/lustre/jburto2/io500/datafiles/io500.2018.11.08-15.34.45/ior_easy/ior_file_easy
access              : file-per-process
type                : independent
segments            : 1
ordering in a file  : sequential
ordering inter file : constant task offset
task offset         : 1
tasks               : 128
clients per node    : 8
repetitions         : 1
xfersize            : 4 MiB
blocksize           : 37.50 GiB
aggregate filesize  : 4.69 TiB

Results: 

access    bw(MiB/s)  block(KiB) xfer(KiB)  open(s)    wr/rd(s)   close(s)   total(s)   iter
------    ---------  ---------- ---------  --------   --------   --------   --------   ----
read      7247       39321600   4096       0.008766   678.24     0.056545   678.26     0   
Max Read:  7246.83 MiB/sec (7598.85 MB/sec)

Summary of all tests:
Operation   Max(MiB)   Min(MiB)  Mean(MiB)     StdDev   Max(OPs)   Min(OPs)  Mean(OPs)     StdDev    Mean(s) Test# #Tasks tPN reps fPP reord reordoff reordrand seed segcnt   blksiz    xsize aggs(MiB)   API RefNum
read         7246.83    7246.83    7246.83       0.00    1811.71    1811.71    1811.71       0.00  678.25532     0    128   8    1   1     1        1         0    0      1 40265318400  4194304 4915200.0 MPIIO      0
Finished            : Thu Nov  8 16:26:49 2018
ior_easy_write
IOR-3.2.0: MPI Coordinated Test of Parallel I/O
Began               : Thu Nov  8 15:34:48 2018
Command line        : /home/jburto2/io-500-dev/bin/ior -w -C -Q 1 -g -G 27 -k -e -t 4m -b 38400m -F -a MPIIO -o /mnt/lustre/jburto2/io500/datafiles/io500.2018.11.08-15.34.45/ior_easy/ior_file_easy -O stoneWallingStatusFile=/mnt/lustre/jburto2/io500/datafiles/io500.2018.11.08-15.34.45/ior_easy/stonewall -O stoneWallingWearOut=1 -D 300
Machine             : Linux pvfs017-ib0.palmetto.clemson.edu
TestID              : 0
StartTime           : Thu Nov  8 15:34:48 2018
Path                : /mnt/lustre/jburto2/io500/datafiles/io500.2018.11.08-15.34.45/ior_easy
FS                  : 144.2 TiB   Used FS: 12.2%   Inodes: 284.9 Mi   Used Inodes: 0.0%

Options: 
api                 : MPIIO
apiVersion          : (3.0)
test filename       : /mnt/lustre/jburto2/io500/datafiles/io500.2018.11.08-15.34.45/ior_easy/ior_file_easy
access              : file-per-process
type                : independent
segments            : 1
ordering in a file  : sequential
ordering inter file : constant task offset
task offset         : 1
tasks               : 128
clients per node    : 8
repetitions         : 1
xfersize            : 4 MiB
blocksize           : 37.50 GiB
aggregate filesize  : 4.69 TiB
stonewallingTime    : 300
stoneWallingWearOut : 1

Results: 

access    bw(MiB/s)  block(KiB) xfer(KiB)  open(s)    wr/rd(s)   close(s)   total(s)   iter
------    ---------  ---------- ---------  --------   --------   --------   --------   ----
stonewalling pairs accessed min: 3502 max: 9600 -- min data: 13.7 GiB mean data: 32.5 GiB time: 301.5s
write     12775      39321600   4096       0.011520   384.70     0.055057   384.77     0   
Max Write: 12774.51 MiB/sec (13395.04 MB/sec)

Summary of all tests:
Operation   Max(MiB)   Min(MiB)  Mean(MiB)     StdDev   Max(OPs)   Min(OPs)  Mean(OPs)     StdDev    Mean(s) Test# #Tasks tPN reps fPP reord reordoff reordrand seed segcnt   blksiz    xsize aggs(MiB)   API RefNum
write       12774.51   12774.51   12774.51       0.00    3193.63    3193.63    3193.63       0.00  384.76631     0    128   8    1   1     1        1         0    0      1 40265318400  4194304 4915200.0 MPIIO      0
Finished            : Thu Nov  8 15:41:13 2018
ior_hard_read
IOR-3.2.0: MPI Coordinated Test of Parallel I/O
Began               : Thu Nov  8 16:28:24 2018
Command line        : /home/jburto2/io-500-dev/bin/ior -r -R -C -Q 1 -g -G 27 -k -e -t 47008 -b 47008 -s 16000 -a MPIIO -o /mnt/lustre/jburto2/io500/datafiles/io500.2018.11.08-15.34.45/ior_hard/IOR_file -O stoneWallingStatusFile=/mnt/lustre/jburto2/io500/datafiles/io500.2018.11.08-15.34.45/ior_hard/stonewall
Machine             : Linux pvfs017-ib0.palmetto.clemson.edu
TestID              : 0
StartTime           : Thu Nov  8 16:28:24 2018
Path                : /mnt/lustre/jburto2/io500/datafiles/io500.2018.11.08-15.34.45/ior_hard
FS                  : 144.2 TiB   Used FS: 15.5%   Inodes: 290.9 Mi   Used Inodes: 4.5%

Options: 
api                 : MPIIO
apiVersion          : (3.0)
test filename       : /mnt/lustre/jburto2/io500/datafiles/io500.2018.11.08-15.34.45/ior_hard/IOR_file
access              : single-shared-file
type                : independent
segments            : 16000
ordering in a file  : sequential
ordering inter file : constant task offset
task offset         : 1
tasks               : 128
clients per node    : 8
repetitions         : 1
xfersize            : 47008 bytes
blocksize           : 47008 bytes
aggregate filesize  : 89.66 GiB

Results: 

access    bw(MiB/s)  block(KiB) xfer(KiB)  open(s)    wr/rd(s)   close(s)   total(s)   iter
------    ---------  ---------- ---------  --------   --------   --------   --------   ----
read      873.95     45.91      45.91      1.02       104.03     0.011529   105.05     0   
Max Read:  873.95 MiB/sec (916.41 MB/sec)

Summary of all tests:
Operation   Max(MiB)   Min(MiB)  Mean(MiB)     StdDev   Max(OPs)   Min(OPs)  Mean(OPs)     StdDev    Mean(s) Test# #Tasks tPN reps fPP reord reordoff reordrand seed segcnt   blksiz    xsize aggs(MiB)   API RefNum
read          873.95     873.95     873.95       0.00   19494.69   19494.69   19494.69       0.00  105.05425     0    128   8    1   0     1        1         0    0  16000    47008    47008   91812.5 MPIIO      0
Finished            : Thu Nov  8 16:30:09 2018
ior_hard_write
IOR-3.2.0: MPI Coordinated Test of Parallel I/O
Began               : Thu Nov  8 15:59:00 2018
Command line        : /home/jburto2/io-500-dev/bin/ior -w -C -Q 1 -g -G 27 -k -e -t 47008 -b 47008 -s 16000 -a MPIIO -o /mnt/lustre/jburto2/io500/datafiles/io500.2018.11.08-15.34.45/ior_hard/IOR_file -O stoneWallingStatusFile=/mnt/lustre/jburto2/io500/datafiles/io500.2018.11.08-15.34.45/ior_hard/stonewall -O stoneWallingWearOut=1 -D 300
Machine             : Linux pvfs017-ib0.palmetto.clemson.edu
TestID              : 0
StartTime           : Thu Nov  8 15:59:00 2018
Path                : /mnt/lustre/jburto2/io500/datafiles/io500.2018.11.08-15.34.45/ior_hard
FS                  : 144.2 TiB   Used FS: 15.4%   Inodes: 290.9 Mi   Used Inodes: 2.9%

Options: 
api                 : MPIIO
apiVersion          : (3.0)
test filename       : /mnt/lustre/jburto2/io500/datafiles/io500.2018.11.08-15.34.45/ior_hard/IOR_file
access              : single-shared-file
type                : independent
segments            : 16000
ordering in a file  : sequential
ordering inter file : constant task offset
task offset         : 1
tasks               : 128
clients per node    : 8
repetitions         : 1
xfersize            : 47008 bytes
blocksize           : 47008 bytes
aggregate filesize  : 89.66 GiB
stonewallingTime    : 300
stoneWallingWearOut : 1

Results: 

access    bw(MiB/s)  block(KiB) xfer(KiB)  open(s)    wr/rd(s)   close(s)   total(s)   iter
------    ---------  ---------- ---------  --------   --------   --------   --------   ----
stonewalling pairs accessed min: 10250 max: 16000 -- min data: 0.4 GiB mean data: 0.6 GiB time: 300.2s
write     242.05     45.91      45.91      0.073906   379.10     0.128162   379.31     0   
Max Write: 242.05 MiB/sec (253.81 MB/sec)

Summary of all tests:
Operation   Max(MiB)   Min(MiB)  Mean(MiB)     StdDev   Max(OPs)   Min(OPs)  Mean(OPs)     StdDev    Mean(s) Test# #Tasks tPN reps fPP reord reordoff reordrand seed segcnt   blksiz    xsize aggs(MiB)   API RefNum
write         242.05     242.05     242.05       0.00    5399.33    5399.33    5399.33       0.00  379.30629     0    128   8    1   0     1        1         0    0  16000    47008    47008   91812.5 MPIIO      0
Finished            : Thu Nov  8 16:05:19 2018
mdtest_easy_delete
-- started at 11/08/2018 16:32:18 --

mdtest-1.9.3 was launched with 128 total task(s) on 16 node(s)
Command line used: /home/jburto2/io-500-dev/bin/mdtest "-r" "-F" "-d" "/mnt/lustre/jburto2/io500/datafiles/io500.2018.11.08-15.34.45/mdt_easy" "-n" "160000" "-u" "-L" "-x" "/mnt/lustre/jburto2/io500/datafiles/io500.2018.11.08-15.34.45/mdt_easy-stonewall"
Path: /mnt/lustre/jburto2/io500/datafiles/io500.2018.11.08-15.34.45
FS: 144.2 TiB   Used FS: 15.5%   Inodes: 290.9 Mi   Used Inodes: 4.5%

128 tasks, 20480000 files

SUMMARY rate: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation     :          0.000          0.000          0.000          0.000
   File stat         :          0.000          0.000          0.000          0.000
   File read         :          0.000          0.000          0.000          0.000
   File removal      :      21098.433      21098.433      21098.433          0.000
   Tree creation     :          0.000          0.000          0.000          0.000
   Tree removal      :          1.760          1.760          1.760          0.000

-- finished at 11/08/2018 16:39:21 --
mdtest_easy_stat
-- started at 11/08/2018 16:26:52 --

mdtest-1.9.3 was launched with 128 total task(s) on 16 node(s)
Command line used: /home/jburto2/io-500-dev/bin/mdtest "-T" "-F" "-d" "/mnt/lustre/jburto2/io500/datafiles/io500.2018.11.08-15.34.45/mdt_easy" "-n" "160000" "-u" "-L" "-x" "/mnt/lustre/jburto2/io500/datafiles/io500.2018.11.08-15.34.45/mdt_easy-stonewall"
Path: /mnt/lustre/jburto2/io500/datafiles/io500.2018.11.08-15.34.45
FS: 144.2 TiB   Used FS: 15.5%   Inodes: 290.9 Mi   Used Inodes: 4.5%

128 tasks, 20480000 files

SUMMARY rate: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation     :          0.000          0.000          0.000          0.000
   File stat         :      99652.602      99652.602      99652.602          0.000
   File read         :          0.000          0.000          0.000          0.000
   File removal      :          0.000          0.000          0.000          0.000
   Tree creation     :          0.000          0.000          0.000          0.000
   Tree removal      :          0.000          0.000          0.000          0.000

-- finished at 11/08/2018 16:28:22 --
mdtest_easy_write
-- started at 11/08/2018 15:41:15 --

mdtest-1.9.3 was launched with 128 total task(s) on 16 node(s)
Command line used: /home/jburto2/io-500-dev/bin/mdtest "-C" "-F" "-d" "/mnt/lustre/jburto2/io500/datafiles/io500.2018.11.08-15.34.45/mdt_easy" "-n" "160000" "-u" "-L" "-x" "/mnt/lustre/jburto2/io500/datafiles/io500.2018.11.08-15.34.45/mdt_easy-stonewall" "-W" "300"
Path: /mnt/lustre/jburto2/io500/datafiles/io500.2018.11.08-15.34.45
FS: 144.2 TiB   Used FS: 15.4%   Inodes: 288.0 Mi   Used Inodes: 0.0%

128 tasks, 20480000 files
stonewall rank 80: 51287 of 69657 
stonewall rank 48: 51462 of 69657 
Continue stonewall hit min: 35991 max: 69657 avg: 50748.2 
stonewall rank 0: 51732 of 69657 
stonewall rank 32: 51932 of 69657 
stonewall rank 16: 51335 of 69657 
stonewall rank 112: 51376 of 69657 
stonewall rank 96: 51567 of 69657 
stonewall rank 64: 51749 of 69657 
stonewall rank 40: 69485 of 69657 
stonewall rank 120: 68203 of 69657 
stonewall rank 56: 68221 of 69657 
stonewall rank 88: 68024 of 69657 
stonewall rank 24: 68148 of 69657 
stonewall rank 72: 69549 of 69657 
stonewall rank 104: 69493 of 69657 
stonewall rank 36: 46980 of 69657 
stonewall rank 4: 46678 of 69657 
stonewall rank 100: 46723 of 69657 
stonewall rank 68: 46864 of 69657 
stonewall rank 52: 42550 of 69657 
stonewall rank 116: 42461 of 69657 
stonewall rank 20: 42494 of 69657 
stonewall rank 84: 42260 of 69657 
stonewall rank 76: 36175 of 69657 
stonewall rank 12: 36082 of 69657 
stonewall rank 124: 36039 of 69657 
stonewall rank 60: 36104 of 69657 
stonewall rank 108: 36147 of 69657 
stonewall rank 92: 36098 of 69657 
stonewall rank 44: 36066 of 69657 
stonewall rank 28: 35991 of 69657 

SUMMARY rate: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation     :      45558.875      45558.875      45558.875          0.000
   File stat         :          0.000          0.000          0.000          0.000
   File read         :          0.000          0.000          0.000          0.000
   File removal      :          0.000          0.000          0.000          0.000
   Tree creation     :        104.308        104.308        104.308          0.000
   Tree removal      :          0.000          0.000          0.000          0.000

-- finished at 11/08/2018 15:48:45 --
stonewall rank 2: 68501 of 69657 
stonewall rank 114: 68544 of 69657 
stonewall rank 66: 68410 of 69657 
stonewall rank 82: 68693 of 69657 
stonewall rank 50: 68661 of 69657 
stonewall rank 18: 68494 of 69657 
stonewall rank 34: 68110 of 69657 
stonewall rank 98: 68221 of 69657 
stonewall rank 42: 51493 of 69657 
stonewall rank 102: 41899 of 69657 
stonewall rank 6: 41935 of 69657 
stonewall rank 54: 41841 of 69657 
stonewall rank 22: 41713 of 69657 
stonewall rank 90: 51026 of 69657 
stonewall rank 86: 41735 of 69657 
stonewall rank 122: 51204 of 69657 
stonewall rank 38: 42019 of 69657 
stonewall rank 106: 51530 of 69657 
stonewall rank 118: 41660 of 69657 
stonewall rank 10: 51355 of 69657 
stonewall rank 70: 42160 of 69657 
stonewall rank 26: 51012 of 69657 
stonewall rank 78: 42349 of 69657 
stonewall rank 74: 51456 of 69657 
stonewall rank 58: 51196 of 69657 
stonewall rank 62: 42090 of 69657 
stonewall rank 126: 41701 of 69657 
stonewall rank 46: 42369 of 69657 
stonewall rank 30: 41729 of 69657 
stonewall rank 14: 42350 of 69657 
stonewall rank 94: 41805 of 69657 
stonewall rank 110: 42418 of 69657 
stonewall rank 113: 49829 of 69657 
stonewall rank 81: 49965 of 69657 
stonewall rank 17: 50052 of 69657 
stonewall rank 1: 50090 of 69657 
stonewall rank 49: 49871 of 69657 
stonewall rank 33: 50181 of 69657 
stonewall rank 97: 50425 of 69657 
stonewall rank 65: 50123 of 69657 
stonewall rank 25: 42039 of 69657 
stonewall rank 57: 42095 of 69657 
stonewall rank 53: 51102 of 69657 
stonewall rank 89: 41914 of 69657 
stonewall rank 41: 42265 of 69657 
stonewall rank 117: 51252 of 69657 
stonewall rank 121: 42347 of 69657 
stonewall rank 101: 51603 of 69657 
stonewall rank 105: 42210 of 69657 
stonewall rank 37: 51499 of 69657 
stonewall rank 9: 42255 of 69657 
stonewall rank 21: 51285 of 69657 
stonewall rank 73: 42011 of 69657 
stonewall rank 5: 51653 of 69657 
stonewall rank 69: 51512 of 69657 
stonewall rank 85: 51143 of 69657 
stonewall rank 13: 51291 of 69657 
stonewall rank 61: 49952 of 69657 
stonewall rank 109: 51261 of 69657 
stonewall rank 77: 51376 of 69657 
stonewall rank 45: 51294 of 69657 
stonewall rank 93: 49864 of 69657 
stonewall rank 125: 49979 of 69657 
stonewall rank 29: 50032 of 69657 
stonewall rank 35: 43756 of 69657 
stonewall rank 67: 43824 of 69657 
stonewall rank 51: 43965 of 69657 
stonewall rank 99: 43647 of 69657 
stonewall rank 19: 43736 of 69657 
stonewall rank 115: 43715 of 69657 
stonewall rank 3: 43722 of 69657 
stonewall rank 83: 43856 of 69657 
stonewall rank 75: 50593 of 69657 
stonewall rank 123: 49819 of 69657 
stonewall rank 91: 49920 of 69657 
stonewall rank 43: 50420 of 69657 
stonewall rank 11: 50722 of 69657 
stonewall rank 59: 49919 of 69657 
stonewall rank 27: 49954 of 69657 
stonewall rank 107: 50634 of 69657 
stonewall rank 7: 50574 of 69657 
stonewall rank 31: 67898 of 69657 
stonewall rank 55: 50312 of 69657 
stonewall rank 71: 50595 of 69657 
stonewall rank 103: 50670 of 69657 
stonewall rank 39: 50827 of 69657 
stonewall rank 87: 50315 of 69657 
stonewall rank 119: 50371 of 69657 
stonewall rank 23: 50474 of 69657 
stonewall rank 111: 68916 of 69657 
stonewall rank 63: 67947 of 69657 
stonewall rank 127: 67925 of 69657 
stonewall rank 95: 67824 of 69657 
stonewall rank 79: 68909 of 69657 
stonewall rank 47: 68903 of 69657 
stonewall rank 15: 68724 of 69657 
mdtest_hard_delete
-- started at 11/08/2018 16:42:00 --

mdtest-1.9.3 was launched with 128 total task(s) on 16 node(s)
Command line used: /home/jburto2/io-500-dev/bin/mdtest "-r" "-t" "-F" "-w" "3901" "-e" "3901" "-d" "/mnt/lustre/jburto2/io500/datafiles/io500.2018.11.08-15.34.45/mdt_hard" "-n" "62500" "-x" "/mnt/lustre/jburto2/io500/datafiles/io500.2018.11.08-15.34.45/mdt_hard-stonewall"
Path: /mnt/lustre/jburto2/io500/datafiles/io500.2018.11.08-15.34.45
FS: 144.2 TiB   Used FS: 15.5%   Inodes: 286.0 Mi   Used Inodes: 1.6%

128 tasks, 8000000 files

SUMMARY rate: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation     :          0.000          0.000          0.000          0.000
   File stat         :          0.000          0.000          0.000          0.000
   File read         :          0.000          0.000          0.000          0.000
   File removal      :       9862.080       9862.080       9862.080          0.000
   Tree creation     :          0.000          0.000          0.000          0.000
   Tree removal      :          7.964          7.964          7.964          0.000

-- finished at 11/08/2018 16:50:03 --
mdtest_hard_read
-- started at 11/08/2018 16:39:23 --

mdtest-1.9.3 was launched with 128 total task(s) on 16 node(s)
Command line used: /home/jburto2/io-500-dev/bin/mdtest "-E" "-t" "-F" "-w" "3901" "-e" "3901" "-d" "/mnt/lustre/jburto2/io500/datafiles/io500.2018.11.08-15.34.45/mdt_hard" "-n" "62500" "-x" "/mnt/lustre/jburto2/io500/datafiles/io500.2018.11.08-15.34.45/mdt_hard-stonewall"
Path: /mnt/lustre/jburto2/io500/datafiles/io500.2018.11.08-15.34.45
FS: 144.2 TiB   Used FS: 15.5%   Inodes: 285.0 Mi   Used Inodes: 1.6%

128 tasks, 8000000 files

SUMMARY rate: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation     :          0.000          0.000          0.000          0.000
   File stat         :          0.000          0.000          0.000          0.000
   File read         :      30903.196      30903.196      30903.196          0.000
   File removal      :          0.000          0.000          0.000          0.000
   Tree creation     :          0.000          0.000          0.000          0.000
   Tree removal      :          0.000          0.000          0.000          0.000

-- finished at 11/08/2018 16:41:57 --
mdtest_hard_stat
-- started at 11/08/2018 16:30:12 --

mdtest-1.9.3 was launched with 128 total task(s) on 16 node(s)
Command line used: /home/jburto2/io-500-dev/bin/mdtest "-T" "-t" "-F" "-w" "3901" "-e" "3901" "-d" "/mnt/lustre/jburto2/io500/datafiles/io500.2018.11.08-15.34.45/mdt_hard" "-n" "62500" "-x" "/mnt/lustre/jburto2/io500/datafiles/io500.2018.11.08-15.34.45/mdt_hard-stonewall"
Path: /mnt/lustre/jburto2/io500/datafiles/io500.2018.11.08-15.34.45
FS: 144.2 TiB   Used FS: 15.5%   Inodes: 290.9 Mi   Used Inodes: 4.5%

128 tasks, 8000000 files

SUMMARY rate: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation     :          0.000          0.000          0.000          0.000
   File stat         :      38658.747      38658.747      38658.747          0.000
   File read         :          0.000          0.000          0.000          0.000
   File removal      :          0.000          0.000          0.000          0.000
   Tree creation     :          0.000          0.000          0.000          0.000
   Tree removal      :          0.000          0.000          0.000          0.000

-- finished at 11/08/2018 16:32:15 --
mdtest_hard_write
-- started at 11/08/2018 16:05:22 --

mdtest-1.9.3 was launched with 128 total task(s) on 16 node(s)
Command line used: /home/jburto2/io-500-dev/bin/mdtest "-C" "-t" "-F" "-w" "3901" "-e" "3901" "-d" "/mnt/lustre/jburto2/io500/datafiles/io500.2018.11.08-15.34.45/mdt_hard" "-n" "62500" "-x" "/mnt/lustre/jburto2/io500/datafiles/io500.2018.11.08-15.34.45/mdt_hard-stonewall" "-W" "300"
Path: /mnt/lustre/jburto2/io500/datafiles/io500.2018.11.08-15.34.45
FS: 144.2 TiB   Used FS: 15.5%   Inodes: 290.9 Mi   Used Inodes: 2.9%

128 tasks, 8000000 files
stonewall rank 120: 36380 of 37186 
Continue stonewall hit min: 34433 max: 37186 avg: 35842.4 
stonewall rank 0: 36679 of 37186 
stonewall rank 32: 36542 of 37186 
stonewall rank 96: 36605 of 37186 
stonewall rank 48: 36627 of 37186 
stonewall rank 16: 36436 of 37186 
stonewall rank 80: 36406 of 37186 
stonewall rank 64: 36900 of 37186 
stonewall rank 112: 36548 of 37186 
stonewall rank 12: 35102 of 37186 
stonewall rank 56: 36467 of 37186 
stonewall rank 44: 35081 of 37186 
stonewall rank 72: 36596 of 37186 
stonewall rank 4: 36650 of 37186 
stonewall rank 24: 36556 of 37186 
stonewall rank 36: 36415 of 37186 
stonewall rank 68: 36438 of 37186 
stonewall rank 104: 36383 of 37186 
stonewall rank 84: 35799 of 37186 
stonewall rank 88: 36396 of 37186 
stonewall rank 8: 36270 of 37186 
stonewall rank 116: 35958 of 37186 
stonewall rank 40: 36531 of 37186 
stonewall rank 20: 35777 of 37186 
stonewall rank 52: 35840 of 37186 
stonewall rank 100: 36482 of 37186 
stonewall rank 28: 35272 of 37186 
stonewall rank 60: 35179 of 37186 
stonewall rank 76: 35003 of 37186 
stonewall rank 92: 35294 of 37186 
stonewall rank 124: 35285 of 37186 
stonewall rank 108: 35059 of 37186 

SUMMARY rate: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation     :      25430.040      25430.040      25430.040          0.000
   File stat         :          0.000          0.000          0.000          0.000
   File read         :          0.000          0.000          0.000          0.000
   File removal      :          0.000          0.000          0.000          0.000
   Tree creation     :       1437.122       1437.122       1437.122          0.000
   Tree removal      :          0.000          0.000          0.000          0.000

-- finished at 11/08/2018 16:10:36 --
stonewall rank 82: 36693 of 37186 
stonewall rank 18: 36727 of 37186 
stonewall rank 2: 36616 of 37186 
stonewall rank 66: 36524 of 37186 
stonewall rank 114: 36601 of 37186 
stonewall rank 34: 36640 of 37186 
stonewall rank 50: 36653 of 37186 
stonewall rank 98: 36666 of 37186 
stonewall rank 42: 36496 of 37186 
stonewall rank 106: 36214 of 37186 
stonewall rank 90: 36276 of 37186 
stonewall rank 94: 34911 of 37186 
stonewall rank 26: 36248 of 37186 
stonewall rank 10: 36468 of 37186 
stonewall rank 6: 35177 of 37186 
stonewall rank 122: 36227 of 37186 
stonewall rank 102: 35198 of 37186 
stonewall rank 74: 36267 of 37186 
stonewall rank 70: 35271 of 37186 
stonewall rank 58: 36253 of 37186 
stonewall rank 38: 35175 of 37186 
stonewall rank 118: 34907 of 37186 
stonewall rank 22: 35195 of 37186 
stonewall rank 86: 34916 of 37186 
stonewall rank 54: 35258 of 37186 
stonewall rank 126: 34756 of 37186 
stonewall rank 46: 34874 of 37186 
stonewall rank 110: 34883 of 37186 
stonewall rank 78: 34819 of 37186 
stonewall rank 14: 34782 of 37186 
stonewall rank 30: 34910 of 37186 
stonewall rank 62: 34811 of 37186 
stonewall rank 81: 35173 of 37186 
stonewall rank 49: 35291 of 37186 
stonewall rank 33: 35203 of 37186 
stonewall rank 1: 35232 of 37186 
stonewall rank 97: 35103 of 37186 
stonewall rank 17: 35319 of 37186 
stonewall rank 65: 35083 of 37186 
stonewall rank 113: 35326 of 37186 
stonewall rank 25: 34694 of 37186 
stonewall rank 37: 37077 of 37186 
stonewall rank 9: 34553 of 37186 
stonewall rank 5: 37107 of 37186 
stonewall rank 89: 34554 of 37186 
stonewall rank 85: 36972 of 37186 
stonewall rank 41: 34733 of 37186 
stonewall rank 117: 36986 of 37186 
stonewall rank 73: 34633 of 37186 
stonewall rank 69: 37174 of 37186 
stonewall rank 57: 34574 of 37186 
stonewall rank 53: 37080 of 37186 
stonewall rank 105: 34553 of 37186 
stonewall rank 121: 34676 of 37186 
stonewall rank 21: 37054 of 37186 
stonewall rank 125: 37009 of 37186 
stonewall rank 29: 36833 of 37186 
stonewall rank 109: 36870 of 37186 
stonewall rank 13: 36924 of 37186 
stonewall rank 61: 37043 of 37186 
stonewall rank 77: 36805 of 37186 
stonewall rank 45: 36774 of 37186 
stonewall rank 93: 36891 of 37186 
stonewall rank 3: 37174 of 37186 
stonewall rank 99: 36994 of 37186 
stonewall rank 83: 36992 of 37186 
stonewall rank 115: 37082 of 37186 
stonewall rank 51: 36959 of 37186 
stonewall rank 19: 37087 of 37186 
stonewall rank 35: 37176 of 37186 
stonewall rank 67: 37128 of 37186 
stonewall rank 107: 34609 of 37186 
stonewall rank 91: 34675 of 37186 
stonewall rank 79: 35991 of 37186 
stonewall rank 59: 34664 of 37186 
stonewall rank 63: 35911 of 37186 
stonewall rank 43: 34433 of 37186 
stonewall rank 111: 35882 of 37186 
stonewall rank 75: 34594 of 37186 
stonewall rank 11: 34516 of 37186 
stonewall rank 23: 34633 of 37186 
stonewall rank 27: 34791 of 37186 
stonewall rank 103: 34854 of 37186 
stonewall rank 123: 34755 of 37186 
stonewall rank 39: 34887 of 37186 
stonewall rank 7: 34829 of 37186 
stonewall rank 119: 34686 of 37186 
stonewall rank 87: 34551 of 37186 
stonewall rank 55: 34719 of 37186 
stonewall rank 95: 35962 of 37186 
stonewall rank 71: 34949 of 37186 
stonewall rank 31: 35866 of 37186 
stonewall rank 127: 35885 of 37186 
stonewall rank 47: 35865 of 37186 
stonewall rank 15: 35880 of 37186 
result_summary
[RESULT] BW   phase 1            ior_easy_write               12.476 GB/s : time 384.77 seconds
[RESULT] IOPS phase 1         mdtest_easy_write               45.559 kiops : time 1063.22 seconds
[RESULT] BW   phase 2            ior_hard_write                0.236 GB/s : time 379.31 seconds
[RESULT] IOPS phase 2         mdtest_hard_write               25.430 kiops : time 571.28 seconds
[RESULT] IOPS phase 3                      find              364.780 kiops : time  37.49 seconds
[RESULT] BW   phase 3             ior_easy_read                7.077 GB/s : time 678.26 seconds
[RESULT] IOPS phase 4          mdtest_easy_stat               99.653 kiops : time  92.08 seconds
[RESULT] BW   phase 4             ior_hard_read                0.853 GB/s : time 105.05 seconds
[RESULT] IOPS phase 5          mdtest_hard_stat               38.659 kiops : time 125.67 seconds
[RESULT] IOPS phase 6        mdtest_easy_delete               21.098 kiops : time 425.88 seconds
[RESULT] IOPS phase 7          mdtest_hard_read               30.903 kiops : time 156.65 seconds
[RESULT] IOPS phase 8        mdtest_hard_delete                9.862 kiops : time 485.35 seconds
[SCORE] Bandwidth 2.05437 GB/s : IOPS 42.412 kiops : TOTAL 9.33434