AI400

Institution DDN
Client Procs Per Node 24
Client Operating System CentOS
Client Operating System Version 8.0
Client Kernel Version 4.18.0-80.el8.x86_64

DATA SERVER

Storage Type SSD
Volatile Memory 64 GB
Storage Interface NVMe
Network InfiniBand
Software Version 2.12.58
OS Version CentOS 7.6

INFORMATION

Client Nodes 10
Client Total Procs 240
Metadata Nodes 2
Metadata Storage Devices 0
Data Nodes 2
Data Storage Devices 12

METADATA

Easy Write 152.84 kIOP/s
Easy Stat 451.97 kIOP/s
Easy Delete 132.76 kIOP/s
Hard Write 79.65 kIOP/s
Hard Read 172.59 kIOP/s
Hard Stat 449.93 kIOP/s
Hard Delete 75.15 kIOP/s

Submitted Files

io500
#!/bin/bash
#SBATCH --job-name=io500-ai400-2
#SBATCH -p 32n
#SBATCH --nodes=10
#SBATCH --ntasks-per-node=24
#SBATCH -o io_500_out_%j
#SBATCH -e io_500_err_%j
#SBATCH --dependency=singleton
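
# 10 nodes x 24 tasks per node = 240 Slurm tasks; the benchmark itself is launched
# further below with Open MPI (-np 240, -npernode 6) against a hostfile of Docker
# containers (4 per client node, 6 ranks each) rather than through srun.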

LUSTRE_MDS=es400nv-vm[1-4]
LUSTRE_OSS=es400nv-vm[1-4]
#LUSTRE_CLIENT=${SLURM_JOB_NODELIST}
LUSTRE_CLIENT=c[01-10]
LUSTRE_CLIENT_CONTAINER=${LUSTRE_CLIENT}-c[0-3]
MGS=10.0.11.224@o2ib10
FSNAME=/ai400
MNT=/ai400

ROOT=`pwd`
module purge
module load mpi/gcc/openmpi/2.1.6
PDSH="pdsh"

#./utilities/prepare.sh

for i in `seq 0 3`; do 
	$PDSH -w ${LUSTRE_CLIENT} "docker stop %h-c$i"
	$PDSH -w ${LUSTRE_CLIENT} "docker rm %h-c$i"
done

$PDSH -w ${LUSTRE_CLIENT} "docker build -t lustreclient ${PWD}/."
for i in `seq 0 3`; do
	$PDSH -w ${LUSTRE_CLIENT} "docker run -h %h-c$i --name %h-c$i -d -p 2000$i:2000$i -e SSH_PORT=2000$i -v /work:/work -v /dev/infiniband:/dev/infiniband -v /usr/lib64:/usr/lib64 -v /sys/class/infiniband_verbs:/sys/class/infiniband_verbs -v /etc/libibverbs.d:/etc/libibverbs.d --privileged lustreclient"
done
$PDSH -w ${LUSTRE_CLIENT} 'docker ps'

# Lustre MDS/OSS Setting
$PDSH -w ${LUSTRE_MDS} "echo 128 > /sys/module/mdt/parameters/max_mod_rpcs_per_client"
$PDSH -w ${LUSTRE_OSS},${LUSTRE_MDS} "sysctl -w vm.min_free_kbytes=524288"
$PDSH -w ${LUSTRE_OSS} lctl set_param \
osd-ldiskfs.*.read_cache_enable=0 \
osd-ldiskfs.*.writethrough_cache_enable=0 \
obdfilter.*.brw_size=16 \
obdfilter.*.precreate_batch=1024
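
# Optional sanity check (not part of the submitted run): the server-side tunables
# above can be read back to confirm they took effect, e.g.:
#$PDSH -w ${LUSTRE_OSS} "lctl get_param obdfilter.*.brw_size obdfilter.*.precreate_batch osd-ldiskfs.*.read_cache_enable osd-ldiskfs.*.writethrough_cache_enable"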

# ReMount Lustre Client
$PDSH -w ${LUSTRE_CLIENT_CONTAINER} umount -t lustre -a
$PDSH -w ${LUSTRE_CLIENT_CONTAINER} mount -t lustre ${MGS}:${FSNAME} ${MNT}
sleep 2

# Lustre Client Setting
$PDSH -w ${LUSTRE_CLIENT_CONTAINER} lctl set_param \
osc.*.max_pages_per_rpc=16M \
osc.*.max_rpcs_in_flight=16 \
osc.*.max_dirty_mb=512 \
osc.*.checksums=0 \
llite.*.max_read_ahead_mb=2048 \
llite.*.max_read_ahead_per_file_mb=256 \
llite.*.max_cached_mb=16384 \
ldlm.namespaces.*.lru_size=0 \
ldlm.namespaces.*.lru_max_age=5000 \
mdc.*.max_rpcs_in_flight=128 \
mdc.*.max_mod_rpcs_in_flight=127
sleep 2
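
# Optional sanity check (not part of the submitted run): read the client tunables
# back from the containers to verify they were applied, e.g.:
#$PDSH -w ${LUSTRE_CLIENT_CONTAINER} "lctl get_param osc.*.max_pages_per_rpc osc.*.max_rpcs_in_flight mdc.*.max_rpcs_in_flight llite.*.max_cached_mb"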

# Cleanup & TRIM to all OSTs
$PDSH -w ${LUSTRE_CLIENT_CONTAINER} lctl set_param ldlm.namespaces.*.lru_size=clear
#$PDSH -w ${LUSTRE_OSS} fstrim -av
$PDSH -w ${LUSTRE_MDS},${LUSTRE_OSS} "echo 3 > /proc/sys/vm/drop_caches"
$PDSH -w ${LUSTRE_CLIENT} "cpupower frequency-set -g performance"

#
# INSTRUCTIONS:
# Edit this file as needed for your machine.
# This simplified version is just for running on a single node.
# It is a simplified version of site-configs/sandia/startup.sh, which includes SLURM directives.
# Most of the variables set here are needed by io500_fixed.sh, which gets sourced at the end of this script.
# Please also edit the 'extra_description' function.

set -euo pipefail  # better error handling

# turn these to True successively while you debug and tune this benchmark.
# for each one that you turn to true, go and edit the appropriate function.
# to find the function name, see the 'main' function.
# These are listed in the order that they run.
io500_run_ior_easy="True" # does the write phase and enables the subsequent read
io500_run_md_easy="True"  # does the creat phase and enables the subsequent stat
io500_run_ior_hard="True" # does the write phase and enables the subsequent read
io500_run_md_hard="True"  # does the creat phase and enables the subsequent read
io500_run_find="True"
io500_run_ior_easy_read="True"
io500_run_md_easy_stat="True"
io500_run_ior_hard_read="True"
io500_run_md_hard_stat="True"
io500_run_md_hard_read="True"
io500_run_md_easy_delete="True" # turn this off if you want to just run find by itself
io500_run_md_hard_delete="True" # turn this off if you want to just run find by itself
io500_run_mdreal="False"  # this one is optional
io500_cleanup_workdir="False"  # this flag is currently ignored. You'll need to clean up your data files manually if you want to.
io500_clean_cache="False" # attempt to clean the cache after every benchmark, useful for validating the performance results and for testing with a local node; it uses the io500_clean_cache_cmd (can be overwritten); make sure the user can write to /proc/sys/vm/drop_caches
io500_stonewall_timer=300 # Stonewalling timer, set to 300 to be an official run; set to 0, if you never want to abort...
io500_rules="regular" # Choose regular for an official regular submission or scc for a Student Cluster Competition submission to execute the test cases for 30 seconds instead of 300 seconds

io500_mdtest_easy_files_per_proc=200000
io500_mdtest_hard_files_per_proc=142500
io500_ior_hard_writes_per_proc=125000
io500_ior_easy_size=44000
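# With 240 ranks these counts size the working set roughly as follows (the figures
# match the aggregate sizes reported in the output below):
#   mdtest easy : 240 * 200000 = 48,000,000 files
#   mdtest hard : 240 * 142500 = 34,200,000 files
#   ior hard    : 240 * 125000 segments * 47008 bytes ~ 1.28 TiB (single shared file)
#   ior easy    : 240 * 44000 MiB ~ 10.07 TiB (file per process)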

# to run this benchmark, find and edit each of these functions.
# please also edit 'extra_description' function to help us collect the required data.
function main {
  setup_directories
  setup_paths
  setup_ior_easy # required if you want a complete score
  setup_ior_hard # required if you want a complete score
  setup_mdt_easy # required if you want a complete score
  setup_mdt_hard # required if you want a complete score
  setup_find     # required if you want a complete score
  setup_mdreal   # optional
  run_benchmarks
}

function setup_directories {
  # set directories for where the benchmark files are created and where the results will go.
  # If you want to set up stripe tuning on your output directories or anything similar, this is a good place to do it.
  timestamp=`date +%Y.%m.%d-%H.%M.%S`           # create a uniquifier
  io500_workdir=${MNT}/datafiles/io500.$timestamp # directory where the data will be stored
  io500_result_dir=$PWD/results/$timestamp      # the directory where the output results will be kept
  mkdir -p $io500_workdir $io500_result_dir
  mkdir -p $io500_workdir/ior_hard $io500_workdir/ior_easy
  lfs setdirstripe -c 4 $io500_workdir/mdt_easy
  lfs setdirstripe -c 4 $io500_workdir/mdt_hard
  lfs setdirstripe -c 4 -D $io500_workdir/mdt_easy
  lfs setdirstripe -c 4 -D $io500_workdir/mdt_hard

  lfs setstripe -L mdt -E 1M $io500_workdir/mdt_easy
  lfs setstripe -L mdt -E 1M $io500_workdir/mdt_hard
  lfs setstripe -C 240 -S 16M $io500_workdir/ior_hard
}
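
# (Not part of the submitted run) The layouts set above can be inspected afterwards
# with, e.g.:
#   lfs getdirstripe $io500_workdir/mdt_easy
#   lfs getstripe -d $io500_workdir/mdt_easy   # Data-on-MDT component (-L mdt -E 1M)
#   lfs getstripe -d $io500_workdir/ior_hard   # overstriped (-C 240), 16M stripes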

function setup_paths {
  # Set the paths to the binaries.  If you ran ./utilities/prepare.sh successfully, then the binaries are in ./bin/.
  io500_ior_cmd=$PWD/bin/ior
  io500_mdtest_cmd=$PWD/bin/mdtest
  io500_mdreal_cmd=$PWD/bin/md-real-io
  io500_mpirun="/work/tools/mpi/gcc/openmpi/2.1.1/bin/mpirun"
  io500_mpiargs="--allow-run-as-root -np 240 -npernode 6 -hostfile ./hostfile-container"
}
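
# The hostfile referenced above (./hostfile-container) is not included in this
# listing; it presumably enumerates the 40 container hostnames with 6 slots each
# (10 clients x 4 containers x 6 ranks = 240), built roughly like:
#   for n in $(seq -w 1 10); do
#     for i in $(seq 0 3); do echo "c${n}-c${i} slots=6"; done
#   done > ./hostfile-container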

function setup_ior_easy {
  io500_ior_easy_params="-t 2048k "
  #io500_ior_easy_params="-t 16m "
  echo -n ""
}

function setup_mdt_easy {
  io500_mdtest_easy_params="-u -L" # unique dir per thread, files only at leaves
}

function setup_ior_hard {
  io500_ior_hard_api="POSIX"
  io500_ior_hard_api_specific_options=""
}

function setup_mdt_hard {
  io500_mdtest_hard_api="POSIX"
  io500_mdtest_hard_api_specific_options=""
}

function setup_find {
  #
  # setup the find command. This is an area where innovation is allowed.
  #    There are three default options provided: a serial find, a parallel Python
  #    version, and a parallel C version. The current default is the serial find,
  #    but it is very slow. We recommend either customizing it or using the parallel C version.
  #    For GPFS, we recommend the provided mmfind wrapper described below.
  #    Instructions are below.
  #    If a custom approach is used, please provide enough info so others can reproduce it.

  # the serial version that should run (SLOWLY) without modification
  #io500_find_mpi="False"
  #io500_find_cmd=$PWD/bin/sfind.sh
  #io500_find_cmd_args=""

  # a parallel version in C, the -s adds a stonewall
  #   for a real run, turn -s (stonewall) off or set it at 300 or more
  #   to prepare this (assuming you've run ./utilities/prepare.sh already):
  #   > cd build/pfind
  #   > ./prepare.sh
  #   > ./compile.sh
  #   > cp pfind ../../bin/
  #   If you use io500_find_mpi="True", then this will run with the same
  #   number of MPI nodes and ranks as the other phases.
  #   If you prefer another number (fewer might be better here),
  #   then you can set io500_find_mpi to "False" and write a wrapper
  #   script which sets up MPI as you would like.  Then change
  #   io500_find_cmd to point to your wrapper script.
  io500_find_mpi="True"
  io500_find_cmd="$PWD/bin/pfind"
  # uses stonewalling, run pfind
  io500_find_cmd_args=""

  # for GPFS systems, you should probably use the provided mmfind wrapper
  # if you used ./utilities/prepare.sh, you'll find this wrapper in ./bin/mmfind.sh
  #io500_find_mpi="False"
  #io500_find_cmd="$PWD/bin/mmfind.sh"
  #io500_find_cmd_args=""
}

function setup_mdreal {
  echo -n ""
}

function run_benchmarks {
  # Important: source the io500_fixed.sh script.  Do not change it. If you discover
  # a need to change it, please email the mailing list to discuss
  source ./utilities/io500_fixed.sh 2>&1 | tee $io500_result_dir/io-500-summary.$timestamp.txt
}

# Information fields; these provide information about your system hardware.
# Use https://vi4io.org/io500-info-creator/ to generate the hardware information
# that you want to include publicly!
function extra_description {
  # TODO: Please add your information using the info-creator!
  # EXAMPLE:
  io500_info_system_name='xxx'      # e.g. Oakforest-PACS
}

main
ior_easy_read
IOR-3.3.0+dev: MPI Coordinated Test of Parallel I/O
Began               : Thu Nov  7 13:00:21 2019
Command line        : /work/BMLab/Lustre/io500_sc19/io-500-dev/bin/ior -r -R -t 2048k -b 44000m -F -i 1 -C -Q 1 -g -G 27 -k -e -o /ai400/datafiles/io500.2019.11.07-21.33.51/ior_easy/ior_file_easy -O stoneWallingStatusFile=/ai400/datafiles/io500.2019.11.07-21.33.51/ior_easy/stonewall
Machine             : Linux c01-c0
TestID              : 0
StartTime           : Thu Nov  7 13:00:21 2019
Path                : /ai400/datafiles/io500.2019.11.07-21.33.51/ior_easy
FS                  : 28.7 TiB   Used FS: 81.5%   Inodes: 253.4 Mi   Used Inodes: 28.2%

Options: 
api                 : POSIX
apiVersion          : 
test filename       : /ai400/datafiles/io500.2019.11.07-21.33.51/ior_easy/ior_file_easy
access              : file-per-process
type                : independent
segments            : 1
ordering in a file  : sequential
ordering inter file : constant task offset
task offset         : 1
nodes               : 40
tasks               : 240
clients per node    : 6
repetitions         : 1
xfersize            : 2 MiB
blocksize           : 42.97 GiB
aggregate filesize  : 10.07 TiB

Results: 

access    bw(MiB/s)  IOPS       Latency(s)  block(KiB) xfer(KiB)  open(s)    wr/rd(s)   close(s)   total(s)   iter
------    ---------  ----       ----------  ---------- ---------  --------   --------   --------   --------   ----
read      42724      21397      0.011210    45056000   2048.00    0.507416   246.76     0.132448   247.17     0   
Max Read:  42724.14 MiB/sec (44799.51 MB/sec)

Summary of all tests:
Operation   Max(MiB)   Min(MiB)  Mean(MiB)     StdDev   Max(OPs)   Min(OPs)  Mean(OPs)     StdDev    Mean(s) Stonewall(s) Stonewall(MiB) Test# #Tasks tPN reps fPP reord reordoff reordrand seed segcnt   blksiz    xsize aggs(MiB)   API RefNum
read        42724.14   42724.14   42724.14       0.00   21362.07   21362.07   21362.07       0.00  247.16705         NA            NA     0    240   6    1   1     1        1         0    0      1 46137344000  2097152 10560000.0 POSIX      0
Finished            : Thu Nov  7 13:04:28 2019
ior_easy_write
IOR-3.3.0+dev: MPI Coordinated Test of Parallel I/O
Began               : Thu Nov  7 12:33:59 2019
Command line        : /work/BMLab/Lustre/io500_sc19/io-500-dev/bin/ior -w -t 2048k -b 44000m -F -i 1 -C -Q 1 -g -G 27 -k -e -o /ai400/datafiles/io500.2019.11.07-21.33.51/ior_easy/ior_file_easy -O stoneWallingStatusFile=/ai400/datafiles/io500.2019.11.07-21.33.51/ior_easy/stonewall -O stoneWallingWearOut=1 -D 300
Machine             : Linux c01-c0
TestID              : 0
StartTime           : Thu Nov  7 12:33:59 2019
Path                : /ai400/datafiles/io500.2019.11.07-21.33.51/ior_easy
FS                  : 28.7 TiB   Used FS: 41.9%   Inodes: 232.0 Mi   Used Inodes: 0.0%

Options: 
api                 : POSIX
apiVersion          : 
test filename       : /ai400/datafiles/io500.2019.11.07-21.33.51/ior_easy/ior_file_easy
access              : file-per-process
type                : independent
segments            : 1
ordering in a file  : sequential
ordering inter file : constant task offset
task offset         : 1
nodes               : 40
tasks               : 240
clients per node    : 6
repetitions         : 1
xfersize            : 2 MiB
blocksize           : 42.97 GiB
aggregate filesize  : 10.07 TiB
stonewallingTime    : 300
stoneWallingWearOut : 1

Results: 

access    bw(MiB/s)  IOPS       Latency(s)  block(KiB) xfer(KiB)  open(s)    wr/rd(s)   close(s)   total(s)   iter
------    ---------  ----       ----------  ---------- ---------  --------   --------   --------   --------   ----
stonewalling pairs accessed min: 8361 max: 22000 -- min data: 16.3 GiB mean data: 37.7 GiB time: 300.2s
write     29305      14658      0.013646    45056000   2048.00    0.207726   360.21     0.209735   360.35     0   
Max Write: 29304.88 MiB/sec (30728.40 MB/sec)

Summary of all tests:
Operation   Max(MiB)   Min(MiB)  Mean(MiB)     StdDev   Max(OPs)   Min(OPs)  Mean(OPs)     StdDev    Mean(s) Stonewall(s) Stonewall(MiB) Test# #Tasks tPN reps fPP reord reordoff reordrand seed segcnt   blksiz    xsize aggs(MiB)   API RefNum
write       29304.88   29304.88   29304.88       0.00   14652.44   14652.44   14652.44       0.00  360.34950     300.18      30863.16     0    240   6    1   1     1        1         0    0      1 46137344000  2097152 10560000.0 POSIX      0
Finished            : Thu Nov  7 12:39:59 2019
ior_hard_read
IOR-3.3.0+dev: MPI Coordinated Test of Parallel I/O
Began               : Thu Nov  7 13:06:50 2019
Command line        : /work/BMLab/Lustre/io500_sc19/io-500-dev/bin/ior -r -R -s 125000 -a POSIX -i 1 -C -Q 1 -g -G 27 -k -e -t 47008 -b 47008 -o /ai400/datafiles/io500.2019.11.07-21.33.51/ior_hard/IOR_file -O stoneWallingStatusFile=/ai400/datafiles/io500.2019.11.07-21.33.51/ior_hard/stonewall
Machine             : Linux c01-c0
TestID              : 0
StartTime           : Thu Nov  7 13:06:50 2019
Path                : /ai400/datafiles/io500.2019.11.07-21.33.51/ior_hard
FS                  : 28.7 TiB   Used FS: 81.5%   Inodes: 253.4 Mi   Used Inodes: 28.2%

Options: 
api                 : POSIX
apiVersion          : 
test filename       : /ai400/datafiles/io500.2019.11.07-21.33.51/ior_hard/IOR_file
access              : single-shared-file
type                : independent
segments            : 125000
ordering in a file  : sequential
ordering inter file : constant task offset
task offset         : 1
nodes               : 40
tasks               : 240
clients per node    : 6
repetitions         : 1
xfersize            : 47008 bytes
blocksize           : 47008 bytes
aggregate filesize  : 1.28 TiB

Results: 

access    bw(MiB/s)  IOPS       Latency(s)  block(KiB) xfer(KiB)  open(s)    wr/rd(s)   close(s)   total(s)   iter
------    ---------  ----       ----------  ---------- ---------  --------   --------   --------   --------   ----
read      43203      964606     30.96       45.91      45.91      0.147062   31.10      0.144752   31.13      0   
Max Read:  43203.28 MiB/sec (45301.92 MB/sec)

Summary of all tests:
Operation   Max(MiB)   Min(MiB)  Mean(MiB)     StdDev   Max(OPs)   Min(OPs)  Mean(OPs)     StdDev    Mean(s) Stonewall(s) Stonewall(MiB) Test# #Tasks tPN reps fPP reord reordoff reordrand seed segcnt   blksiz    xsize aggs(MiB)   API RefNum
read        43203.28   43203.28   43203.28       0.00  963706.70  963706.70  963706.70       0.00   31.12980         NA            NA     0    240   6    1   0     1        1         0    0 125000    47008    47008 1344909.6 POSIX      0
Finished            : Thu Nov  7 13:07:21 2019
ior_hard_write
IOR-3.3.0+dev: MPI Coordinated Test of Parallel I/O
Began               : Thu Nov  7 12:45:29 2019
Command line        : /work/BMLab/Lustre/io500_sc19/io-500-dev/bin/ior -w -s 125000 -a POSIX -i 1 -C -Q 1 -g -G 27 -k -e -t 47008 -b 47008 -o /ai400/datafiles/io500.2019.11.07-21.33.51/ior_hard/IOR_file -O stoneWallingStatusFile=/ai400/datafiles/io500.2019.11.07-21.33.51/ior_hard/stonewall -O stoneWallingWearOut=1 -D 300
Machine             : Linux c01-c0
TestID              : 0
StartTime           : Thu Nov  7 12:45:29 2019
Path                : /ai400/datafiles/io500.2019.11.07-21.33.51/ior_hard
FS                  : 28.7 TiB   Used FS: 77.1%   Inodes: 253.4 Mi   Used Inodes: 18.1%

Options: 
api                 : POSIX
apiVersion          : 
test filename       : /ai400/datafiles/io500.2019.11.07-21.33.51/ior_hard/IOR_file
access              : single-shared-file
type                : independent
segments            : 125000
ordering in a file  : sequential
ordering inter file : constant task offset
task offset         : 1
nodes               : 40
tasks               : 240
clients per node    : 6
repetitions         : 1
xfersize            : 47008 bytes
blocksize           : 47008 bytes
aggregate filesize  : 1.28 TiB
stonewallingTime    : 300
stoneWallingWearOut : 1

Results: 

access    bw(MiB/s)  IOPS       Latency(s)  block(KiB) xfer(KiB)  open(s)    wr/rd(s)   close(s)   total(s)   iter
------    ---------  ----       ----------  ---------- ---------  --------   --------   --------   --------   ----
stonewalling pairs accessed min: 87687 max: 125000 -- min data: 3.8 GiB mean data: 4.3 GiB time: 300.0s
write     3030.57    67612      415.04      45.91      45.91      0.127876   443.70     0.126167   443.78     0   
Max Write: 3030.57 MiB/sec (3177.79 MB/sec)

Summary of all tests:
Operation   Max(MiB)   Min(MiB)  Mean(MiB)     StdDev   Max(OPs)   Min(OPs)  Mean(OPs)     StdDev    Mean(s) Stonewall(s) Stonewall(MiB) Test# #Tasks tPN reps fPP reord reordoff reordrand seed segcnt   blksiz    xsize aggs(MiB)   API RefNum
write        3030.57    3030.57    3030.57       0.00   67601.00   67601.00   67601.00       0.00  443.78040     300.02       3488.95     0    240   6    1   0     1        1         0    0 125000    47008    47008 1344909.6 POSIX      0
Finished            : Thu Nov  7 12:52:53 2019
mdtest_easy_delete
-- started at 11/07/2019 13:08:26 --

mdtest-3.3.0+dev was launched with 240 total task(s) on 40 node(s)
Command line used: /work/BMLab/Lustre/io500_sc19/io-500-dev/bin/mdtest '-r' '-F' '-P' '-d' '/ai400/datafiles/io500.2019.11.07-21.33.51/mdt_easy' '-n' '200000' '-u' '-L' '-x' '/ai400/datafiles/io500.2019.11.07-21.33.51/mdt_easy-stonewall' '-N' '1'
Path: /ai400/datafiles/io500.2019.11.07-21.33.51
FS: 28.7 TiB   Used FS: 81.5%   Inodes: 253.4 Mi   Used Inodes: 28.2%

Nodemap: 111111000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
V-0: Rank   0 Line  2137 Shifting ranks by 6 for each phase.
240 tasks, 48000000 files

SUMMARY rate: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation             :          0.000          0.000          0.000          0.000
   File stat                 :          0.000          0.000          0.000          0.000
   File read                 :          0.000          0.000          0.000          0.000
   File removal              :     132755.615     132753.823     132755.273          0.397
   Tree creation             :          0.000          0.000          0.000          0.000
   Tree removal              :          5.334          5.334          5.334          0.000

SUMMARY time: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation             :          0.000          0.000          0.000          0.000
   File stat                 :          0.000          0.000          0.000          0.000
   File read                 :          0.000          0.000          0.000          0.000
   File removal              :        361.572        361.567        361.568          0.001
   Tree creation             :          0.000          0.000          0.000          0.000
   Tree removal              :          0.187          0.187          0.187          0.000
-- finished at 11/07/2019 13:14:29 --

mdtest_easy_stat
-- started at 11/07/2019 13:04:36 --

mdtest-3.3.0+dev was launched with 240 total task(s) on 40 node(s)
Command line used: /work/BMLab/Lustre/io500_sc19/io-500-dev/bin/mdtest '-T' '-F' '-P' '-d' '/ai400/datafiles/io500.2019.11.07-21.33.51/mdt_easy' '-n' '200000' '-u' '-L' '-x' '/ai400/datafiles/io500.2019.11.07-21.33.51/mdt_easy-stonewall' '-N' '1'
Path: /ai400/datafiles/io500.2019.11.07-21.33.51
FS: 28.7 TiB   Used FS: 81.5%   Inodes: 253.4 Mi   Used Inodes: 28.2%

Nodemap: 111111000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
V-0: Rank   0 Line  2137 Shifting ranks by 6 for each phase.
240 tasks, 48000000 files

SUMMARY rate: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation             :          0.000          0.000          0.000          0.000
   File stat                 :     451973.708     451965.315     451971.248          2.031
   File read                 :          0.000          0.000          0.000          0.000
   File removal              :          0.000          0.000          0.000          0.000
   Tree creation             :          0.000          0.000          0.000          0.000
   Tree removal              :          0.000          0.000          0.000          0.000

SUMMARY time: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation             :          0.000          0.000          0.000          0.000
   File stat                 :        106.203        106.201        106.201          0.000
   File read                 :          0.000          0.000          0.000          0.000
   File removal              :          0.000          0.000          0.000          0.000
   Tree creation             :          0.000          0.000          0.000          0.000
   Tree removal              :          0.000          0.000          0.000          0.000
-- finished at 11/07/2019 13:06:42 --

mdtest_easy_write
-- started at 11/07/2019 12:40:07 --

mdtest-3.3.0+dev was launched with 240 total task(s) on 40 node(s)
Command line used: /work/BMLab/Lustre/io500_sc19/io-500-dev/bin/mdtest '-C' '-F' '-P' '-d' '/ai400/datafiles/io500.2019.11.07-21.33.51/mdt_easy' '-n' '200000' '-u' '-L' '-x' '/ai400/datafiles/io500.2019.11.07-21.33.51/mdt_easy-stonewall' '-N' '1' '-W' '300'
Path: /ai400/datafiles/io500.2019.11.07-21.33.51
FS: 28.7 TiB   Used FS: 77.1%   Inodes: 232.0 Mi   Used Inodes: 0.0%

Nodemap: 111111000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
V-0: Rank   0 Line  2137 Shifting ranks by 6 for each phase.
240 tasks, 48000000 files
Continue stonewall hit min: 190481 max: 200000 avg: 195777.7 


SUMMARY rate: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation             :     152839.698     152836.691     152838.996          0.732
   File stat                 :          0.000          0.000          0.000          0.000
   File read                 :          0.000          0.000          0.000          0.000
   File removal              :          0.000          0.000          0.000          0.000
   File create (stonewall)   :             NA             NA     156666.312             NA
   Tree creation             :         18.490         18.490         18.490          0.000
   Tree removal              :          0.000          0.000          0.000          0.000

SUMMARY time: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation             :        314.061        314.055        314.056          0.002
   File stat                 :          0.000          0.000          0.000          0.000
   File read                 :          0.000          0.000          0.000          0.000
   File removal              :          0.000          0.000          0.000          0.000
   File create (stonewall)   :             NA             NA        299.915             NA
   Tree creation             :          0.054          0.054          0.054          0.000
   Tree removal              :          0.000          0.000          0.000          0.000
-- finished at 11/07/2019 12:45:21 --

mdtest_hard_delete
-- started at 11/07/2019 13:17:21 --

mdtest-3.3.0+dev was launched with 240 total task(s) on 40 node(s)
Command line used: /work/BMLab/Lustre/io500_sc19/io-500-dev/bin/mdtest '-r' '-t' '-F' '-P' '-w' '3901' '-e' '3901' '-d' '/ai400/datafiles/io500.2019.11.07-21.33.51/mdt_hard' '-n' '142500' '-x' '/ai400/datafiles/io500.2019.11.07-21.33.51/mdt_hard-stonewall' '-a' 'POSIX' '-N' '1'
Path: /ai400/datafiles/io500.2019.11.07-21.33.51
FS: 28.7 TiB   Used FS: 81.5%   Inodes: 253.4 Mi   Used Inodes: 10.2%

Nodemap: 111111000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
V-0: Rank   0 Line  2137 Shifting ranks by 6 for each phase.
240 tasks, 34200000 files

SUMMARY rate: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation             :          0.000          0.000          0.000          0.000
   File stat                 :          0.000          0.000          0.000          0.000
   File read                 :          0.000          0.000          0.000          0.000
   File removal              :      75146.480      75146.181      75146.350          0.077
   Tree creation             :          0.000          0.000          0.000          0.000
   Tree removal              :          0.555          0.555          0.555          0.000

SUMMARY time: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation             :          0.000          0.000          0.000          0.000
   File stat                 :          0.000          0.000          0.000          0.000
   File read                 :          0.000          0.000          0.000          0.000
   File removal              :        359.846        359.844        359.845          0.000
   Tree creation             :          0.000          0.000          0.000          0.000
   Tree removal              :          1.803          1.803          1.803          0.000
-- finished at 11/07/2019 13:23:22 --

mdtest_hard_read
-- started at 11/07/2019 13:14:36 --

mdtest-3.3.0+dev was launched with 240 total task(s) on 40 node(s)
Command line used: /work/BMLab/Lustre/io500_sc19/io-500-dev/bin/mdtest '-X' '-E' '-t' '-F' '-P' '-w' '3901' '-e' '3901' '-d' '/ai400/datafiles/io500.2019.11.07-21.33.51/mdt_hard' '-n' '142500' '-x' '/ai400/datafiles/io500.2019.11.07-21.33.51/mdt_hard-stonewall' '-a' 'POSIX' '-N' '1'
Path: /ai400/datafiles/io500.2019.11.07-21.33.51
FS: 28.7 TiB   Used FS: 81.5%   Inodes: 253.4 Mi   Used Inodes: 10.2%

Nodemap: 111111000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
V-0: Rank   0 Line  2137 Shifting ranks by 6 for each phase.
240 tasks, 34200000 files

SUMMARY rate: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation             :          0.000          0.000          0.000          0.000
   File stat                 :          0.000          0.000          0.000          0.000
   File read                 :     172593.290     172564.236     172591.827          2.528
   File removal              :          0.000          0.000          0.000          0.000
   Tree creation             :          0.000          0.000          0.000          0.000
   Tree removal              :          0.000          0.000          0.000          0.000

SUMMARY time: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation             :          0.000          0.000          0.000          0.000
   File stat                 :          0.000          0.000          0.000          0.000
   File read                 :        156.701        156.675        156.676          0.002
   File removal              :          0.000          0.000          0.000          0.000
   Tree creation             :          0.000          0.000          0.000          0.000
   Tree removal              :          0.000          0.000          0.000          0.000
-- finished at 11/07/2019 13:17:13 --

mdtest_hard_stat
-- started at 11/07/2019 13:07:24 --

mdtest-3.3.0+dev was launched with 240 total task(s) on 40 node(s)
Command line used: /work/BMLab/Lustre/io500_sc19/io-500-dev/bin/mdtest '-T' '-t' '-F' '-P' '-w' '3901' '-e' '3901' '-d' '/ai400/datafiles/io500.2019.11.07-21.33.51/mdt_hard' '-n' '142500' '-x' '/ai400/datafiles/io500.2019.11.07-21.33.51/mdt_hard-stonewall' '-a' 'POSIX' '-N' '1'
Path: /ai400/datafiles/io500.2019.11.07-21.33.51
FS: 28.7 TiB   Used FS: 81.5%   Inodes: 253.4 Mi   Used Inodes: 28.2%

Nodemap: 111111000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
V-0: Rank   0 Line  2137 Shifting ranks by 6 for each phase.
240 tasks, 34200000 files

SUMMARY rate: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation             :          0.000          0.000          0.000          0.000
   File stat                 :     449926.066     449911.935     449918.200          3.047
   File read                 :          0.000          0.000          0.000          0.000
   File removal              :          0.000          0.000          0.000          0.000
   Tree creation             :          0.000          0.000          0.000          0.000
   Tree removal              :          0.000          0.000          0.000          0.000

SUMMARY time: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation             :          0.000          0.000          0.000          0.000
   File stat                 :         60.103         60.101         60.102          0.000
   File read                 :          0.000          0.000          0.000          0.000
   File removal              :          0.000          0.000          0.000          0.000
   Tree creation             :          0.000          0.000          0.000          0.000
   Tree removal              :          0.000          0.000          0.000          0.000
-- finished at 11/07/2019 13:08:24 --

mdtest_hard_write
-- started at 11/07/2019 12:53:00 --

mdtest-3.3.0+dev was launched with 240 total task(s) on 40 node(s)
Command line used: /work/BMLab/Lustre/io500_sc19/io-500-dev/bin/mdtest '-C' '-t' '-F' '-P' '-w' '3901' '-e' '3901' '-d' '/ai400/datafiles/io500.2019.11.07-21.33.51/mdt_hard' '-n' '142500' '-x' '/ai400/datafiles/io500.2019.11.07-21.33.51/mdt_hard-stonewall' '-a' 'POSIX' '-N' '1' '-W' '300'
Path: /ai400/datafiles/io500.2019.11.07-21.33.51
FS: 28.7 TiB   Used FS: 81.5%   Inodes: 253.4 Mi   Used Inodes: 18.1%

Nodemap: 111111000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
V-0: Rank   0 Line  2137 Shifting ranks by 6 for each phase.
240 tasks, 34200000 files
Continue stonewall hit min: 98602 max: 112671 avg: 105018.1 


SUMMARY rate: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation             :      79646.002      79644.957      79645.770          0.279
   File stat                 :          0.000          0.000          0.000          0.000
   File read                 :          0.000          0.000          0.000          0.000
   File removal              :          0.000          0.000          0.000          0.000
   File create (stonewall)   :             NA             NA      84010.623             NA
   Tree creation             :        136.475        136.475        136.475          0.000
   Tree removal              :          0.000          0.000          0.000          0.000

SUMMARY time: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation             :        339.520        339.515        339.516          0.001
   File stat                 :          0.000          0.000          0.000          0.000
   File read                 :          0.000          0.000          0.000          0.000
   File removal              :          0.000          0.000          0.000          0.000
   File create (stonewall)   :             NA             NA        300.014             NA
   Tree creation             :          0.007          0.007          0.007          0.000
   Tree removal              :          0.000          0.000          0.000          0.000
-- finished at 11/07/2019 12:58:40 --

result_summary
[RESULT] BW   phase 1            ior_easy_write               28.618 GB/s : time 360.21 seconds
[RESULT] IOPS phase 1         mdtest_easy_write              152.840 kiops : time 314.06 seconds
[RESULT] BW   phase 2            ior_hard_write                2.960 GB/s : time 443.70 seconds
[RESULT] IOPS phase 2         mdtest_hard_write               79.646 kiops : time 339.52 seconds
[RESULT] IOPS phase 3                      find              810.420 kiops : time  92.84 seconds
[RESULT] BW   phase 3             ior_easy_read               41.723 GB/s : time 246.76 seconds
[RESULT] IOPS phase 4          mdtest_easy_stat              451.974 kiops : time 106.20 seconds
[RESULT] BW   phase 4             ior_hard_read               42.190 GB/s : time  31.10 seconds
[RESULT] IOPS phase 5          mdtest_hard_stat              449.926 kiops : time  60.10 seconds
[RESULT] IOPS phase 6        mdtest_easy_delete              132.756 kiops : time 361.57 seconds
[RESULT] IOPS phase 7          mdtest_hard_read              172.593 kiops : time 156.70 seconds
[RESULT] IOPS phase 8        mdtest_hard_delete               75.146 kiops : time 369.08 seconds
[SCORE] Bandwidth 19.65 GB/s : IOPS 207.632 kiops : TOTAL 63.8746
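
For reference, the [SCORE] line follows the IO500 scoring rule: 19.65 GB/s is the geometric mean of the four bandwidth phases, 207.632 kiops is the geometric mean of the eight IOPS phases, and the total is their geometric mean in turn, sqrt(19.65 * 207.632) ~ 63.87.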