- io500
-
#!/bin/bash
ROOT=/work/BMLab/2018/io500/io-500-dev
module purge
module load mpi/gcc/openmpi/2.1.1
export OMPI_MCA_btl_openib_if_include=mlx5_0:1
# Lustre OSS Setting
pdsh -w es14k-vm[1-4] lctl set_param \
osd-ldiskfs.*.read_cache_enable=0 \
obdfilter.*.writethrough_cache_enable=0 \
obdfilter.*.precreate_batch=1024
# Lustre Client Setting
pdsh -w c[01-10] lctl set_param \
osc.*.max_pages_per_rpc=16M \
osc.*.max_rpcs_in_flight=16 \
osc.*.max_dirty_mb=512 \
osc.*.checksums=0 \
llite.*.max_read_ahead_mb=2048
#llite.*.max_cached_mb=16G
#llite.*.max_read_ahead_mb=62G \
#llite.*.max_read_ahead_per_file_mb=62G \
#llite.*.max_read_ahead_whole_mb=62G
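# Optional sanity check (a sketch, not part of the submitted run): the same
# parameters can be read back with lctl get_param to confirm they took effect, e.g.
#   pdsh -w es14k-vm[1-4] lctl get_param obdfilter.*.precreate_batch
#   pdsh -w c[01-10] lctl get_param osc.*.max_pages_per_rpc osc.*.checksums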
#
# INSTRUCTIONS:
# Edit this file as needed for your machine.
# This simplified version is just for running on a single node.
# It is a simplified version of site-configs/sandia/startup.sh, which includes SLURM directives.
# Most of the variables set here are needed by io500_fixed.sh, which gets sourced at the end of this script.
# Please also edit the 'extra_description' function.
set -euo pipefail # better error handling
# turn these to True one at a time while you debug and tune this benchmark.
# for each one that you turn to True, go and edit the appropriate function.
# to find the function name, see the 'main' function.
# these are listed in the order in which they run.
io500_run_ior_easy="True" # does the write phase and enables the subsequent read
io500_run_md_easy="True" # does the creat phase and enables the subsequent stat
io500_run_ior_hard="True" # does the write phase and enables the subsequent read
io500_run_md_hard="True" # does the creat phase and enables the subsequent read
io500_run_find="True"
io500_run_ior_easy_read="True"
io500_run_md_easy_stat="True"
io500_run_ior_hard_read="True"
io500_run_md_hard_stat="True"
io500_run_md_hard_read="True"
io500_run_md_easy_delete="True" # turn this off if you want to just run find by itself
io500_run_md_hard_delete="True" # turn this off if you want to just run find by itself
io500_run_mdreal="False" # this one is optional
io500_cleanup_workdir="False" # this flag is currently ignored. You'll need to clean up your data files manually if you want to.
io500_stonewall_timer=300 # Stonewalling timer: stop the write phases with wear-out after 300 s; set to 0 if you never want to abort early
# to run this benchmark, find and edit each of these functions.
# please also edit the 'extra_description' function to help us collect the required data.
function main {
setup_directories
setup_paths
setup_ior_easy # required if you want a complete score
setup_ior_hard # required if you want a complete score
setup_mdt_easy # required if you want a complete score
setup_mdt_hard # required if you want a complete score
setup_find # required if you want a complete score
setup_mdreal # optional
run_benchmarks
}
function setup_directories {
# set directories for where the benchmark files are created and where the results will go.
# If you want to set up stripe tuning on your output directories or anything similar, then this is a good place to do it.
timestamp=`date +%Y.%m.%d-%H.%M.%S` # create a uniquifier
#io500_workdir=$PWD/datafiles/io500.$timestamp # directory where the data will be stored
io500_workdir=/scratch0/io500.out
io500_result_dir=$PWD/results/$timestamp # the directory where the output results will be kept
mkdir -p $io500_workdir $io500_result_dir
lfs setdirstripe -c 8 $io500_workdir/mdt_easy
lfs setdirstripe -c 8 $io500_workdir/mdt_hard
lfs setdirstripe -c 8 -D $io500_workdir/mdt_easy
lfs setdirstripe -c 8 -D $io500_workdir/mdt_hard
mkdir -p $io500_workdir/ior_hard
lfs setstripe -c -1 -S 16M $io500_workdir/ior_hard
}
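# Optional check (a sketch, not in the original script): confirm the striping
# that was just applied, e.g.
#   lfs getdirstripe $io500_workdir/mdt_easy
#   lfs getstripe -d $io500_workdir/ior_hard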
function setup_paths {
# Set the paths to the binaries. If you ran ./utilities/prepare.sh successfully, then binaries are in ./bin/
io500_ior_cmd=$PWD/bin/ior
io500_mdtest_cmd=$PWD/bin/mdtest
io500_mdreal_cmd=$PWD/bin/md-real-io
io500_mpirun="mpirun"
io500_mpiargs="--allow-run-as-root"
}
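# Note (an illustrative sketch, not the exact invocation recorded here): for a
# multi-node run such as the 10-node x 24-rank job in the logs below, the rank
# count and hosts are typically passed through io500_mpiargs, e.g. with a
# hypothetical ./hostfile listing c01..c10:
#   io500_mpiargs="--allow-run-as-root -np 240 --hostfile ./hostfile"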
function setup_ior_easy {
# io500_ior_easy_size is the amount of data written per rank in MiB units,
# but it can be any number as long as it is somehow used to scale the IOR
# runtime as part of io500_ior_easy_params
io500_ior_easy_size=55000
# 2 MiB transfers, 55000 MiB (~53.7 GiB) per proc, file per proc
io500_ior_easy_params="-t 2048k -b ${io500_ior_easy_size}m -F"
}
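# Sizing check (rough, against the 240-rank run logged below): 240 ranks x
# 55000 MiB/rank = 13,200,000 MiB, about 12.6 TiB aggregate, large enough to
# keep the write phase busy past the 300 s stonewall (it took ~343 s here).
#   echo "scale=2; 240*55000/1024/1024" | bc   # -> 12.58 (TiB)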
function setup_mdt_easy {
io500_mdtest_easy_params="-u -L" # unique dir per thread, files only at leaves
io500_mdtest_easy_files_per_proc=270000
}
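# Similar check: 240 ranks x 270000 files/rank = 64.8 M files requested; the
# 300 s stonewall (-W 300) stops the create phase before the full per-rank
# count is reached (see the stonewall lines in the mdtest_easy_write log below).
#   echo $(( 240 * 270000 ))   # -> 64800000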
function setup_ior_hard {
io500_ior_hard_writes_per_proc=7500
io500_ior_hard_other_options="" #e.g., -E to keep precreated files using lfs setstripe, or -a MPIIO
}
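# Sizing check (rough): ior_hard always writes 47008-byte records to a single
# shared file, so 240 ranks x 7500 segments x 47008 B is about 78.8 GiB
# aggregate, matching the ior_hard logs below.
#   echo "scale=2; 240*7500*47008/1024/1024/1024" | bc   # -> 78.80 (GiB)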
function setup_mdt_hard {
io500_mdtest_hard_files_per_proc=40000
io500_mdtest_hard_other_options=""
}
function setup_find {
#
# setup the find command. This is an area where innovation is allowed.
# There are three default options provided: a serial find, a parallel Python
# version, and a parallel C version. The current default is the serial one, but
# it is very slow; we recommend either customizing it or using the parallel C version.
# For GPFS, we recommend using the provided mmfind wrapper described below.
# Instructions are below.
# If a custom approach is used, please provide enough information so others can reproduce it.
# the serial version that should run (SLOWLY) without modification
#io500_find_mpi="False"
#io500_find_cmd=$PWD/bin/sfind.sh
#io500_find_cmd_args=""
# a parallel version in C; the -s option adds a stonewall
# for a real run, turn -s (stonewall) off or set it to 300 or more
# to prepare this (assuming you've run ./utilities/prepare.sh already):
# > cd build/pfind
# > ./prepare.sh
# > ./compile.sh
# > cp pfind ../../bin/
# If you use io500_find_mpi="True", then this will run with the same
# number of MPI nodes and ranks as the other phases.
# If you prefer another number (and fewer might be better here),
# then you can set io500_find_mpi to "False" and write a wrapper
# script that sets up MPI as you would like. Then change
# io500_find_cmd to point to your wrapper script.
io500_find_mpi="True"
io500_find_cmd="$PWD/bin/pfind"
# uses stonewalling, run pfind
io500_find_cmd_args="-s $io500_stonewall_timer -r $io500_result_dir/pfind_results"
# for GPFS systems, you should probably use the provided mmfind wrapper
# if you used ./utilities/prepare.sh, you'll find this wrapper in ./bin/mmfind.sh
#io500_find_mpi="False"
#io500_find_cmd="$PWD/bin/mmfind.sh"
#io500_find_cmd_args=""
}
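# A minimal wrapper sketch for the io500_find_mpi="False" path described above
# (hypothetical file name find_wrapper.sh; adjust the rank count for your site):
#   #!/bin/bash
#   # launch pfind on fewer ranks than the main benchmark uses
#   mpirun --allow-run-as-root -np 40 ./bin/pfind "$@"
# Then point io500_find_cmd at that wrapper instead of bin/pfind.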
function setup_mdreal {
io500_mdreal_params="-P=5000 -I=1000"
}
function run_benchmarks {
# Important: source the io500_fixed.sh script. Do not change it. If you discover
# a need to change it, please email the mailing list to discuss
source ./utilities/io500_fixed.sh 2>&1 | tee $io500_result_dir/io-500-summary.$timestamp.txt
}
# Add key/value pairs defining your system
# Feel free to add extra ones if you'd like
function extra_description {
# top level info
io500_info_system_name='xxx' # e.g. Oakforest-PACS
io500_info_institute_name='xxx' # e.g. JCAHPC
io500_info_storage_age_in_months='xxx' # not install date but age since last refresh
io500_info_storage_install_date='xxx' # MM/YY
io500_info_filesystem='xxx' # e.g. BeeGFS, DataWarp, GPFS, IME, Lustre
io500_info_filesystem_version='xxx'
io500_info_filesystem_vendor='xxx'
# client side info
io500_info_num_client_nodes='xxx'
io500_info_procs_per_node='xxx'
# server side info
io500_info_num_metadata_server_nodes='xxx'
io500_info_num_data_server_nodes='xxx'
io500_info_num_data_storage_devices='xxx' # if you have 5 data servers, and each has 5 drives, then this number is 25
io500_info_num_metadata_storage_devices='xxx' # if you have 2 metadata servers, and each has 5 drives, then this number is 10
io500_info_data_storage_type='xxx' # HDD, SSD, persistent memory, etc, feel free to put specific models
io500_info_metadata_storage_type='xxx' # HDD, SSD, persistent memory, etc, feel free to put specific models
io500_info_storage_network='xxx' # infiniband, omnipath, ethernet, etc
io500_info_storage_interface='xxx' # SAS, SATA, NVMe, etc
# miscellaneous
io500_info_whatever='WhateverElseYouThinkRelevant'
}
main
- ior_easy_read
-
IOR-3.2.0: MPI Coordinated Test of Parallel I/O
Began : Thu Nov 8 17:01:00 2018
Command line : /work/BMLab/2018/io500/io-500-dev/bin/ior -r -R -C -Q 1 -g -G 27 -k -e -t 2048k -b 55000m -F -o /scratch0/io500.out/ior_easy/ior_file_easy -O stoneWallingStatusFile=/scratch0/io500.out/ior_easy/stonewall
Machine : Linux c01
TestID : 0
StartTime : Thu Nov 8 17:01:00 2018
Path : /scratch0/io500.out/ior_easy
FS : 79.0 TiB Used FS: 16.1% Inodes: 640.0 Mi Used Inodes: 10.9%
Options:
api : POSIX
apiVersion :
test filename : /scratch0/io500.out/ior_easy/ior_file_easy
access : file-per-process
type : independent
segments : 1
ordering in a file : sequential
ordering inter file : constant task offset
task offset : 1
tasks : 240
clients per node : 24
repetitions : 1
xfersize : 2 MiB
blocksize : 53.71 GiB
aggregate filesize : 12.59 TiB
Results:
access bw(MiB/s) block(KiB) xfer(KiB) open(s) wr/rd(s) close(s) total(s) iter
------ --------- ---------- --------- -------- -------- -------- -------- ----
read 36223 56320000 2048.00 0.129334 364.37 0.119904 364.41 0
Max Read: 36223.20 MiB/sec (37982.78 MB/sec)
Summary of all tests:
Operation Max(MiB) Min(MiB) Mean(MiB) StdDev Max(OPs) Min(OPs) Mean(OPs) StdDev Mean(s) Test# #Tasks tPN reps fPP reord reordoff reordrand seed segcnt blksiz xsize aggs(MiB) API RefNum
read 36223.20 36223.20 36223.20 0.00 18111.60 18111.60 18111.60 0.00 364.40735 0 240 24 1 1 1 1 0 0 1 57671680000 2097152 13200000.0 POSIX 0
Finished : Thu Nov 8 17:07:04 2018
- ior_easy_write
-
IOR-3.2.0: MPI Coordinated Test of Parallel I/O
Began : Thu Nov 8 16:37:51 2018
Command line : /work/BMLab/2018/io500/io-500-dev/bin/ior -w -C -Q 1 -g -G 27 -k -e -t 2048k -b 55000m -F -o /scratch0/io500.out/ior_easy/ior_file_easy -O stoneWallingStatusFile=/scratch0/io500.out/ior_easy/stonewall -O stoneWallingWearOut=1 -D 300
Machine : Linux c01
TestID : 0
StartTime : Thu Nov 8 16:37:51 2018
Path : /scratch0/io500.out/ior_easy
FS : 79.0 TiB Used FS: 0.0% Inodes: 640.0 Mi Used Inodes: 0.0%
Options:
api : POSIX
apiVersion :
test filename : /scratch0/io500.out/ior_easy/ior_file_easy
access : file-per-process
type : independent
segments : 1
ordering in a file : sequential
ordering inter file : constant task offset
task offset : 1
tasks : 240
clients per node : 24
repetitions : 1
xfersize : 2 MiB
blocksize : 53.71 GiB
aggregate filesize : 12.59 TiB
stonewallingTime : 300
stoneWallingWearOut : 1
Results:
access bw(MiB/s) block(KiB) xfer(KiB) open(s) wr/rd(s) close(s) total(s) iter
------ --------- ---------- --------- -------- -------- -------- -------- ----
stonewalling pairs accessed min: 18818 max: 27500 -- min data: 36.8 GiB mean data: 51.6 GiB time: 300.0s
write 38441 56320000 2048.00 0.149619 343.31 0.144904 343.38 0
Max Write: 38440.89 MiB/sec (40308.19 MB/sec)
Summary of all tests:
Operation Max(MiB) Min(MiB) Mean(MiB) StdDev Max(OPs) Min(OPs) Mean(OPs) StdDev Mean(s) Test# #Tasks tPN reps fPP reord reordoff reordrand seed segcnt blksiz xsize aggs(MiB) API RefNum
write 38440.89 38440.89 38440.89 0.00 19220.44 19220.44 19220.44 0.00 343.38438 0 240 24 1 1 1 1 0 0 1 57671680000 2097152 13200000.0 POSIX 0
Finished : Thu Nov 8 16:43:34 2018
- ior_hard_read
-
IOR-3.2.0: MPI Coordinated Test of Parallel I/O
Began : Thu Nov 8 17:09:10 2018
Command line : /work/BMLab/2018/io500/io-500-dev/bin/ior -r -R -C -Q 1 -g -G 27 -k -e -t 47008 -b 47008 -s 7500 -o /scratch0/io500.out/ior_hard/IOR_file -O stoneWallingStatusFile=/scratch0/io500.out/ior_hard/stonewall
Machine : Linux c01
TestID : 0
StartTime : Thu Nov 8 17:09:10 2018
Path : /scratch0/io500.out/ior_hard
FS : 79.0 TiB Used FS: 16.1% Inodes: 640.0 Mi Used Inodes: 10.9%
Options:
api : POSIX
apiVersion :
test filename : /scratch0/io500.out/ior_hard/IOR_file
access : single-shared-file
type : independent
segments : 7500
ordering in a file : sequential
ordering inter file : constant task offset
task offset : 1
tasks : 240
clients per node : 24
repetitions : 1
xfersize : 47008 bytes
blocksize : 47008 bytes
aggregate filesize : 78.80 GiB
Results:
access bw(MiB/s) block(KiB) xfer(KiB) open(s) wr/rd(s) close(s) total(s) iter
------ --------- ---------- --------- -------- -------- -------- -------- ----
read 4738 45.91 45.91 0.136728 16.92 0.139272 17.03 0
Max Read: 4738.28 MiB/sec (4968.45 MB/sec)
Summary of all tests:
Operation Max(MiB) Min(MiB) Mean(MiB) StdDev Max(OPs) Min(OPs) Mean(OPs) StdDev Mean(s) Test# #Tasks tPN reps fPP reord reordoff reordrand seed segcnt blksiz xsize aggs(MiB) API RefNum
read 4738.28 4738.28 4738.28 0.00 105693.66 105693.66 105693.66 0.00 17.03035 0 240 24 1 0 1 1 0 0 7500 47008 47008 80694.6 POSIX 0
Finished : Thu Nov 8 17:09:27 2018
- ior_hard_write
-
IOR-3.2.0: MPI Coordinated Test of Parallel I/O
Began : Thu Nov 8 16:49:01 2018
Command line : /work/BMLab/2018/io500/io-500-dev/bin/ior -w -C -Q 1 -g -G 27 -k -e -t 47008 -b 47008 -s 7500 -o /scratch0/io500.out/ior_hard/IOR_file -O stoneWallingStatusFile=/scratch0/io500.out/ior_hard/stonewall -O stoneWallingWearOut=1 -D 300
Machine : Linux c01
TestID : 0
StartTime : Thu Nov 8 16:49:01 2018
Path : /scratch0/io500.out/ior_hard
FS : 79.0 TiB Used FS: 15.9% Inodes: 640.0 Mi Used Inodes: 9.7%
Options:
api : POSIX
apiVersion :
test filename : /scratch0/io500.out/ior_hard/IOR_file
access : single-shared-file
type : independent
segments : 7500
ordering in a file : sequential
ordering inter file : constant task offset
task offset : 1
tasks : 240
clients per node : 24
repetitions : 1
xfersize : 47008 bytes
blocksize : 47008 bytes
aggregate filesize : 78.80 GiB
stonewallingTime : 300
stoneWallingWearOut : 1
Results:
access bw(MiB/s) block(KiB) xfer(KiB) open(s) wr/rd(s) close(s) total(s) iter
------ --------- ---------- --------- -------- -------- -------- -------- ----
stonewalling pairs accessed min: 7500 max: 7500 -- min data: 0.3 GiB mean data: 0.3 GiB time: 299.9s
write 268.80 45.91 45.91 0.173708 300.16 0.126608 300.21 0
Max Write: 268.80 MiB/sec (281.85 MB/sec)
Summary of all tests:
Operation Max(MiB) Min(MiB) Mean(MiB) StdDev Max(OPs) Min(OPs) Mean(OPs) StdDev Mean(s) Test# #Tasks tPN reps fPP reord reordoff reordrand seed segcnt blksiz xsize aggs(MiB) API RefNum
write 268.80 268.80 268.80 0.00 5995.84 5995.84 5995.84 0.00 300.20790 0 240 24 1 0 1 1 0 0 7500 47008 47008 80694.6 POSIX 0
Finished : Thu Nov 8 16:54:01 2018
- mdtest_easy_delete
-
-- started at 11/08/2018 17:11:15 --
mdtest-1.9.3 was launched with 240 total task(s) on 10 node(s)
Command line used: /work/BMLab/2018/io500/io-500-dev/bin/mdtest "-r" "-F" "-d" "/scratch0/io500.out/mdt_easy" "-n" "270000" "-u" "-L" "-x" "/scratch0/io500.out/mdt_easy-stonewall"
Path: /scratch0/io500.out
FS: 79.0 TiB Used FS: 16.1% Inodes: 640.0 Mi Used Inodes: 10.9%
240 tasks, 64800000 files
SUMMARY rate: (of 1 iterations)
Operation Max Min Mean Std Dev
--------- --- --- ---- -------
File creation : 0.000 0.000 0.000 0.000
File stat : 0.000 0.000 0.000 0.000
File read : 0.000 0.000 0.000 0.000
File removal : 226093.615 226093.615 226093.615 0.000
Tree creation : 0.000 0.000 0.000 0.000
Tree removal : 2.539 2.539 2.539 0.000
-- finished at 11/08/2018 17:16:02 --
- mdtest_easy_stat
-
-- started at 11/08/2018 17:07:06 --
mdtest-1.9.3 was launched with 240 total task(s) on 10 node(s)
Command line used: /work/BMLab/2018/io500/io-500-dev/bin/mdtest "-T" "-F" "-d" "/scratch0/io500.out/mdt_easy" "-n" "270000" "-u" "-L" "-x" "/scratch0/io500.out/mdt_easy-stonewall"
Path: /scratch0/io500.out
FS: 79.0 TiB Used FS: 16.1% Inodes: 640.0 Mi Used Inodes: 10.9%
240 tasks, 64800000 files
SUMMARY rate: (of 1 iterations)
Operation Max Min Mean Std Dev
--------- --- --- ---- -------
File creation : 0.000 0.000 0.000 0.000
File stat : 527669.323 527669.323 527669.323 0.000
File read : 0.000 0.000 0.000 0.000
File removal : 0.000 0.000 0.000 0.000
Tree creation : 0.000 0.000 0.000 0.000
Tree removal : 0.000 0.000 0.000 0.000
-- finished at 11/08/2018 17:09:09 --
- mdtest_easy_write
-
-- started at 11/08/2018 16:43:35 --
mdtest-1.9.3 was launched with 240 total task(s) on 10 node(s)
Command line used: /work/BMLab/2018/io500/io-500-dev/bin/mdtest "-C" "-F" "-d" "/scratch0/io500.out/mdt_easy" "-n" "270000" "-u" "-L" "-x" "/scratch0/io500.out/mdt_easy-stonewall" "-W" "300"
Path: /scratch0/io500.out
FS: 79.0 TiB Used FS: 15.9% Inodes: 640.0 Mi Used Inodes: 0.0%
240 tasks, 64800000 files
stonewall rank 50: 241374 of 270000
stonewall rank 122: 249949 of 270000
stonewall rank 170: 224063 of 270000
stonewall rank 75: 252102 of 270000
stonewall rank 51: 236692 of 270000
stonewall rank 123: 236295 of 270000
stonewall rank 171: 232896 of 270000
stonewall rank 82: 238685 of 270000
stonewall rank 58: 225319 of 270000
stonewall rank 130: 230872 of 270000
stonewall rank 178: 227195 of 270000
stonewall rank 83: 232301 of 270000
stonewall rank 59: 220265 of 270000
stonewall rank 131: 247315 of 270000
stonewall rank 179: 233030 of 270000
stonewall rank 90: 247282 of 270000
stonewall rank 66: 226172 of 270000
stonewall rank 138: 256969 of 270000
stonewall rank 186: 240554 of 270000
stonewall rank 91: 231903 of 270000
stonewall rank 67: 237575 of 270000
stonewall rank 139: 240066 of 270000
stonewall rank 187: 228748 of 270000
stonewall rank 49: 233905 of 270000
stonewall rank 73: 233437 of 270000
stonewall rank 53: 230706 of 270000
stonewall rank 121: 246082 of 270000
stonewall rank 169: 229023 of 270000
stonewall rank 77: 240586 of 270000
stonewall rank 57: 217720 of 270000
stonewall rank 125: 237638 of 270000
stonewall rank 173: 236329 of 270000
stonewall rank 81: 239566 of 270000
stonewall rank 61: 236797 of 270000
stonewall rank 129: 245019 of 270000
stonewall rank 177: 236115 of 270000
stonewall rank 85: 243787 of 270000
stonewall rank 65: 223261 of 270000
stonewall rank 133: 243340 of 270000
stonewall rank 181: 224738 of 270000
stonewall rank 89: 226125 of 270000
stonewall rank 69: 241793 of 270000
stonewall rank 137: 237312 of 270000
stonewall rank 185: 226612 of 270000
stonewall rank 93: 239165 of 270000
stonewall rank 141: 235088 of 270000
stonewall rank 189: 241280 of 270000
stonewall rank 175: 237624 of 270000
stonewall rank 55: 245346 of 270000
stonewall rank 142: 246247 of 270000
stonewall rank 79: 238944 of 270000
stonewall rank 183: 232365 of 270000
stonewall rank 143: 242261 of 270000
stonewall rank 62: 232044 of 270000
stonewall rank 120: 252789 of 270000
stonewall rank 191: 238779 of 270000
stonewall rank 63: 230078 of 270000
stonewall rank 124: 260014 of 270000
stonewall rank 168: 243365 of 270000
stonewall rank 87: 241730 of 270000
stonewall rank 71: 232890 of 270000
stonewall rank 126: 243190 of 270000
stonewall rank 172: 231028 of 270000
stonewall rank 94: 233550 of 270000
stonewall rank 52: 232527 of 270000
stonewall rank 127: 256968 of 270000
stonewall rank 174: 237876 of 270000
stonewall rank 95: 243919 of 270000
stonewall rank 54: 233062 of 270000
stonewall rank 132: 244873 of 270000
stonewall rank 176: 232490 of 270000
stonewall rank 74: 242306 of 270000
stonewall rank 56: 231103 of 270000
stonewall rank 134: 252661 of 270000
stonewall rank 180: 223667 of 270000
stonewall rank 78: 244144 of 270000
stonewall rank 60: 229405 of 270000
stonewall rank 135: 227873 of 270000
stonewall rank 182: 239554 of 270000
stonewall rank 80: 239692 of 270000
stonewall rank 68: 232628 of 270000
stonewall rank 136: 246114 of 270000
stonewall rank 184: 232101 of 270000
stonewall rank 84: 225508 of 270000
stonewall rank 70: 228021 of 270000
stonewall rank 140: 231134 of 270000
stonewall rank 188: 241515 of 270000
stonewall rank 86: 240377 of 270000
stonewall rank 48: 229391 of 270000
stonewall rank 128: 242927 of 270000
stonewall rank 190: 233128 of 270000
stonewall rank 88: 235162 of 270000
stonewall rank 64: 226861 of 270000
stonewall rank 92: 243298 of 270000
stonewall rank 72: 224878 of 270000
stonewall rank 76: 229449 of 270000
Continue stonewall hit min: 217720 max: 270000 avg: 256666.3
SUMMARY rate: (of 1 iterations)
Operation Max Min Mean Std Dev
--------- --- --- ---- -------
File creation : 199684.535 199684.535 199684.535 0.000
File stat : 0.000 0.000 0.000 0.000
File read : 0.000 0.000 0.000 0.000
File removal : 0.000 0.000 0.000 0.000
Tree creation : 17.681 17.681 17.681 0.000
Tree removal : 0.000 0.000 0.000 0.000
-- finished at 11/08/2018 16:49:00 --
- mdtest_hard_delete
-
-- started at 11/08/2018 17:19:06 --
mdtest-1.9.3 was launched with 240 total task(s) on 10 node(s)
Command line used: /work/BMLab/2018/io500/io-500-dev/bin/mdtest "-r" "-t" "-F" "-w" "3901" "-e" "3901" "-d" "/scratch0/io500.out/mdt_hard" "-n" "40000" "-x" "/scratch0/io500.out/mdt_hard-stonewall"
Path: /scratch0/io500.out
FS: 79.0 TiB Used FS: 16.1% Inodes: 640.0 Mi Used Inodes: 1.2%
240 tasks, 9600000 files
SUMMARY rate: (of 1 iterations)
Operation Max Min Mean Std Dev
--------- --- --- ---- -------
File creation : 0.000 0.000 0.000 0.000
File stat : 0.000 0.000 0.000 0.000
File read : 0.000 0.000 0.000 0.000
File removal : 58841.682 58841.682 58841.682 0.000
Tree creation : 0.000 0.000 0.000 0.000
Tree removal : 8.436 8.436 8.436 0.000
-- finished at 11/08/2018 17:21:28 --
- mdtest_hard_read
-
-- started at 11/08/2018 17:16:03 --
mdtest-1.9.3 was launched with 240 total task(s) on 10 node(s)
Command line used: /work/BMLab/2018/io500/io-500-dev/bin/mdtest "-E" "-t" "-F" "-w" "3901" "-e" "3901" "-d" "/scratch0/io500.out/mdt_hard" "-n" "40000" "-x" "/scratch0/io500.out/mdt_hard-stonewall"
Path: /scratch0/io500.out
FS: 79.0 TiB Used FS: 16.1% Inodes: 640.0 Mi Used Inodes: 1.3%
240 tasks, 9600000 files
SUMMARY rate: (of 1 iterations)
Operation Max Min Mean Std Dev
--------- --- --- ---- -------
File creation : 0.000 0.000 0.000 0.000
File stat : 0.000 0.000 0.000 0.000
File read : 46141.397 46141.397 46141.397 0.000
File removal : 0.000 0.000 0.000 0.000
Tree creation : 0.000 0.000 0.000 0.000
Tree removal : 0.000 0.000 0.000 0.000
-- finished at 11/08/2018 17:19:05 --
- mdtest_hard_stat
-
-- started at 11/08/2018 17:09:28 --
mdtest-1.9.3 was launched with 240 total task(s) on 10 node(s)
Command line used: /work/BMLab/2018/io500/io-500-dev/bin/mdtest "-T" "-t" "-F" "-w" "3901" "-e" "3901" "-d" "/scratch0/io500.out/mdt_hard" "-n" "40000" "-x" "/scratch0/io500.out/mdt_hard-stonewall"
Path: /scratch0/io500.out
FS: 79.0 TiB Used FS: 16.1% Inodes: 640.0 Mi Used Inodes: 10.9%
240 tasks, 9600000 files
SUMMARY rate: (of 1 iterations)
Operation Max Min Mean Std Dev
--------- --- --- ---- -------
File creation : 0.000 0.000 0.000 0.000
File stat : 79475.599 79475.599 79475.599 0.000
File read : 0.000 0.000 0.000 0.000
File removal : 0.000 0.000 0.000 0.000
Tree creation : 0.000 0.000 0.000 0.000
Tree removal : 0.000 0.000 0.000 0.000
-- finished at 11/08/2018 17:11:14 --
- mdtest_hard_write
-
-- started at 11/08/2018 16:54:02 --
mdtest-1.9.3 was launched with 240 total task(s) on 10 node(s)
Command line used: /work/BMLab/2018/io500/io-500-dev/bin/mdtest "-C" "-t" "-F" "-w" "3901" "-e" "3901" "-d" "/scratch0/io500.out/mdt_hard" "-n" "40000" "-x" "/scratch0/io500.out/mdt_hard-stonewall" "-W" "300"
Path: /scratch0/io500.out
FS: 79.0 TiB Used FS: 16.0% Inodes: 640.0 Mi Used Inodes: 9.7%
240 tasks, 9600000 files
stonewall rank 1: 33301 of 34919
stonewall rank 2: 33357 of 34919
stonewall rank 3: 33490 of 34919
stonewall rank 5: 33592 of 34919
stonewall rank 6: 33357 of 34919
stonewall rank 7: 33434 of 34919
stonewall rank 9: 33388 of 34919
stonewall rank 11: 33496 of 34919
stonewall rank 13: 33405 of 34919
stonewall rank 14: 33444 of 34919
stonewall rank 15: 33393 of 34919
stonewall rank 17: 33468 of 34919
stonewall rank 18: 33378 of 34919
stonewall rank 19: 33417 of 34919
stonewall rank 20: 33254 of 34919
stonewall rank 21: 33338 of 34919
stonewall rank 22: 33348 of 34919
stonewall rank 23: 33347 of 34919
stonewall rank 8: 33429 of 34919
stonewall rank 10: 33664 of 34919
stonewall rank 12: 33444 of 34919
stonewall rank 16: 33436 of 34919
stonewall rank 4: 33496 of 34919
stonewall rank 169: 34450 of 34919
stonewall rank 73: 28526 of 34919
stonewall rank 97: 29325 of 34919
stonewall rank 145: 28119 of 34919
stonewall rank 193: 26788 of 34919
stonewall rank 121: 33651 of 34919
stonewall rank 25: 28764 of 34919
stonewall rank 49: 33591 of 34919
stonewall rank 217: 28821 of 34919
stonewall rank 74: 28620 of 34919
stonewall rank 170: 34479 of 34919
stonewall rank 98: 29377 of 34919
stonewall rank 146: 28119 of 34919
stonewall rank 194: 26703 of 34919
stonewall rank 122: 33471 of 34919
stonewall rank 26: 28809 of 34919
stonewall rank 50: 33288 of 34919
stonewall rank 218: 28854 of 34919
stonewall rank 75: 28488 of 34919
stonewall rank 171: 34500 of 34919
stonewall rank 99: 29529 of 34919
stonewall rank 147: 28113 of 34919
stonewall rank 195: 26725 of 34919
stonewall rank 123: 33478 of 34919
stonewall rank 27: 28588 of 34919
stonewall rank 51: 33321 of 34919
stonewall rank 219: 28921 of 34919
stonewall rank 77: 28608 of 34919
stonewall rank 173: 34448 of 34919
stonewall rank 101: 29418 of 34919
stonewall rank 148: 27976 of 34919
stonewall rank 197: 26854 of 34919
stonewall rank 124: 33550 of 34919
stonewall rank 28: 28688 of 34919
stonewall rank 53: 33406 of 34919
stonewall rank 221: 28808 of 34919
stonewall rank 78: 28328 of 34919
stonewall rank 174: 34474 of 34919
stonewall rank 102: 29485 of 34919
stonewall rank 149: 28204 of 34919
stonewall rank 198: 26800 of 34919
stonewall rank 125: 33389 of 34919
stonewall rank 29: 28818 of 34919
stonewall rank 54: 33501 of 34919
stonewall rank 222: 28765 of 34919
stonewall rank 79: 28494 of 34919
stonewall rank 177: 34720 of 34919
stonewall rank 103: 29491 of 34919
stonewall rank 150: 28207 of 34919
stonewall rank 199: 26789 of 34919
stonewall rank 126: 33407 of 34919
stonewall rank 30: 28953 of 34919
stonewall rank 55: 33389 of 34919
Continue stonewall hit min: 26546 max: 34919 avg: 30529.7
stonewall rank 0: 33208 of 34919
stonewall rank 223: 28744 of 34919
stonewall rank 81: 28343 of 34919
stonewall rank 179: 34655 of 34919
stonewall rank 104: 29389 of 34919
stonewall rank 151: 28123 of 34919
stonewall rank 201: 27020 of 34919
stonewall rank 127: 33552 of 34919
stonewall rank 31: 28727 of 34919
stonewall rank 57: 33537 of 34919
stonewall rank 224: 28956 of 34919
stonewall rank 82: 28346 of 34919
stonewall rank 183: 34743 of 34919
stonewall rank 105: 29431 of 34919
stonewall rank 152: 28078 of 34919
stonewall rank 202: 26742 of 34919
stonewall rank 129: 33492 of 34919
stonewall rank 33: 28893 of 34919
stonewall rank 58: 33577 of 34919
stonewall rank 225: 28825 of 34919
stonewall rank 83: 28519 of 34919
stonewall rank 185: 34769 of 34919
stonewall rank 106: 29292 of 34919
stonewall rank 153: 28541 of 34919
stonewall rank 203: 26882 of 34919
stonewall rank 130: 33333 of 34919
stonewall rank 34: 28996 of 34919
stonewall rank 59: 33393 of 34919
stonewall rank 226: 28869 of 34919
stonewall rank 85: 28552 of 34919
stonewall rank 186: 34610 of 34919
stonewall rank 107: 29478 of 34919
stonewall rank 154: 28087 of 34919
stonewall rank 204: 26568 of 34919
stonewall rank 131: 33607 of 34919
stonewall rank 35: 28812 of 34919
stonewall rank 62: 33606 of 34919
stonewall rank 227: 28735 of 34919
stonewall rank 86: 28417 of 34919
stonewall rank 187: 34437 of 34919
stonewall rank 108: 29343 of 34919
stonewall rank 155: 28173 of 34919
stonewall rank 205: 26612 of 34919
stonewall rank 132: 33268 of 34919
stonewall rank 36: 28826 of 34919
stonewall rank 63: 33368 of 34919
stonewall rank 228: 28722 of 34919
stonewall rank 87: 28627 of 34919
stonewall rank 189: 34457 of 34919
stonewall rank 109: 29382 of 34919
stonewall rank 156: 28204 of 34919
stonewall rank 206: 26621 of 34919
stonewall rank 133: 33617 of 34919
stonewall rank 37: 28711 of 34919
stonewall rank 65: 33356 of 34919
stonewall rank 229: 28917 of 34919
stonewall rank 88: 28421 of 34919
stonewall rank 190: 34441 of 34919
stonewall rank 110: 29557 of 34919
stonewall rank 157: 28263 of 34919
stonewall rank 207: 26815 of 34919
stonewall rank 134: 33480 of 34919
stonewall rank 38: 28806 of 34919
stonewall rank 66: 33429 of 34919
stonewall rank 230: 28827 of 34919
stonewall rank 89: 28604 of 34919
stonewall rank 191: 34503 of 34919
stonewall rank 111: 29500 of 34919
stonewall rank 158: 28094 of 34919
stonewall rank 209: 26815 of 34919
stonewall rank 135: 33710 of 34919
stonewall rank 39: 28723 of 34919
stonewall rank 67: 33253 of 34919
stonewall rank 231: 28834 of 34919
stonewall rank 90: 28615 of 34919
stonewall rank 168: 34413 of 34919
stonewall rank 113: 29407 of 34919
stonewall rank 159: 28281 of 34919
stonewall rank 210: 26733 of 34919
stonewall rank 137: 33577 of 34919
stonewall rank 40: 28652 of 34919
stonewall rank 68: 33447 of 34919
stonewall rank 232: 28804 of 34919
stonewall rank 91: 28390 of 34919
stonewall rank 172: 34158 of 34919
stonewall rank 114: 29359 of 34919
stonewall rank 161: 28243 of 34919
stonewall rank 211: 26731 of 34919
stonewall rank 138: 33297 of 34919
stonewall rank 41: 28587 of 34919
stonewall rank 69: 33650 of 34919
stonewall rank 233: 28937 of 34919
stonewall rank 92: 28493 of 34919
stonewall rank 176: 34447 of 34919
stonewall rank 115: 29464 of 34919
stonewall rank 162: 28103 of 34919
stonewall rank 212: 26546 of 34919
stonewall rank 139: 33597 of 34919
stonewall rank 42: 28871 of 34919
stonewall rank 70: 33407 of 34919
stonewall rank 234: 28816 of 34919
stonewall rank 93: 28402 of 34919
stonewall rank 178: 34547 of 34919
stonewall rank 116: 29320 of 34919
stonewall rank 163: 28421 of 34919
stonewall rank 213: 26845 of 34919
stonewall rank 140: 33243 of 34919
stonewall rank 43: 28847 of 34919
stonewall rank 71: 33536 of 34919
stonewall rank 235: 29125 of 34919
stonewall rank 94: 28328 of 34919
stonewall rank 180: 34603 of 34919
stonewall rank 117: 29427 of 34919
stonewall rank 164: 28109 of 34919
stonewall rank 214: 26713 of 34919
stonewall rank 141: 33507 of 34919
stonewall rank 44: 28933 of 34919
stonewall rank 48: 33501 of 34919
stonewall rank 236: 29034 of 34919
stonewall rank 95: 28481 of 34919
stonewall rank 181: 34661 of 34919
stonewall rank 118: 29314 of 34919
stonewall rank 165: 28184 of 34919
stonewall rank 215: 26815 of 34919
stonewall rank 142: 33476 of 34919
stonewall rank 45: 29021 of 34919
stonewall rank 52: 33488 of 34919
stonewall rank 237: 28849 of 34919
stonewall rank 72: 28605 of 34919
stonewall rank 182: 34403 of 34919
stonewall rank 119: 29349 of 34919
stonewall rank 166: 28171 of 34919
stonewall rank 196: 26752 of 34919
stonewall rank 143: 33379 of 34919
stonewall rank 46: 28732 of 34919
stonewall rank 56: 33226 of 34919
stonewall rank 238: 28947 of 34919
stonewall rank 76: 28415 of 34919
stonewall rank 184: 34369 of 34919
stonewall rank 96: 29402 of 34919
stonewall rank 167: 28163 of 34919
stonewall rank 200: 26821 of 34919
stonewall rank 120: 33502 of 34919
stonewall rank 47: 28799 of 34919
stonewall rank 60: 33563 of 34919
stonewall rank 239: 28899 of 34919
stonewall rank 80: 28358 of 34919
stonewall rank 188: 34297 of 34919
stonewall rank 100: 29275 of 34919
stonewall rank 144: 28148 of 34919
stonewall rank 208: 26765 of 34919
stonewall rank 128: 33311 of 34919
stonewall rank 24: 28651 of 34919
stonewall rank 61: 33496 of 34919
stonewall rank 216: 28831 of 34919
stonewall rank 84: 28300 of 34919
stonewall rank 112: 29364 of 34919
stonewall rank 160: 27958 of 34919
stonewall rank 192: 26774 of 34919
stonewall rank 136: 33381 of 34919
stonewall rank 32: 28790 of 34919
stonewall rank 64: 33247 of 34919
stonewall rank 220: 28792 of 34919
SUMMARY rate: (of 1 iterations)
Operation Max Min Mean Std Dev
--------- --- --- ---- -------
File creation : 24347.849 24347.849 24347.849 0.000
File stat : 0.000 0.000 0.000 0.000
File read : 0.000 0.000 0.000 0.000
File removal : 0.000 0.000 0.000 0.000
Tree creation : 7.125 7.125 7.125 0.000
Tree removal : 0.000 0.000 0.000 0.000
-- finished at 11/08/2018 17:00:37 --
- result_summary
-
[RESULT] BW phase 1 ior_easy_write 37.540 GB/s : time 343.38 seconds
[RESULT] IOPS phase 1 mdtest_easy_write 199.685 kiops : time 325.87 seconds
[RESULT] BW phase 2 ior_hard_write 0.262 GB/s : time 300.21 seconds
[RESULT] IOPS phase 2 mdtest_hard_write 24.348 kiops : time 395.55 seconds
[RESULT] IOPS phase 3 find 3332.110 kiops : time 21.96 seconds
[RESULT] BW phase 3 ior_easy_read 35.374 GB/s : time 364.41 seconds
[RESULT] IOPS phase 4 mdtest_easy_stat 527.669 kiops : time 124.08 seconds
[RESULT] BW phase 4 ior_hard_read 4.627 GB/s : time 17.03 seconds
[RESULT] IOPS phase 5 mdtest_hard_stat 79.476 kiops : time 106.64 seconds
[RESULT] IOPS phase 6 mdtest_easy_delete 226.094 kiops : time 288.22 seconds
[RESULT] IOPS phase 7 mdtest_hard_read 46.141 kiops : time 182.72 seconds
[RESULT] IOPS phase 8 mdtest_hard_delete 58.842 kiops : time 143.64 seconds
[SCORE] Bandwidth 6.33725 GB/s : IOPS 159.413 kiops : TOTAL 31.7843
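The [SCORE] line is consistent with the usual IO-500 scoring rule: the bandwidth
score is the geometric mean of the four BW phases (GB/s), the IOPS score is the
geometric mean of the eight IOPS phases (kiops), and the total is the square
root of their product. A small awk sketch of that calculation (it uses the
rounded per-phase values printed above, so it reproduces the reported numbers
only approximately):

awk 'BEGIN {
  split("37.540 0.262 35.374 4.627", bw)                                        # GB/s
  split("199.685 24.348 3332.110 527.669 79.476 226.094 46.141 58.842", md)     # kiops
  gb = 1; for (i = 1; i <= 4; i++) gb *= bw[i]; gb = gb^(1/4)
  gm = 1; for (i = 1; i <= 8; i++) gm *= md[i]; gm = gm^(1/8)
  printf "Bandwidth %.5f GB/s : IOPS %.3f kiops : TOTAL %.4f\n", gb, gm, sqrt(gb*gm)
}'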