- io500
-
#!/bin/bash
#
# INSTRUCTIONS:
# This script takes its parameters from the same .ini file as io500 binary.
function setup_paths {
  # Define the benchmark binaries and the MPI launcher used to run them.
  # ./utilities/prepare.sh builds these binaries into ./bin/.
  local bindir="$PWD/bin"
  io500_ior_cmd="$bindir/ior"
  io500_mdtest_cmd="$bindir/mdtest"
  io500_mdreal_cmd="$bindir/md-real-io"
  io500_mpirun="mpirun"
  io500_mpiargs="-npernode 36 --hostfile /home/fedora/io500-app/hosts"
}
function setup_directories {
# Create the data and result directory trees for both the "scr" (script)
# and "app" (C binary) runs, then apply CephFS subtree-pinning xattrs to
# the mdtest-easy parent directory according to CEPH_MDTEST_EASY_PINNING.
# Globals written: timestamp, io500_workdir, io500_result_dir, plus the
# Ceph tuning knobs below (left non-local on purpose so they are visible
# elsewhere for debugging).
local workdir
local resultdir
local ts
# set directories where benchmark files are created and where the results go
# If you want to set up stripe tuning on your output directories or anything
# similar, then this is the right place to do it. This creates the output
# directories for both the app run and the script run.
# Ceph Stuff
MAX_MDS=61
MPI_RANKS=360
CEPH_MDTEST_EASY_PINNING=distributed # none, export-rr, distributed, random
CEPH_MDTEST_EASY_PINNING_RANDOM=1.0
# MDTEST_EASY_FILES_PER_PROC=400000
# MDTEST_HARD_FILES_PER_PROC=100000
# MDTEST_HARD_FILES=$(( ${MPI_RANKS} * ${MDTEST_HARD_FILES_PER_PROC} ))
timestamp=$(date +%Y.%m.%d-%H.%M.%S) # create a uniquifier
# use the timestamp as directory suffix unless the ini explicitly disables it
[ $(get_ini_global_param timestamp-datadir True) != "False" ] &&
ts="$timestamp" || ts="io500"
# directory where the data will be stored
workdir=$(get_ini_global_param datadir $PWD/datafiles)/$ts
io500_workdir=$workdir-scr
[ $(get_ini_global_param timestamp-resultdir True) != "False" ] &&
ts="$timestamp" || ts="io500"
# the directory where the output results will be kept
resultdir=$(get_ini_global_param resultdir $PWD/results)/$ts
io500_result_dir=$resultdir-scr
mkdir -p $workdir-{scr,app} $resultdir-{scr,app}
# The app and script runs use different mdtest directory names
# ("mdtest-easy" vs "mdt_easy"), so compute the parent paths per flavor.
for pf in scr app
do
wd="$workdir-${pf}"
rd="$resultdir-${pf}"
mdt_easy_parent="${wd}/mdt_easy/test-dir.0-0"
mdt_hard_parent="${wd}/mdt_hard/test-dir.0-0"
mdt_hard_dir="${wd}/mdt_hard/test-dir.0-0/mdtest_tree.0"
if [ $pf == "app" ]
then
mdt_easy_parent="${wd}/mdtest-easy/test-dir.0-0"
mdt_hard_parent="${wd}/mdtest-hard/test-dir.0-0"
mdt_hard_dir="${wd}/mdtest-hard/test-dir.0-0/mdtest_tree.0"
fi
# Create the result directory and "top-level" mdt parent directories
# (Should be legal based on io500 submission rules #11)
mkdir -p "${rd}"
mkdir -p "${mdt_easy_parent}"
mkdir -p "${mdt_hard_parent}"
# *** Per-Directory Round-Robin Pinning (Minus Auth MDS) ***
# This option likely violates rule #11 by creating the mdtest directories
# (but directory creation time doesn't actually affect the score?)
# Only use for testing currently.
if [[ "${CEPH_MDTEST_EASY_PINNING}" == "export-rr" ]]
then
# pin the parent to mds rank 0, then spread the per-MPI-rank subtrees
# round-robin over mds ranks 1..MOD
setfattr -n ceph.dir.pin -v 0 "${mdt_easy_parent}"
MOD=$(( ${MAX_MDS} - 1 ))
MAXRANK=$(( ${MPI_RANKS} ))
for (( RANK=0; RANK<${MAXRANK}; RANK++ ))
do
n=0
if [ ${MOD} -gt 0 ]
then
n=$(( ${RANK}%${MOD} + 1 ))
fi
echo "MPI rank ${RANK} assigned to mds rank ${n}"
mkdir -p "${mdt_easy_parent}/mdtest_tree.${RANK}.0"
setfattr -n ceph.dir.pin -v ${n} "${mdt_easy_parent}/mdtest_tree.${RANK}.0"
# setfattr -n ceph.dir.expected_files -v ${MDTEST_EASY_FILES_PER_PROC} "${mdt_easy_parent}/mdtest_tree.${RANK}.0"
done
# *** Distributed Pinning ***
# Shouldn't violate rule #11 because we are working on the parent dir
elif [[ "${CEPH_MDTEST_EASY_PINNING}" == "distributed" ]]
then
setfattr -n ceph.dir.pin.distributed -v 1 ${mdt_easy_parent}
# *** Random Pinning ***
# Shouldn't violate rule #11 because we are working on the parent dir
elif [[ "${CEPH_MDTEST_EASY_PINNING}" == "random" ]]
then
setfattr -n ceph.dir.pin.random -v ${CEPH_MDTEST_EASY_PINNING_RANDOM} ${mdt_easy_parent}
fi;
# *** Experimental Hard Directory expected_files and "hot" hints ***
# This option likely violates rule #11 because it pre-creates the (single!)
# mdtest subdirectory workdir.
# Only use for testing currently with https://github.com/ceph/ceph/pull/34574.
# echo "Setting expected mdtest hard files to ${MDTEST_HARD_FILES}"
# mkdir -p "${mdt_hard_dir}"
# setfattr -n ceph.dir.expected_files -v ${MDTEST_HARD_FILES} "${mdt_hard_dir}"
# setfattr -n ceph.dir.hot -v 1 "${mdt_hard_dir}"
done
}
# you should not edit anything below this line
set -eo pipefail # better error handling
# The .ini file is the single required positional argument.
io500_ini="${1:-""}"
if [[ -z "$io500_ini" ]]; then
  # FIX: the usage text previously ended after "$0" with no argument shown
  echo "error: ini file must be specified. usage: $0 <io500.ini>"
  exit 1
fi
if [[ ! -s "$io500_ini" ]]; then
  echo "error: ini file '$io500_ini' not found or empty"
  exit 2
fi
function get_ini_section_param() {
# Print the value of <param> ($2) from ini section [<section>] ($1) found
# in the file named by the global io500_ini; prints an empty string when
# the param is not found.
local section="$1"
local param="$2"
local inside=false
while read LINE; do
# strip trailing "# comment" text and normalize the first " = " to "="
LINE=$(sed -e 's/ *#.*//' -e '1s/ *= */=/' <<<$LINE)
# NOTE(review): a quoted right-hand side of =~ is matched as a literal
# substring in bash, not as a regex -- "[.*]" below only matches a line
# containing the literal text "[.*]", so scanning may continue past the
# end of the target section into later sections. Confirm against the
# ini files in actual use before changing this.
$inside && [[ "$LINE" =~ "[.*]" ]] && inside=false && break
# literal substring match against "[$section]" marks section entry
[[ -n "$section" && "$LINE" =~ "[$section]" ]] && inside=true && continue
! $inside && continue
#echo $LINE | awk -F = "/^$param/ { print \$2 }"
if [[ $(echo $LINE | grep "^$param *=" ) != "" ]] ; then
# echo "$section : $param : $inside : $LINE" >> parsed.txt # debugging
# print everything after the first "=" (leading whitespace stripped)
echo $LINE | sed -e "s/[^=]*=[ \t]*\(.*\)/\1/"
return
fi
done < $io500_ini
echo ""
}
function get_ini_param() {
  # Look up <param> ($2) starting from the most specific ini section ($1),
  # falling back to the section name with its last "-suffix" stripped
  # (e.g. ior-easy-read -> ior-easy), then [global], then <default> ($3).
  # Boolean spellings are canonicalized to True/False for callers.
  local section="$1"
  local param="$2"
  local default="$3"
  # FIX: val was not declared local and leaked into the global namespace
  local val
  val=$(get_ini_section_param "$section" "$param")
  [ -n "$val" ] || val="$(get_ini_section_param "${section%-*}" "$param")"
  [ -n "$val" ] || val="$(get_ini_section_param global "$param")"
  echo "${val:-$default}" |
    sed -e 's/[Ff][Aa][Ll][Ss][Ee]/False/' -e 's/[Tt][Rr][Uu][Ee]/True/'
}
function get_ini_run_param() {
  # Decide whether a benchmark phase should run. A section containing
  # "noRun = TRUE" disables the phase (prints "False"); otherwise the
  # caller-supplied default is printed. Note the inverted sense:
  # noRun=TRUE  ->  run=False.
  local section="$1"
  local default="$2"
  local val
  val=$(get_ini_section_param "$section" noRun)
  if [[ $val = [Tt][Rr][Uu][Ee] ]]; then
    echo "False"
  else
    echo "$default"
  fi
}
function get_ini_global_param() {
  # Fetch <param> ($1) from the [global] ini section, canonicalizing the
  # boolean spellings to True/False; print <default> ($2) when unset.
  local param="$1"
  local default="$2"
  local val
  val=$(get_ini_section_param global "$param" |
        sed -e 's/[Ff][Aa][Ll][Ss][Ee]/False/' -e 's/[Tt][Rr][Uu][Ee]/True/')
  echo "${val:-$default}"
}
# Per-phase run switches, each controlled by "noRun" in its ini section.
# does the write phase and enables the subsequent read
io500_run_ior_easy="$(get_ini_run_param ior-easy True)"
# does the creat phase and enables the subsequent stat
io500_run_md_easy="$(get_ini_run_param mdtest-easy True)"
# does the write phase and enables the subsequent read
io500_run_ior_hard="$(get_ini_run_param ior-hard True)"
# does the creat phase and enables the subsequent read
io500_run_md_hard="$(get_ini_run_param mdtest-hard True)"
io500_run_find="$(get_ini_run_param find True)"
io500_run_ior_easy_read="$(get_ini_run_param ior-easy-read True)"
io500_run_md_easy_stat="$(get_ini_run_param mdtest-easy-stat True)"
io500_run_ior_hard_read="$(get_ini_run_param ior-hard-read True)"
# FIX: these two previously read the [mdtest-easy-stat] section (copy-paste
# error), so the mdtest-hard stat/read phases could not be toggled on their own
io500_run_md_hard_stat="$(get_ini_run_param mdtest-hard-stat True)"
io500_run_md_hard_read="$(get_ini_run_param mdtest-hard-read True)"
# turn this off if you want to just run find by itself
io500_run_md_easy_delete="$(get_ini_run_param mdtest-easy-delete True)"
# turn this off if you want to just run find by itself
io500_run_md_hard_delete="$(get_ini_run_param mdtest-hard-delete True)"
io500_run_mdreal="$(get_ini_run_param mdreal False)"
# attempt to clean the cache after every benchmark, useful for validating the performance results and for testing with a local node; it uses the io500_clean_cache_cmd (can be overwritten); make sure the user can write to /proc/sys/vm/drop_caches
io500_clean_cache="$(get_ini_global_param drop-caches False)"
io500_clean_cache_cmd="$(get_ini_global_param drop-caches-cmd)"
# NOTE(review): no default is passed here, so cleanup is empty unless the
# ini sets it -- confirm whether a True default was intended
io500_cleanup_workdir="$(get_ini_run_param cleanup)"
# Stonewalling timer, set to 300 to be an official run; set to 0, if you never want to abort...
io500_stonewall_timer=$(get_ini_param debug stonewall-time 300)
# Choose regular for an official regular submission or scc for a Student Cluster Competition submission to execute the test cases for 30 seconds instead of 300 seconds
io500_rules="regular"
# to run this benchmark, find and edit each of these functions. Please also
# also edit 'extra_description' function to help us collect the required data.
function main {
  # Prepare directories and binaries, configure every benchmark phase,
  # run the benchmarks, then package the results for submission.
  setup_directories
  setup_paths
  setup_ior_easy # required if you want a complete score
  setup_ior_hard # required if you want a complete score
  setup_mdt_easy # required if you want a complete score
  setup_mdt_hard # required if you want a complete score
  setup_find     # required if you want a complete score
  setup_mdreal   # optional
  run_benchmarks
  # An official submission must include a system description file.
  if [[ -s "system-information.txt" ]]; then
    cp "system-information.txt" $io500_result_dir
  else
    echo "Warning: please create a system-information.txt description by"
    echo "copying the information from https://vi4io.org/io500-info-creator/"
  fi
  create_tarball
}
function setup_ior_easy {
  # Build the ior-easy flags (io500_ior_easy_params) and the block size
  # (io500_ior_easy_size, bare number of MiB) from the ini file.
  local params
  # FIX: val and i were not declared local and leaked into the global scope
  local val
  local i
  io500_ior_easy_size=$(get_ini_param ior-easy blockSize 9920000m | tr -d m)
  val=$(get_ini_param ior-easy API POSIX)
  [ -n "$val" ] && params+=" -a $val"
  val="$(get_ini_param ior-easy transferSize)"
  [ -n "$val" ] && params+=" -t $val"
  val="$(get_ini_param ior-easy hintsFileName)"
  [ -n "$val" ] && params+=" -U $val"
  val="$(get_ini_param ior-easy posix.odirect)"
  [ "$val" = "True" ] && params+=" --posix.odirect"
  # verbosity N adds N "-v" flags
  val="$(get_ini_param ior-easy verbosity)"
  if [ -n "$val" ]; then
    for i in $(seq $val); do
      params+=" -v"
    done
  fi
  io500_ior_easy_params="$params"
  # keep a zero exit status (script runs under "set -e") even if the last
  # conditional above evaluated false
  echo -n ""
}
function setup_mdt_easy {
  # Configure the mdtest-easy phase (flags and files-per-process count)
  # from the ini file.
  # FIX: val was not declared local and leaked into the global scope
  local val
  io500_mdtest_easy_params="-u -L" # unique dir per thread, files only at leaves
  val=$(get_ini_param mdtest-easy n 1000000)
  [ -n "$val" ] && io500_mdtest_easy_files_per_proc="$val"
  val=$(get_ini_param mdtest-easy API POSIX)
  [ -n "$val" ] && io500_mdtest_easy_params+=" -a $val"
  val=$(get_ini_param mdtest-easy posix.odirect)
  [ "$val" = "True" ] && io500_mdtest_easy_params+=" --posix.odirect"
  # keep a zero exit status for "set -e" if the last test above was false
  echo -n ""
}
function setup_ior_hard {
  # Configure the ior-hard phase (API, segment count, extra flags) from
  # the ini file.
  local params
  # FIX: val and i were not declared local and leaked into the global scope
  local val
  local i
  io500_ior_hard_api=$(get_ini_param ior-hard API POSIX)
  io500_ior_hard_writes_per_proc="$(get_ini_param ior-hard segmentCount 10000000)"
  val="$(get_ini_param ior-hard hintsFileName)"
  [ -n "$val" ] && params+=" -U $val"
  val="$(get_ini_param ior-hard posix.odirect)"
  [ "$val" = "True" ] && params+=" --posix.odirect"
  # FIX: verbosity was previously read from [ior-easy] (copy-paste error)
  val="$(get_ini_param ior-hard verbosity)"
  if [ -n "$val" ]; then
    for i in $(seq $val); do
      params+=" -v"
    done
  fi
  io500_ior_hard_api_specific_options="$params"
  # keep a zero exit status for "set -e" if the last test above was false
  echo -n ""
}
function setup_mdt_hard {
  # Configure the mdtest-hard phase (files-per-process count and API)
  # from the ini file.
  # FIX: val was not declared local and leaked into the global scope
  local val
  val=$(get_ini_param mdtest-hard n 1000000)
  [ -n "$val" ] && io500_mdtest_hard_files_per_proc="$val"
  io500_mdtest_hard_api="$(get_ini_param mdtest-hard API POSIX)"
  io500_mdtest_hard_api_specific_options=""
  # keep a zero exit status for "set -e" if the test above was false
  echo -n ""
}
function setup_find {
  # Choose the "find" implementation: the bundled MPI-parallel pfind by
  # default, or an external script when the ini file names one.
  val="$(get_ini_param find external-script)"
  if [ -z "$val" ]; then
    io500_find_mpi="True"
    io500_find_cmd="$PWD/bin/pfind"
  else
    io500_find_cmd="$val"
  fi
  # extra arguments forwarded to the find command (pfind uses stonewalling)
  io500_find_cmd_args="$(get_ini_param find external-extra-args)"
  echo -n ""
}
function setup_mdreal {
  # Placeholder: the optional md-real-io benchmark needs no extra setup.
  # The no-op keeps a zero exit status under "set -e".
  :
}
function run_benchmarks {
  # Run the compiled io500 app and the shell-driven benchmark in random
  # order, to avoid systematically favoring either with a warm cache.
  local app_first=$((RANDOM % 100))
  local app_rc=0
  # FIX: the old one-liner "(( app_first >= 50 )) && cmd || app_rc=$?" set
  # app_rc=1 whenever the arithmetic test itself was false, conflating
  # "app not run yet" with "app failed"; use an explicit if instead.
  if (( app_first >= 50 )); then
    $io500_mpirun $io500_mpiargs $PWD/io500 $io500_ini --timestamp $timestamp || app_rc=$?
  fi
  # Important: source the io500_fixed.sh script. Do not change it. If you
  # discover a need to change it, please email the mailing list to discuss.
  source build/io500-dev/utilities/io500_fixed.sh 2>&1 |
    tee $io500_result_dir/io-500-summary.$timestamp.txt
  # if the app already ran first, we are done; report its exit status
  if (( app_first >= 50 )); then
    return $app_rc
  fi
  echo "The io500.sh was run"
  echo
  echo "Running the C version of the benchmark now"
  $io500_mpirun $io500_mpiargs $PWD/io500 $io500_ini --timestamp $timestamp
}
create_tarball() {
  # Bundle both result directories (<name>-app and <name>-scr) plus this
  # script and the ini file into one tarball next to the result dirs.
  # FIX: quote all expansions (paths may contain spaces) and split
  # "local v=$(cmd)" so command failures are not masked by "local".
  local sourcedir fname tarball
  sourcedir=$(dirname "$io500_result_dir")
  fname=$(basename "${io500_result_dir%-scr}")
  tarball="$sourcedir/io500-$HOSTNAME-$fname.tgz"
  # archive the exact script and configuration used alongside the results
  cp -v -- "$0" "$io500_ini" "$io500_result_dir"
  tar czf "$tarball" -C "$sourcedir" "$fname-app" "$fname-scr"
  echo "Created result tarball $tarball"
}
# Information fields; these provide information about your system hardware
# Use https://vi4io.org/io500-info-creator/ to generate information about
# your hardware that you want to include publicly!
function extra_description {
  # Intentionally empty: hardware/system details belong in the separate
  # "system-information.txt" file (built with the io500 info-creator at
  # https://vi4io.org/io500-info-creator/), not in variables here.
  # The no-op keeps a zero exit status.
  :
}
main
- ior_easy_read
-
IOR-3.3.0+dev: MPI Coordinated Test of Parallel I/O
Began : Tue Jul 7 21:57:06 2020
Command line : /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/bin/ior -r -R -a CEPHFS --cephfs.user=admin --cephfs.conf=/etc/ceph/ceph.conf --cephfs.prefix=/tmp/cbt/mnt/cbt-cephfs-kernel/0 -t 2m -b 9920000m -F -i 1 -C -Q 1 -g -G 27 -k -e -o /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.07-20.27.19-scr/ior_easy/ior_file_easy -O stoneWallingStatusFile=/tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.07-20.27.19-scr/ior_easy/stonewall
Machine : Linux io500-c1
TestID : 0
StartTime : Tue Jul 7 21:57:06 2020
Path : /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.07-20.27.19-scr/ior_easy
FS : 49.5 TiB Used FS: 33.3% Inodes: 93.0 Mi Used Inodes: 100.0%
Options:
api : CEPHFS
apiVersion :
test filename : /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.07-20.27.19-scr/ior_easy/ior_file_easy
access : file-per-process
type : independent
segments : 1
ordering in a file : sequential
ordering inter file : constant task offset
task offset : 1
nodes : 10
tasks : 360
clients per node : 36
repetitions : 1
xfersize : 2 MiB
blocksize : 9.46 TiB
aggregate filesize : 3405.76 TiB
Results:
access bw(MiB/s) IOPS Latency(s) block(KiB) xfer(KiB) open(s) wr/rd(s) close(s) total(s) iter
------ --------- ---- ---------- ---------- --------- -------- -------- -------- -------- ----
WARNING: Expected aggregate file size = 3744674611200000.
WARNING: Stat() of aggregate file size = 15104024248320.
WARNING: Using actual aggregate bytes moved = 15104024248320.
read 77436 38773 0.000037 10158080000 2048.00 0.264477 185.75 0.002548 186.02 0
Max Read: 77435.72 MiB/sec (81197.24 MB/sec)
Summary of all tests:
Operation Max(MiB) Min(MiB) Mean(MiB) StdDev Max(OPs) Min(OPs) Mean(OPs) StdDev Mean(s) Stonewall(s) Stonewall(MiB) Test# #Tasks tPN reps fPP reord reordoff reordrand seed segcnt blksiz xsize aggs(MiB) API RefNum
read 77435.72 77435.72 77435.72 0.00 38717.86 38717.86 38717.86 0.00 186.01648 NA NA 0 360 36 1 1 1 1 0 0 1 10401873920000 2097152 14404320.0 CEPHFS 0
Finished : Tue Jul 7 22:00:12 2020
- ior_easy_write
-
IOR-3.3.0+dev: MPI Coordinated Test of Parallel I/O
Began : Tue Jul 7 21:29:51 2020
Command line : /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/bin/ior -w -a CEPHFS --cephfs.user=admin --cephfs.conf=/etc/ceph/ceph.conf --cephfs.prefix=/tmp/cbt/mnt/cbt-cephfs-kernel/0 -t 2m -b 9920000m -F -i 1 -C -Q 1 -g -G 27 -k -e -o /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.07-20.27.19-scr/ior_easy/ior_file_easy -O stoneWallingStatusFile=/tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.07-20.27.19-scr/ior_easy/stonewall -O stoneWallingWearOut=1 -D 300
Machine : Linux io500-c1
TestID : 0
StartTime : Tue Jul 7 21:29:51 2020
Path : /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.07-20.27.19-scr/ior_easy
FS : 50.1 TiB Used FS: 28.7% Inodes: 3.6 Mi Used Inodes: 100.0%
Options:
api : CEPHFS
apiVersion :
test filename : /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.07-20.27.19-scr/ior_easy/ior_file_easy
access : file-per-process
type : independent
segments : 1
ordering in a file : sequential
ordering inter file : constant task offset
task offset : 1
nodes : 10
tasks : 360
clients per node : 36
repetitions : 1
xfersize : 2 MiB
blocksize : 9.46 TiB
aggregate filesize : 3405.76 TiB
stonewallingTime : 300
stoneWallingWearOut : 1
Results:
access bw(MiB/s) IOPS Latency(s) block(KiB) xfer(KiB) open(s) wr/rd(s) close(s) total(s) iter
------ --------- ---- ---------- ---------- --------- -------- -------- -------- -------- ----
stonewalling pairs accessed min: 11884 max: 20006 -- min data: 23.2 GiB mean data: 31.7 GiB time: 301.4s
WARNING: Expected aggregate file size = 3744674611200000.
WARNING: Stat() of aggregate file size = 15104024248320.
WARNING: Using actual aggregate bytes moved = 15104024248320.
WARNING: maybe caused by deadlineForStonewalling
write 37125 18565 0.000076 10158080000 2048.00 0.047559 387.94 0.002404 387.99 0
Max Write: 37125.48 MiB/sec (38928.89 MB/sec)
Summary of all tests:
Operation Max(MiB) Min(MiB) Mean(MiB) StdDev Max(OPs) Min(OPs) Mean(OPs) StdDev Mean(s) Stonewall(s) Stonewall(MiB) Test# #Tasks tPN reps fPP reord reordoff reordrand seed segcnt blksiz xsize aggs(MiB) API RefNum
write 37125.48 37125.48 37125.48 0.00 18562.74 18562.74 18562.74 0.00 387.99011 301.43 38731.90 0 360 36 1 1 1 1 0 0 1 10401873920000 2097152 14404320.0 CEPHFS 0
Finished : Tue Jul 7 21:36:19 2020
- ior_hard_read
-
IOR-3.3.0+dev: MPI Coordinated Test of Parallel I/O
Began : Tue Jul 7 22:01:51 2020
Command line : /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/bin/ior -r -R -s 1000000 -a CEPHFS --cephfs.user=admin --cephfs.conf=/etc/ceph/ceph.conf --cephfs.prefix=/tmp/cbt/mnt/cbt-cephfs-kernel/0 -i 1 -C -Q 1 -g -G 27 -k -e -t 47008 -b 47008 -o /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.07-20.27.19-scr/ior_hard/IOR_file -O stoneWallingStatusFile=/tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.07-20.27.19-scr/ior_hard/stonewall
Machine : Linux io500-c1
TestID : 0
StartTime : Tue Jul 7 22:01:51 2020
Path : /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.07-20.27.19-scr/ior_hard
FS : 49.5 TiB Used FS: 33.3% Inodes: 93.0 Mi Used Inodes: 100.0%
Options:
api : CEPHFS
apiVersion :
test filename : /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.07-20.27.19-scr/ior_hard/IOR_file
access : single-shared-file
type : independent
segments : 1000000
ordering in a file : sequential
ordering inter file : constant task offset
task offset : 1
nodes : 10
tasks : 360
clients per node : 36
repetitions : 1
xfersize : 47008 bytes
blocksize : 47008 bytes
aggregate filesize : 15.39 TiB
Results:
access bw(MiB/s) IOPS Latency(s) block(KiB) xfer(KiB) open(s) wr/rd(s) close(s) total(s) iter
------ --------- ---- ---------- ---------- --------- -------- -------- -------- -------- ----
WARNING: Expected aggregate file size = 16922880000000.
WARNING: Stat() of aggregate file size = 2958982490880.
WARNING: Using actual aggregate bytes moved = 2958982490880.
read 19538 436453 144.22 45.91 45.91 0.210038 144.22 0.001951 144.43 0
Max Read: 19537.96 MiB/sec (20487.04 MB/sec)
Summary of all tests:
Operation Max(MiB) Min(MiB) Mean(MiB) StdDev Max(OPs) Min(OPs) Mean(OPs) StdDev Mean(s) Stonewall(s) Stonewall(MiB) Test# #Tasks tPN reps fPP reord reordoff reordrand seed segcnt blksiz xsize aggs(MiB) API RefNum
read 19537.96 19537.96 19537.96 0.00 435820.27 435820.27 435820.27 0.00 144.43192 NA NA 0 360 36 1 0 1 1 0 0 1000000 47008 47008 2821905.5 CEPHFS 0
Finished : Tue Jul 7 22:04:15 2020
- ior_hard_write
-
IOR-3.3.0+dev: MPI Coordinated Test of Parallel I/O
Began : Tue Jul 7 21:43:52 2020
Command line : /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/bin/ior -w -s 1000000 -a CEPHFS --cephfs.user=admin --cephfs.conf=/etc/ceph/ceph.conf --cephfs.prefix=/tmp/cbt/mnt/cbt-cephfs-kernel/0 -i 1 -C -Q 1 -g -G 27 -k -e -t 47008 -b 47008 -o /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.07-20.27.19-scr/ior_hard/IOR_file -O stoneWallingStatusFile=/tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.07-20.27.19-scr/ior_hard/stonewall -O stoneWallingWearOut=1 -D 300
Machine : Linux io500-c1
TestID : 0
StartTime : Tue Jul 7 21:43:52 2020
Path : /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.07-20.27.19-scr/ior_hard
FS : 49.6 TiB Used FS: 32.4% Inodes: 83.7 Mi Used Inodes: 100.0%
Options:
api : CEPHFS
apiVersion :
test filename : /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.07-20.27.19-scr/ior_hard/IOR_file
access : single-shared-file
type : independent
segments : 1000000
ordering in a file : sequential
ordering inter file : constant task offset
task offset : 1
nodes : 10
tasks : 360
clients per node : 36
repetitions : 1
xfersize : 47008 bytes
blocksize : 47008 bytes
aggregate filesize : 15.39 TiB
stonewallingTime : 300
stoneWallingWearOut : 1
Results:
access bw(MiB/s) IOPS Latency(s) block(KiB) xfer(KiB) open(s) wr/rd(s) close(s) total(s) iter
------ --------- ---- ---------- ---------- --------- -------- -------- -------- -------- ----
stonewalling pairs accessed min: 174272 max: 174851 -- min data: 7.6 GiB mean data: 7.6 GiB time: 300.3s
WARNING: Expected aggregate file size = 16922880000000.
WARNING: Stat() of aggregate file size = 2958982490880.
WARNING: Using actual aggregate bytes moved = 2958982490880.
WARNING: maybe caused by deadlineForStonewalling
write 9356 208979 300.75 45.91 45.91 0.420023 301.21 0.002243 301.63 0
Max Write: 9355.55 MiB/sec (9810.01 MB/sec)
Summary of all tests:
Operation Max(MiB) Min(MiB) Mean(MiB) StdDev Max(OPs) Min(OPs) Mean(OPs) StdDev Mean(s) Stonewall(s) Stonewall(MiB) Test# #Tasks tPN reps fPP reord reordoff reordrand seed segcnt blksiz xsize aggs(MiB) API RefNum
write 9355.55 9355.55 9355.55 0.00 208688.08 208688.08 208688.08 0.00 301.62892 300.33 9376.76 0 360 36 1 0 1 1 0 0 1000000 47008 47008 2821905.5 CEPHFS 0
Finished : Tue Jul 7 21:48:54 2020
- mdtest_easy_delete
-
-- started at 07/07/2020 22:05:27 --
mdtest-3.3.0+dev was launched with 360 total task(s) on 10 node(s)
Command line used: /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/bin/mdtest '-r' '-F' '-P' '-d' '/tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.07-20.27.19-scr/mdt_easy' '-n' '240000' '-u' '-L' '-a' 'CEPHFS' '--cephfs.user=admin' '--cephfs.conf=/etc/ceph/ceph.conf' '--cephfs.prefix=/tmp/cbt/mnt/cbt-cephfs-kernel/0' '-x' '/tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.07-20.27.19-scr/mdt_easy-stonewall' '-N' '1'
Path: /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.07-20.27.19-scr
FS: 49.5 TiB Used FS: 33.3% Inodes: 88.8 Mi Used Inodes: 100.0%
Nodemap: 111111111111111111111111111111111111000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
V-0: Rank 0 Line 2166 Shifting ranks by 36 for each phase.
360 tasks, 86400000 files
SUMMARY rate: (of 1 iterations)
Operation Max Min Mean Std Dev
--------- --- --- ---- -------
File creation : 0.000 0.000 0.000 0.000
File stat : 0.000 0.000 0.000 0.000
File read : 0.000 0.000 0.000 0.000
File removal : 123455.169 123454.716 123455.044 0.080
Tree creation : 0.000 0.000 0.000 0.000
Tree removal : 0.230 0.230 0.230 0.000
SUMMARY time: (of 1 iterations)
Operation Max Min Mean Std Dev
--------- --- --- ---- -------
File creation : 0.000 0.000 0.000 0.000
File stat : 0.000 0.000 0.000 0.000
File read : 0.000 0.000 0.000 0.000
File removal : 699.852 699.849 699.850 0.000
Tree creation : 0.000 0.000 0.000 0.000
Tree removal : 4.349 4.349 4.349 0.000
-- finished at 07/07/2020 22:17:12 --
- mdtest_easy_stat
-
-- started at 07/07/2020 22:00:14 --
mdtest-3.3.0+dev was launched with 360 total task(s) on 10 node(s)
Command line used: /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/bin/mdtest '-T' '-F' '-P' '-d' '/tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.07-20.27.19-scr/mdt_easy' '-n' '240000' '-u' '-L' '-a' 'CEPHFS' '--cephfs.user=admin' '--cephfs.conf=/etc/ceph/ceph.conf' '--cephfs.prefix=/tmp/cbt/mnt/cbt-cephfs-kernel/0' '-x' '/tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.07-20.27.19-scr/mdt_easy-stonewall' '-N' '1'
Path: /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.07-20.27.19-scr
FS: 49.5 TiB Used FS: 33.3% Inodes: 88.8 Mi Used Inodes: 100.0%
Nodemap: 111111111111111111111111111111111111000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
V-0: Rank 0 Line 2166 Shifting ranks by 36 for each phase.
360 tasks, 86400000 files
SUMMARY rate: (of 1 iterations)
Operation Max Min Mean Std Dev
--------- --- --- ---- -------
File creation : 0.000 0.000 0.000 0.000
File stat : 903111.863 903077.916 903108.192 2.379
File read : 0.000 0.000 0.000 0.000
File removal : 0.000 0.000 0.000 0.000
Tree creation : 0.000 0.000 0.000 0.000
Tree removal : 0.000 0.000 0.000 0.000
SUMMARY time: (of 1 iterations)
Operation Max Min Mean Std Dev
--------- --- --- ---- -------
File creation : 0.000 0.000 0.000 0.000
File stat : 95.673 95.669 95.670 0.000
File read : 0.000 0.000 0.000 0.000
File removal : 0.000 0.000 0.000 0.000
Tree creation : 0.000 0.000 0.000 0.000
Tree removal : 0.000 0.000 0.000 0.000
-- finished at 07/07/2020 22:01:49 --
- mdtest_easy_write
-
-- started at 07/07/2020 21:36:20 --
mdtest-3.3.0+dev was launched with 360 total task(s) on 10 node(s)
Command line used: /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/bin/mdtest '-Y' '-C' '-F' '-P' '-d' '/tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.07-20.27.19-scr/mdt_easy' '-n' '240000' '-u' '-L' '-a' 'CEPHFS' '--cephfs.user=admin' '--cephfs.conf=/etc/ceph/ceph.conf' '--cephfs.prefix=/tmp/cbt/mnt/cbt-cephfs-kernel/0' '-x' '/tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.07-20.27.19-scr/mdt_easy-stonewall' '-N' '1' '-W' '300'
Path: /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.07-20.27.19-scr
FS: 45.9 TiB Used FS: 60.3% Inodes: 0.0 Mi Used Inodes: 100.0%
Nodemap: 111111111111111111111111111111111111000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
V-0: Rank 0 Line 2166 Shifting ranks by 36 for each phase.
360 tasks, 86400000 files
Continue stonewall hit min: 135775 max: 240000 avg: 229118.3
SUMMARY rate: (of 1 iterations)
Operation Max Min Mean Std Dev
--------- --- --- ---- -------
File creation : 191979.867 191978.287 191979.632 0.200
File stat : 0.000 0.000 0.000 0.000
File read : 0.000 0.000 0.000 0.000
File removal : 0.000 0.000 0.000 0.000
File create (stonewall) : NA NA 274209.404 NA
Tree creation : 10.320 10.320 10.320 0.000
Tree removal : 0.000 0.000 0.000 0.000
SUMMARY time: (of 1 iterations)
Operation Max Min Mean Std Dev
--------- --- --- ---- -------
File creation : 450.051 450.047 450.048 0.000
File stat : 0.000 0.000 0.000 0.000
File read : 0.000 0.000 0.000 0.000
File removal : 0.000 0.000 0.000 0.000
File create (stonewall) : NA NA 300.801 NA
Tree creation : 0.097 0.097 0.097 0.000
Tree removal : 0.000 0.000 0.000 0.000
-- finished at 07/07/2020 21:43:50 --
- mdtest_hard_delete
-
-- started at 07/07/2020 22:19:01 --
mdtest-3.3.0+dev was launched with 360 total task(s) on 10 node(s)
Command line used: /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/bin/mdtest '-r' '-t' '-F' '-P' '-w' '3901' '-e' '3901' '-d' '/tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.07-20.27.19-scr/mdt_hard' '-n' '100000' '-x' '/tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.07-20.27.19-scr/mdt_hard-stonewall' '-a' 'CEPHFS' '--cephfs.user=admin' '--cephfs.conf=/etc/ceph/ceph.conf' '--cephfs.prefix=/tmp/cbt/mnt/cbt-cephfs-kernel/0' '-N' '1'
Path: /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.07-20.27.19-scr
FS: 49.5 TiB Used FS: 33.3% Inodes: 6.5 Mi Used Inodes: 100.0%
Nodemap: 111111111111111111111111111111111111000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
V-0: Rank 0 Line 2166 Shifting ranks by 36 for each phase.
360 tasks, 36000000 files
SUMMARY rate: (of 1 iterations)
Operation Max Min Mean Std Dev
--------- --- --- ---- -------
File creation : 0.000 0.000 0.000 0.000
File stat : 0.000 0.000 0.000 0.000
File read : 0.000 0.000 0.000 0.000
File removal : 18814.221 18814.098 18814.197 0.015
Tree creation : 0.000 0.000 0.000 0.000
Tree removal : 15.164 15.164 15.164 0.000
SUMMARY time: (of 1 iterations)
Operation Max Min Mean Std Dev
--------- --- --- ---- -------
File creation : 0.000 0.000 0.000 0.000
File stat : 0.000 0.000 0.000 0.000
File read : 0.000 0.000 0.000 0.000
File removal : 359.501 359.498 359.499 0.000
Tree creation : 0.000 0.000 0.000 0.000
Tree removal : 0.066 0.066 0.066 0.000
-- finished at 07/07/2020 22:25:02 --
- mdtest_hard_read
-
-- started at 07/07/2020 22:17:43 --
mdtest-3.3.0+dev was launched with 360 total task(s) on 10 node(s)
Command line used: /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/bin/mdtest '-X' '-E' '-t' '-F' '-P' '-w' '3901' '-e' '3901' '-d' '/tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.07-20.27.19-scr/mdt_hard' '-n' '100000' '-x' '/tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.07-20.27.19-scr/mdt_hard-stonewall' '-a' 'CEPHFS' '--cephfs.user=admin' '--cephfs.conf=/etc/ceph/ceph.conf' '--cephfs.prefix=/tmp/cbt/mnt/cbt-cephfs-kernel/0' '-N' '1'
Path: /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.07-20.27.19-scr
FS: 49.5 TiB Used FS: 33.3% Inodes: 6.5 Mi Used Inodes: 100.0%
Nodemap: 111111111111111111111111111111111111000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
V-0: Rank 0 Line 2166 Shifting ranks by 36 for each phase.
360 tasks, 36000000 files
SUMMARY rate: (of 1 iterations)
Operation Max Min Mean Std Dev
--------- --- --- ---- -------
File creation : 0.000 0.000 0.000 0.000
File stat : 0.000 0.000 0.000 0.000
File read : 87512.294 87511.531 87512.012 0.136
File removal : 0.000 0.000 0.000 0.000
Tree creation : 0.000 0.000 0.000 0.000
Tree removal : 0.000 0.000 0.000 0.000
SUMMARY time: (of 1 iterations)
Operation Max Min Mean Std Dev
--------- --- --- ---- -------
File creation : 0.000 0.000 0.000 0.000
File stat : 0.000 0.000 0.000 0.000
File read : 77.289 77.288 77.289 0.000
File removal : 0.000 0.000 0.000 0.000
Tree creation : 0.000 0.000 0.000 0.000
Tree removal : 0.000 0.000 0.000 0.000
-- finished at 07/07/2020 22:19:00 --
- mdtest_hard_stat
-
-- started at 07/07/2020 22:04:16 --
mdtest-3.3.0+dev was launched with 360 total task(s) on 10 node(s)
Command line used: /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/bin/mdtest '-T' '-t' '-F' '-P' '-w' '3901' '-e' '3901' '-d' '/tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.07-20.27.19-scr/mdt_hard' '-n' '100000' '-x' '/tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.07-20.27.19-scr/mdt_hard-stonewall' '-a' 'CEPHFS' '--cephfs.user=admin' '--cephfs.conf=/etc/ceph/ceph.conf' '--cephfs.prefix=/tmp/cbt/mnt/cbt-cephfs-kernel/0' '-N' '1'
Path: /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.07-20.27.19-scr
FS: 49.5 TiB Used FS: 33.3% Inodes: 88.8 Mi Used Inodes: 100.0%
Nodemap: 111111111111111111111111111111111111000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
V-0: Rank 0 Line 2166 Shifting ranks by 36 for each phase.
360 tasks, 36000000 files
SUMMARY rate: (of 1 iterations)
Operation Max Min Mean Std Dev
--------- --- --- ---- -------
File creation : 0.000 0.000 0.000 0.000
File stat : 97399.144 97396.580 97398.669 0.244
File read : 0.000 0.000 0.000 0.000
File removal : 0.000 0.000 0.000 0.000
Tree creation : 0.000 0.000 0.000 0.000
Tree removal : 0.000 0.000 0.000 0.000
SUMMARY time: (of 1 iterations)
Operation Max Min Mean Std Dev
--------- --- --- ---- -------
File creation : 0.000 0.000 0.000 0.000
File stat : 69.445 69.443 69.443 0.000
File read : 0.000 0.000 0.000 0.000
File removal : 0.000 0.000 0.000 0.000
Tree creation : 0.000 0.000 0.000 0.000
Tree removal : 0.000 0.000 0.000 0.000
-- finished at 07/07/2020 22:05:26 --
- mdtest_hard_write
-
-- started at 07/07/2020 21:48:55 --
mdtest-3.3.0+dev was launched with 360 total task(s) on 10 node(s)
Command line used: /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/bin/mdtest '-Y' '-C' '-t' '-F' '-P' '-w' '3901' '-e' '3901' '-d' '/tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.07-20.27.19-scr/mdt_hard' '-n' '100000' '-x' '/tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.07-20.27.19-scr/mdt_hard-stonewall' '-a' 'CEPHFS' '--cephfs.user=admin' '--cephfs.conf=/etc/ceph/ceph.conf' '--cephfs.prefix=/tmp/cbt/mnt/cbt-cephfs-kernel/0' '-N' '1' '-W' '300'
Path: /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.07-20.27.19-scr
FS: 49.2 TiB Used FS: 35.4% Inodes: 82.4 Mi Used Inodes: 100.0%
Nodemap: 111111111111111111111111111111111111000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
V-0: Rank 0 Line 2166 Shifting ranks by 36 for each phase.
360 tasks, 36000000 files
Continue stonewall hit min: 13225 max: 18788 avg: 15286.1
SUMMARY rate: (of 1 iterations)
Operation Max Min Mean Std Dev
--------- --- --- ---- -------
File creation : 17186.451 17186.186 17186.385 0.054
File stat : 0.000 0.000 0.000 0.000
File read : 0.000 0.000 0.000 0.000
File removal : 0.000 0.000 0.000 0.000
File create (stonewall) : NA NA 18258.087 NA
Tree creation : 6.920 6.920 6.920 0.000
Tree removal : 0.000 0.000 0.000 0.000
SUMMARY time: (of 1 iterations)
Operation Max Min Mean Std Dev
--------- --- --- ---- -------
File creation : 393.553 393.547 393.549 0.001
File stat : 0.000 0.000 0.000 0.000
File read : 0.000 0.000 0.000 0.000
File removal : 0.000 0.000 0.000 0.000
File create (stonewall) : NA NA 301.400 NA
Tree creation : 0.145 0.145 0.145 0.000
Tree removal : 0.000 0.000 0.000 0.000
-- finished at 07/07/2020 21:55:29 --
- result_summary
-
[RESULT] BW phase 1 ior_easy_write 36.255 GiB/s : time 387.94 seconds
[RESULT] IOPS phase 1 mdtest_easy_write 191.980 kiops : time 450.05 seconds
[RESULT] BW phase 2 ior_hard_write 9.137 GiB/s : time 301.21 seconds
[RESULT] IOPS phase 2 mdtest_hard_write 17.187 kiops : time 393.55 seconds
[RESULT] IOPS phase 3 find 965.790 kiops : time 96.46 seconds
[RESULT] BW phase 3 ior_easy_read 75.621 GiB/s : time 185.75 seconds
[RESULT] IOPS phase 4 mdtest_easy_stat 903.112 kiops : time 95.67 seconds
[RESULT] BW phase 4 ior_hard_read 19.080 GiB/s : time 144.22 seconds
[RESULT] IOPS phase 5 mdtest_hard_stat 97.399 kiops : time 69.44 seconds
[RESULT] IOPS phase 6 mdtest_easy_delete 123.455 kiops : time 699.85 seconds
[RESULT] IOPS phase 7 mdtest_hard_read 87.512 kiops : time 77.29 seconds
[RESULT] IOPS phase 8 mdtest_hard_delete 18.814 kiops : time 390.91 seconds
[SCORE] Bandwidth 26.2933 GiB/s : IOPS 124.297 kiops : TOTAL 57.168