- io500
-
#!/bin/bash
#
# INSTRUCTIONS:
# This script takes its parameters from the same .ini file as the io500 binary.
function setup_paths {
# Set the paths to the binaries and how to launch MPI jobs.
# If you ran ./utilities/prepare.sh successfully, then binaries are in ./bin/
io500_ior_cmd=$PWD/bin/ior
io500_mdtest_cmd=$PWD/bin/mdtest
io500_mdreal_cmd=$PWD/bin/md-real-io
io500_mpirun="mpirun"
io500_mpiargs="-npernode 32 --hostfile /home/nhm/io500-app/hosts"
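# A hedged alternative for Slurm-managed clusters (launcher and flags below
# are generic examples, not what this run used):
# io500_mpirun="srun"
# io500_mpiargs="--ntasks-per-node=32"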
}
function setup_directories {
local workdir
local resultdir
local ts
# set directories where benchmark files are created and where the results go
# If you want to set up stripe tuning on your output directories or anything
# similar, then this is the right place to do it. This creates the output
# directories for both the app run and the script run.
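# As a hedged sketch, CephFS directory layouts can be tuned through virtual
# xattrs once the data directories exist (the pool name is hypothetical;
# these would run after the mkdir below):
# setfattr -n ceph.dir.layout.pool -v cephfs_data "${workdir}-scr"
# setfattr -n ceph.dir.layout.object_size -v 4194304 "${workdir}-scr"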
# Ceph Stuff
MAX_MDS=65
MPI_RANKS=256
CEPH_MDTEST_EASY_PINNING=distributed # none, export-rr, distributed, random
CEPH_MDTEST_EASY_PINNING_RANDOM=1.0
# MDTEST_EASY_FILES_PER_PROC=400000
# MDTEST_HARD_FILES_PER_PROC=100000
# MDTEST_HARD_FILES=$(( ${MPI_RANKS} * ${MDTEST_HARD_FILES_PER_PROC} ))
timestamp=$(date +%Y.%m.%d-%H.%M.%S) # create a uniquifier
[ $(get_ini_global_param timestamp-datadir True) != "False" ] &&
ts="$timestamp" || ts="io500"
# directory where the data will be stored
workdir=$(get_ini_global_param datadir $PWD/datafiles)/$ts
io500_workdir=$workdir-scr
[ $(get_ini_global_param timestamp-resultdir True) != "False" ] &&
ts="$timestamp" || ts="io500"
# the directory where the output results will be kept
resultdir=$(get_ini_global_param resultdir $PWD/results)/$ts
io500_result_dir=$resultdir-scr
mkdir -p $workdir-{scr,app} $resultdir-{scr,app}
for pf in scr app
do
wd="$workdir-${pf}"
rd="$resultdir-${pf}"
mdt_easy_parent="${wd}/mdt_easy/test-dir.0-0"
mdt_hard_parent="${wd}/mdt_hard/test-dir.0-0"
mdt_hard_dir="${wd}/mdt_hard/test-dir.0-0/mdtest_tree.0"
if [ $pf == "app" ]
then
mdt_easy_parent="${wd}/mdtest-easy/test-dir.0-0"
mdt_hard_parent="${wd}/mdtest-hard/test-dir.0-0"
mdt_hard_dir="${wd}/mdtest-hard/test-dir.0-0/mdtest_tree.0"
fi
# Create the result directory and "top-level" mdt parent directories
# (Should be legal based on io500 submission rule #11)
mkdir -p "${rd}"
mkdir -p "${mdt_easy_parent}"
mkdir -p "${mdt_hard_parent}"
# *** Per-Directory Round-Robin Pinning (Minus Auth MDS) ***
# This option likely violates rule #11 by creating the mdtest directories
# (but directory creation time doesn't actually affect the score?)
# Only use for testing currently.
if [[ "${CEPH_MDTEST_EASY_PINNING}" == "export-rr" ]]
then
setfattr -n ceph.dir.pin -v 0 "${mdt_easy_parent}"
MOD=$(( ${MAX_MDS} - 1 ))
MAXRANK=$(( ${MPI_RANKS} ))
for (( RANK=0; RANK<${MAXRANK}; RANK++ ))
do
n=0
if [ ${MOD} -gt 0 ]
then
n=$(( ${RANK}%${MOD} + 1 ))
fi
echo "MPI rank ${RANK} assigned to mds rank ${n}"
mkdir -p "${mdt_easy_parent}/mdtest_tree.${RANK}.0"
setfattr -n ceph.dir.pin -v ${n} "${mdt_easy_parent}/mdtest_tree.${RANK}.0"
# setfattr -n ceph.dir.expected_files -v ${MDTEST_EASY_FILES_PER_PROC} "${mdt_easy_parent}/mdtest_tree.${RANK}.0"
done
# *** Distributed Pinning ***
# Shouldn't violate rule #11 because we are working on the parent dir
elif [[ "${CEPH_MDTEST_EASY_PINNING}" == "distributed" ]]
then
setfattr -n ceph.dir.pin.distributed -v 1 ${mdt_easy_parent}
# *** Random Pinning ***
# Shouldn't violate rule #11 because we are working on the parent dir
elif [[ "${CEPH_MDTEST_EASY_PINNING}" == "random" ]]
then
setfattr -n ceph.dir.pin.random -v ${CEPH_MDTEST_EASY_PINNING_RANDOM} ${mdt_easy_parent}
fi;
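# A pin can be verified by reading the xattr back, e.g.:
# getfattr -n ceph.dir.pin.distributed "${mdt_easy_parent}"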
# *** Experimental Hard Directory expected_files and "hot" hints ***
# This option likely violates rule #11 because it pre-creates the (single!)
# mdtest subdirectory workdir.
# Only use for testing currently with https://github.com/ceph/ceph/pull/34574.
# echo "Setting expected mdtest hard files to ${MDTEST_HARD_FILES}"
# mkdir -p "${mdt_hard_dir}"
# setfattr -n ceph.dir.expected_files -v ${MDTEST_HARD_FILES} "${mdt_hard_dir}"
# setfattr -n ceph.dir.hot -v 1 "${mdt_hard_dir}"
done
}
# you should not edit anything below this line
set -eo pipefail # better error handling
io500_ini="${1:-""}"
if [[ -z "$io500_ini" ]]; then
echo "error: ini file must be specified. usage: $0 "
exit 1
fi
if [[ ! -s "$io500_ini" ]]; then
echo "error: ini file '$io500_ini' not found or empty"
exit 2
fi
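# Example invocation (the ini filename is illustrative):
#   ./io500.sh config-ceph.ini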
function get_ini_section_param() {
local section="$1"
local param="$2"
local inside=false
while read LINE; do
LINE=$(sed -e 's/ *#.*//' -e '1s/ *= */=/' <<<$LINE)
$inside && [[ "$LINE" =~ "[.*]" ]] && inside=false && break
[[ -n "$section" && "$LINE" =~ "[$section]" ]] && inside=true && continue
! $inside && continue
#echo $LINE | awk -F = "/^$param/ { print \$2 }"
if [[ $(echo $LINE | grep "^$param *=" ) != "" ]] ; then
# echo "$section : $param : $inside : $LINE" >> parsed.txt # debugging
echo $LINE | sed -e "s/[^=]*=[ \t]*\(.*\)/\1/"
return
fi
done < $io500_ini
echo ""
}
function get_ini_param() {
local section="$1"
local param="$2"
local default="$3"
# try to get the most-specific param first, then fall back to more generic params
val=$(get_ini_section_param $section $param)
[ -n "$val" ] || val="$(get_ini_section_param ${section%-*} $param)"
[ -n "$val" ] || val="$(get_ini_section_param global $param)"
echo "${val:-$default}" |
sed -e 's/[Ff][Aa][Ll][Ss][Ee]/False/' -e 's/[Tt][Rr][Uu][Ee]/True/'
}
function get_ini_run_param() {
local section="$1"
local default="$2"
local val
val=$(get_ini_section_param $section noRun)
# logic is reversed from "noRun=TRUE" to "run=False"
[[ $val = [Tt][Rr][Uu][Ee] ]] && echo "False" || echo "$default"
}
function get_ini_global_param() {
local param="$1"
local default="$2"
local val
val=$(get_ini_section_param global $param |
sed -e 's/[Ff][Aa][Ll][Ss][Ee]/False/' -e 's/[Tt][Rr][Uu][Ee]/True/')
echo "${val:-$default}"
}
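# For reference, a hedged sketch of an .ini these getters would parse
# (section and key names are the ones read in this script; values are
# illustrative only):
#   [global]
#   datadir = ./datafiles
#   resultdir = ./results
#   drop-caches = False
#   [debug]
#   stonewall-time = 300
#   [ior-easy]
#   noRun = False
#   API = CEPHFS
#   transferSize = 2m
#   blockSize = 9920000m
#   [mdtest-easy]
#   n = 240000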
# does the write phase and enables the subsequent read
io500_run_ior_easy="$(get_ini_run_param ior-easy True)"
# does the create phase and enables the subsequent stat
io500_run_md_easy="$(get_ini_run_param mdtest-easy True)"
# does the write phase and enables the subsequent read
io500_run_ior_hard="$(get_ini_run_param ior-hard True)"
# does the create phase and enables the subsequent read
io500_run_md_hard="$(get_ini_run_param mdtest-hard True)"
io500_run_find="$(get_ini_run_param find True)"
io500_run_ior_easy_read="$(get_ini_run_param ior-easy-read True)"
io500_run_md_easy_stat="$(get_ini_run_param mdtest-easy-stat True)"
io500_run_ior_hard_read="$(get_ini_run_param ior-hard-read True)"
io500_run_md_hard_stat="$(get_ini_run_param mdtest-hard-stat True)"
io500_run_md_hard_read="$(get_ini_run_param mdtest-hard-read True)"
# turn this off if you want to just run find by itself
io500_run_md_easy_delete="$(get_ini_run_param mdtest-easy-delete True)"
# turn this off if you want to just run find by itself
io500_run_md_hard_delete="$(get_ini_run_param mdtest-hard-delete True)"
io500_run_mdreal="$(get_ini_run_param mdreal False)"
# Attempt to clean the cache after every benchmark; useful for validating
# performance results and for testing with a local node. This uses
# io500_clean_cache_cmd (can be overwritten); make sure the user can write
# to /proc/sys/vm/drop_caches.
io500_clean_cache="$(get_ini_global_param drop-caches False)"
io500_clean_cache_cmd="$(get_ini_global_param drop-caches-cmd)"
io500_cleanup_workdir="$(get_ini_run_param cleanup False)"
# Stonewalling timer; set to 300 for an official run, or to 0 if you never want to abort.
io500_stonewall_timer=$(get_ini_param debug stonewall-time 300)
# Choose "regular" for an official regular submission, or "scc" for a Student
# Cluster Competition submission to execute the test cases for 30 seconds
# instead of 300 seconds.
io500_rules="regular"
# To run this benchmark, find and edit each of these functions. Please
# also edit the 'extra_description' function to help us collect the
# required data.
function main {
setup_directories
setup_paths
setup_ior_easy # required if you want a complete score
setup_ior_hard # required if you want a complete score
setup_mdt_easy # required if you want a complete score
setup_mdt_hard # required if you want a complete score
setup_find # required if you want a complete score
setup_mdreal # optional
run_benchmarks
if [[ ! -s "system-information.txt" ]]; then
echo "Warning: please create a system-information.txt description by"
echo "copying the information from https://vi4io.org/io500-info-creator/"
else
cp "system-information.txt" $io500_result_dir
fi
create_tarball
}
function setup_ior_easy {
local params
io500_ior_easy_size=$(get_ini_param ior-easy blockSize 9920000m | tr -d m)
val=$(get_ini_param ior-easy API POSIX)
[ -n "$val" ] && params+=" -a $val"
val="$(get_ini_param ior-easy transferSize)"
[ -n "$val" ] && params+=" -t $val"
val="$(get_ini_param ior-easy hintsFileName)"
[ -n "$val" ] && params+=" -U $val"
val="$(get_ini_param ior-easy posix.odirect)"
[ "$val" = "True" ] && params+=" --posix.odirect"
val="$(get_ini_param ior-easy verbosity)"
if [ -n "$val" ]; then
for i in $(seq $val); do
params+=" -v"
done
fi
io500_ior_easy_params="$params"
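# Worked example: with API=CEPHFS, transferSize=2m and verbosity=1 in the
# ini, the code above yields io500_ior_easy_params=" -a CEPHFS -t 2m -v".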
echo -n ""
}
function setup_mdt_easy {
io500_mdtest_easy_params="-u -L" # unique dir per thread, files only at leaves
val=$(get_ini_param mdtest-easy n 1000000)
[ -n "$val" ] && io500_mdtest_easy_files_per_proc="$val"
val=$(get_ini_param mdtest-easy API POSIX)
[ -n "$val" ] && io500_mdtest_easy_params+=" -a $val"
val=$(get_ini_param mdtest-easy posix.odirect)
[ "$val" = "True" ] && io500_mdtest_easy_params+=" --posix.odirect"
echo -n ""
}
function setup_ior_hard {
local params
io500_ior_hard_api=$(get_ini_param ior-hard API POSIX)
io500_ior_hard_writes_per_proc="$(get_ini_param ior-hard segmentCount 10000000)"
val="$(get_ini_param ior-hard hintsFileName)"
[ -n "$val" ] && params+=" -U $val"
val="$(get_ini_param ior-hard posix.odirect)"
[ "$val" = "True" ] && params+=" --posix.odirect"
val="$(get_ini_param ior-easy verbosity)"
if [ -n "$val" ]; then
for i in $(seq $val); do
params+=" -v"
done
fi
io500_ior_hard_api_specific_options="$params"
echo -n ""
}
function setup_mdt_hard {
val=$(get_ini_param mdtest-hard n 1000000)
[ -n "$val" ] && io500_mdtest_hard_files_per_proc="$val"
io500_mdtest_hard_api="$(get_ini_param mdtest-hard API POSIX)"
io500_mdtest_hard_api_specific_options=""
echo -n ""
}
function setup_find {
val="$(get_ini_param find external-script)"
[ -z "$val" ] && io500_find_mpi="True" && io500_find_cmd="$PWD/bin/pfind" ||
io500_find_cmd="$val"
# uses stonewalling, run pfind
io500_find_cmd_args="$(get_ini_param find external-extra-args)"
echo -n ""
}
function setup_mdreal {
echo -n ""
}
function run_benchmarks {
local app_first=$((RANDOM % 100))
local app_rc=0
# run the app and C version in random order to try and avoid bias
(( app_first >= 50 )) && $io500_mpirun $io500_mpiargs $PWD/io500 $io500_ini --timestamp $timestamp || app_rc=$?
# Important: source the io500_fixed.sh script. Do not change it. If you
# discover a need to change it, please email the mailing list to discuss.
source build/io500-dev/utilities/io500_fixed.sh 2>&1 |
tee $io500_result_dir/io-500-summary.$timestamp.txt
(( $app_first >= 50 )) && return $app_rc
echo "The io500.sh was run"
echo
echo "Running the C version of the benchmark now"
# run the app and C version in random order to try and avoid bias
$io500_mpirun $io500_mpiargs $PWD/io500 $io500_ini --timestamp $timestamp
}
create_tarball() {
local sourcedir=$(dirname $io500_result_dir)
local fname=$(basename ${io500_result_dir%-scr})
local tarball=$sourcedir/io500-$HOSTNAME-$fname.tgz
cp -v $0 $io500_ini $io500_result_dir
tar czf $tarball -C $sourcedir $fname-{app,scr}
echo "Created result tarball $tarball"
}
# Information fields; these provide information about your system hardware
# Use https://vi4io.org/io500-info-creator/ to generate information about
# your hardware that you want to include publicly!
function extra_description {
# UPDATE: Please add your information into "system-information.txt" by pasting the output of the info-creator
# EXAMPLE:
# io500_info_system_name='xxx'
# DO NOT ADD IT HERE
:
}
main
- ior_easy_read
-
IOR-3.3.0+dev: MPI Coordinated Test of Parallel I/O
Began : Wed Jul 8 17:20:15 2020
Command line : /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/bin/ior -r -R -a CEPHFS --cephfs.user=admin --cephfs.conf=/etc/ceph/ceph.conf --cephfs.prefix=/tmp/cbt/mnt/cbt-cephfs-kernel/0 -t 2m -b 9920000m -F -i 1 -C -Q 1 -g -G 27 -k -e -o /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.08-16.53.40-scr/ior_easy/ior_file_easy -O stoneWallingStatusFile=/tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.08-16.53.40-scr/ior_easy/stonewall
Machine : Linux o03
TestID : 0
StartTime : Wed Jul 8 17:20:15 2020
Path : /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.08-16.53.40-scr/ior_easy
FS : 429.8 TiB Used FS: 7.0% Inodes: 71.9 Mi Used Inodes: 100.0%
Options:
api : CEPHFS
apiVersion :
test filename : /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.08-16.53.40-scr/ior_easy/ior_file_easy
access : file-per-process
type : independent
segments : 1
ordering in a file : sequential
ordering inter file : constant task offset
task offset : 1
nodes : 8
tasks : 256
clients per node : 32
repetitions : 1
xfersize : 2 MiB
blocksize : 9.46 TiB
aggregate filesize : 2421.88 TiB
Results:
access bw(MiB/s) IOPS Latency(s) block(KiB) xfer(KiB) open(s) wr/rd(s) close(s) total(s) iter
------ --------- ---- ---------- ---------- --------- -------- -------- -------- -------- ----
WARNING: Expected aggregate file size = 2662879723520000.
WARNING: Stat() of aggregate file size = 29182155292672.
WARNING: Using actual aggregate bytes moved = 29182155292672.
read 70073 35048 0.000080 10158080000 2048.00 0.127560 397.03 0.000742 397.16 0
Max Read: 70073.02 MiB/sec (73476.89 MB/sec)
Summary of all tests:
Operation Max(MiB) Min(MiB) Mean(MiB) StdDev Max(OPs) Min(OPs) Mean(OPs) StdDev Mean(s) Stonewall(s) Stonewall(MiB) Test# #Tasks tPN reps fPP reord reordoff reordrand seed segcnt blksiz xsize aggs(MiB) API RefNum
read 70073.02 70073.02 70073.02 0.00 35036.51 35036.51 35036.51 0.00 397.16102 NA NA 0 256 32 1 1 1 1 0 0 1 10401873920000 2097152 27830272.0 CEPHFS 0
Finished : Wed Jul 8 17:26:53 2020
- ior_easy_write
-
IOR-3.3.0+dev: MPI Coordinated Test of Parallel I/O
Began : Wed Jul 8 16:53:45 2020
Command line : /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/bin/ior -w -a CEPHFS --cephfs.user=admin --cephfs.conf=/etc/ceph/ceph.conf --cephfs.prefix=/tmp/cbt/mnt/cbt-cephfs-kernel/0 -t 2m -b 9920000m -F -i 1 -C -Q 1 -g -G 27 -k -e -o /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.08-16.53.40-scr/ior_easy/ior_file_easy -O stoneWallingStatusFile=/tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.08-16.53.40-scr/ior_easy/stonewall -O stoneWallingWearOut=1 -D 300
Machine : Linux o03
TestID : 0
StartTime : Wed Jul 8 16:53:45 2020
Path : /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.08-16.53.40-scr/ior_easy
FS : 430.7 TiB Used FS: 0.0% Inodes: 0.0 Mi Used Inodes: 100.0%
Options:
api : CEPHFS
apiVersion :
test filename : /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.08-16.53.40-scr/ior_easy/ior_file_easy
access : file-per-process
type : independent
segments : 1
ordering in a file : sequential
ordering inter file : constant task offset
task offset : 1
nodes : 8
tasks : 256
clients per node : 32
repetitions : 1
xfersize : 2 MiB
blocksize : 9.46 TiB
aggregate filesize : 2421.88 TiB
stonewallingTime : 300
stoneWallingWearOut : 1
Results:
access bw(MiB/s) IOPS Latency(s) block(KiB) xfer(KiB) open(s) wr/rd(s) close(s) total(s) iter
------ --------- ---- ---------- ---------- --------- -------- -------- -------- -------- ----
stonewalling pairs accessed min: 22324 max: 54356 -- min data: 43.6 GiB mean data: 80.8 GiB time: 303.2s
WARNING: Expected aggregate file size = 2662879723520000.
WARNING: Stat() of aggregate file size = 29182155292672.
WARNING: Using actual aggregate bytes moved = 29182155292672.
WARNING: maybe caused by deadlineForStonewalling
write 59851 29994 0.000067 10158080000 2048.00 1.07 463.93 0.000586 464.99 0
Max Write: 59850.73 MiB/sec (62758.04 MB/sec)
Summary of all tests:
Operation Max(MiB) Min(MiB) Mean(MiB) StdDev Max(OPs) Min(OPs) Mean(OPs) StdDev Mean(s) Stonewall(s) Stonewall(MiB) Test# #Tasks tPN reps fPP reord reordoff reordrand seed segcnt blksiz xsize aggs(MiB) API RefNum
write 59850.73 59850.73 59850.73 0.00 29925.36 29925.36 29925.36 0.00 464.99471 303.25 69816.09 0 256 32 1 1 1 1 0 0 1 10401873920000 2097152 27830272.0 CEPHFS 0
Finished : Wed Jul 8 17:01:31 2020
- ior_hard_read
-
IOR-3.3.0+dev: MPI Coordinated Test of Parallel I/O
Began : Wed Jul 8 17:28:21 2020
Command line : /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/bin/ior -r -R -s 1000000 -a CEPHFS --cephfs.user=admin --cephfs.conf=/etc/ceph/ceph.conf --cephfs.prefix=/tmp/cbt/mnt/cbt-cephfs-kernel/0 -i 1 -C -Q 1 -g -G 27 -k -e -t 47008 -b 47008 -o /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.08-16.53.40-scr/ior_hard/IOR_file -O stoneWallingStatusFile=/tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.08-16.53.40-scr/ior_hard/stonewall
Machine : Linux o03
TestID : 0
StartTime : Wed Jul 8 17:28:21 2020
Path : /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.08-16.53.40-scr/ior_hard
FS : 429.8 TiB Used FS: 7.0% Inodes: 71.9 Mi Used Inodes: 100.0%
Options:
api : CEPHFS
apiVersion :
test filename : /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.08-16.53.40-scr/ior_hard/IOR_file
access : single-shared-file
type : independent
segments : 1000000
ordering in a file : sequential
ordering inter file : constant task offset
task offset : 1
nodes : 8
tasks : 256
clients per node : 32
repetitions : 1
xfersize : 47008 bytes
blocksize : 47008 bytes
aggregate filesize : 10.94 TiB
Results:
access bw(MiB/s) IOPS Latency(s) block(KiB) xfer(KiB) open(s) wr/rd(s) close(s) total(s) iter
------ --------- ---- ---------- ---------- --------- -------- -------- -------- -------- ----
WARNING: Expected aggregate file size = 12034048000000.
WARNING: Stat() of aggregate file size = 3840546078720.
WARNING: Using actual aggregate bytes moved = 3840546078720.
read 13939 316442 258.18 45.91 45.91 4.57 258.18 0.000232 262.75 0
Max Read: 13939.45 MiB/sec (14616.57 MB/sec)
Summary of all tests:
Operation Max(MiB) Min(MiB) Mean(MiB) StdDev Max(OPs) Min(OPs) Mean(OPs) StdDev Mean(s) Stonewall(s) Stonewall(MiB) Test# #Tasks tPN reps fPP reord reordoff reordrand seed segcnt blksiz xsize aggs(MiB) API RefNum
read 13939.45 13939.45 13939.45 0.00 310937.93 310937.93 310937.93 0.00 262.75289 NA NA 0 256 32 1 0 1 1 0 0 1000000 47008 47008 3662630.2 CEPHFS 0
Finished : Wed Jul 8 17:32:45 2020
- ior_hard_write
-
IOR-3.3.0+dev: MPI Coordinated Test of Parallel I/O
Began : Wed Jul 8 17:07:03 2020
Command line : /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/bin/ior -w -s 1000000 -a CEPHFS --cephfs.user=admin --cephfs.conf=/etc/ceph/ceph.conf --cephfs.prefix=/tmp/cbt/mnt/cbt-cephfs-kernel/0 -i 1 -C -Q 1 -g -G 27 -k -e -t 47008 -b 47008 -o /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.08-16.53.40-scr/ior_hard/IOR_file -O stoneWallingStatusFile=/tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.08-16.53.40-scr/ior_hard/stonewall -O stoneWallingWearOut=1 -D 300
Machine : Linux o03
TestID : 0
StartTime : Wed Jul 8 17:07:03 2020
Path : /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.08-16.53.40-scr/ior_hard
FS : 429.6 TiB Used FS: 6.2% Inodes: 62.4 Mi Used Inodes: 100.0%
Options:
api : CEPHFS
apiVersion :
test filename : /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.08-16.53.40-scr/ior_hard/IOR_file
access : single-shared-file
type : independent
segments : 1000000
ordering in a file : sequential
ordering inter file : constant task offset
task offset : 1
nodes : 8
tasks : 256
clients per node : 32
repetitions : 1
xfersize : 47008 bytes
blocksize : 47008 bytes
aggregate filesize : 10.94 TiB
stonewallingTime : 300
stoneWallingWearOut : 1
Results:
access bw(MiB/s) IOPS Latency(s) block(KiB) xfer(KiB) open(s) wr/rd(s) close(s) total(s) iter
------ --------- ---- ---------- ---------- --------- -------- -------- -------- -------- ----
stonewalling pairs accessed min: 277854 max: 319140 -- min data: 12.2 GiB mean data: 13.7 GiB time: 300.1s
WARNING: Expected aggregate file size = 12034048000000.
WARNING: Stat() of aggregate file size = 3840546078720.
WARNING: Using actual aggregate bytes moved = 3840546078720.
WARNING: maybe caused by deadlineForStonewalling
write 11019 246043 300.66 45.91 45.91 0.339009 332.06 0.000514 332.39 0
Max Write: 11018.91 MiB/sec (11554.17 MB/sec)
Summary of all tests:
Operation Max(MiB) Min(MiB) Mean(MiB) StdDev Max(OPs) Min(OPs) Mean(OPs) StdDev Mean(s) Stonewall(s) Stonewall(MiB) Test# #Tasks tPN reps fPP reord reordoff reordrand seed segcnt blksiz xsize aggs(MiB) API RefNum
write 11018.91 11018.91 11018.91 0.00 245791.53 245791.53 245791.53 0.00 332.39486 300.12 11963.24 0 256 32 1 0 1 1 0 0 1000000 47008 47008 3662630.2 CEPHFS 0
Finished : Wed Jul 8 17:12:36 2020
- mdtest_easy_delete
-
-- started at 07/08/2020 17:34:05 --
mdtest-3.3.0+dev was launched with 256 total task(s) on 8 node(s)
Command line used: /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/bin/mdtest '-r' '-F' '-P' '-d' '/tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.08-16.53.40-scr/mdt_easy' '-n' '240000' '-u' '-L' '-a' 'CEPHFS' '--cephfs.user=admin' '--cephfs.conf=/etc/ceph/ceph.conf' '--cephfs.prefix=/tmp/cbt/mnt/cbt-cephfs-kernel/0' '-x' '/tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.08-16.53.40-scr/mdt_easy-stonewall' '-N' '1'
Path: /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.08-16.53.40-scr
FS: 429.7 TiB Used FS: 7.0% Inodes: 0.0 Mi Used Inodes: 100.0%
Nodemap: 1111111111111111111111111111111100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
V-0: Rank 0 Line 2166 Shifting ranks by 32 for each phase.
256 tasks, 61440000 files
SUMMARY rate: (of 1 iterations)
Operation Max Min Mean Std Dev
--------- --- --- ---- -------
File creation : 0.000 0.000 0.000 0.000
File stat : 0.000 0.000 0.000 0.000
File read : 0.000 0.000 0.000 0.000
File removal : 136790.360 136789.099 136790.219 0.192
Tree creation : 0.000 0.000 0.000 0.000
Tree removal : 0.354 0.354 0.354 0.000
SUMMARY time: (of 1 iterations)
Operation Max Min Mean Std Dev
--------- --- --- ---- -------
File creation : 0.000 0.000 0.000 0.000
File stat : 0.000 0.000 0.000 0.000
File read : 0.000 0.000 0.000 0.000
File removal : 449.159 449.154 449.155 0.001
Tree creation : 0.000 0.000 0.000 0.000
Tree removal : 2.821 2.821 2.821 0.000
-- finished at 07/08/2020 17:41:37 --
- mdtest_easy_stat
-
-- started at 07/08/2020 17:26:55 --
mdtest-3.3.0+dev was launched with 256 total task(s) on 8 node(s)
Command line used: /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/bin/mdtest '-T' '-F' '-P' '-d' '/tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.08-16.53.40-scr/mdt_easy' '-n' '240000' '-u' '-L' '-a' 'CEPHFS' '--cephfs.user=admin' '--cephfs.conf=/etc/ceph/ceph.conf' '--cephfs.prefix=/tmp/cbt/mnt/cbt-cephfs-kernel/0' '-x' '/tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.08-16.53.40-scr/mdt_easy-stonewall' '-N' '1'
Path: /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.08-16.53.40-scr
FS: 429.8 TiB Used FS: 7.0% Inodes: 0.0 Mi Used Inodes: 100.0%
Nodemap: 1111111111111111111111111111111100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
V-0: Rank 0 Line 2166 Shifting ranks by 32 for each phase.
256 tasks, 61440000 files
SUMMARY rate: (of 1 iterations)
Operation Max Min Mean Std Dev
--------- --- --- ---- -------
File creation : 0.000 0.000 0.000 0.000
File stat : 726507.544 726459.595 726499.107 11.280
File read : 0.000 0.000 0.000 0.000
File removal : 0.000 0.000 0.000 0.000
Tree creation : 0.000 0.000 0.000 0.000
Tree removal : 0.000 0.000 0.000 0.000
SUMMARY time: (of 1 iterations)
Operation Max Min Mean Std Dev
--------- --- --- ---- -------
File creation : 0.000 0.000 0.000 0.000
File stat : 84.575 84.569 84.570 0.001
File read : 0.000 0.000 0.000 0.000
File removal : 0.000 0.000 0.000 0.000
Tree creation : 0.000 0.000 0.000 0.000
Tree removal : 0.000 0.000 0.000 0.000
-- finished at 07/08/2020 17:28:19 --
- mdtest_easy_write
-
-- started at 07/08/2020 17:01:33 --
mdtest-3.3.0+dev was launched with 256 total task(s) on 8 node(s)
Command line used: /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/bin/mdtest '-Y' '-C' '-F' '-P' '-d' '/tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.08-16.53.40-scr/mdt_easy' '-n' '240000' '-u' '-L' '-a' 'CEPHFS' '--cephfs.user=admin' '--cephfs.conf=/etc/ceph/ceph.conf' '--cephfs.prefix=/tmp/cbt/mnt/cbt-cephfs-kernel/0' '-x' '/tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.08-16.53.40-scr/mdt_easy-stonewall' '-N' '1' '-W' '300'
Path: /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.08-16.53.40-scr
FS: 429.2 TiB Used FS: 6.2% Inodes: 0.0 Mi Used Inodes: 100.0%
Nodemap: 1111111111111111111111111111111100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
V-0: Rank 0 Line 2166 Shifting ranks by 32 for each phase.
256 tasks, 61440000 files
Continue stonewall hit min: 204890 max: 240000 avg: 239444.4
SUMMARY rate: (of 1 iterations)
Operation Max Min Mean Std Dev
--------- --- --- ---- -------
File creation : 187381.636 187379.239 187381.373 0.342
File stat : 0.000 0.000 0.000 0.000
File read : 0.000 0.000 0.000 0.000
File removal : 0.000 0.000 0.000 0.000
File create (stonewall) : NA NA 204348.582 NA
Tree creation : 14.911 14.911 14.911 0.000
Tree removal : 0.000 0.000 0.000 0.000
SUMMARY time: (of 1 iterations)
Operation Max Min Mean Std Dev
--------- --- --- ---- -------
File creation : 327.891 327.887 327.887 0.001
File stat : 0.000 0.000 0.000 0.000
File read : 0.000 0.000 0.000 0.000
File removal : 0.000 0.000 0.000 0.000
File create (stonewall) : NA NA 299.967 NA
Tree creation : 0.067 0.067 0.067 0.000
Tree removal : 0.000 0.000 0.000 0.000
-- finished at 07/08/2020 17:07:01 --
- mdtest_hard_delete
-
-- started at 07/08/2020 17:43:28 --
mdtest-3.3.0+dev was launched with 256 total task(s) on 8 node(s)
Command line used: /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/bin/mdtest '-r' '-t' '-F' '-P' '-w' '3901' '-e' '3901' '-d' '/tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.08-16.53.40-scr/mdt_hard' '-n' '100000' '-x' '/tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.08-16.53.40-scr/mdt_hard-stonewall' '-a' 'CEPHFS' '--cephfs.user=admin' '--cephfs.conf=/etc/ceph/ceph.conf' '--cephfs.prefix=/tmp/cbt/mnt/cbt-cephfs-kernel/0' '-N' '1'
Path: /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.08-16.53.40-scr
FS: 429.8 TiB Used FS: 7.0% Inodes: 0.0 Mi Used Inodes: 100.0%
Nodemap: 1111111111111111111111111111111100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
V-0: Rank 0 Line 2166 Shifting ranks by 32 for each phase.
256 tasks, 25600000 files
SUMMARY rate: (of 1 iterations)
Operation Max Min Mean Std Dev
--------- --- --- ---- -------
File creation : 0.000 0.000 0.000 0.000
File stat : 0.000 0.000 0.000 0.000
File read : 0.000 0.000 0.000 0.000
File removal : 23616.360 23615.933 23616.306 0.072
Tree creation : 0.000 0.000 0.000 0.000
Tree removal : 4.153 4.153 4.153 0.000
SUMMARY time: (of 1 iterations)
Operation Max Min Mean Std Dev
--------- --- --- ---- -------
File creation : 0.000 0.000 0.000 0.000
File stat : 0.000 0.000 0.000 0.000
File read : 0.000 0.000 0.000 0.000
File removal : 256.207 256.202 256.203 0.001
Tree creation : 0.000 0.000 0.000 0.000
Tree removal : 0.241 0.241 0.241 0.000
-- finished at 07/08/2020 17:47:46 --
- mdtest_hard_read
-
-- started at 07/08/2020 17:42:08 --
mdtest-3.3.0+dev was launched with 256 total task(s) on 8 node(s)
Command line used: /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/bin/mdtest '-X' '-E' '-t' '-F' '-P' '-w' '3901' '-e' '3901' '-d' '/tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.08-16.53.40-scr/mdt_hard' '-n' '100000' '-x' '/tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.08-16.53.40-scr/mdt_hard-stonewall' '-a' 'CEPHFS' '--cephfs.user=admin' '--cephfs.conf=/etc/ceph/ceph.conf' '--cephfs.prefix=/tmp/cbt/mnt/cbt-cephfs-kernel/0' '-N' '1'
Path: /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.08-16.53.40-scr
FS: 429.8 TiB Used FS: 7.0% Inodes: 0.0 Mi Used Inodes: 100.0%
Nodemap: 1111111111111111111111111111111100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
V-0: Rank 0 Line 2166 Shifting ranks by 32 for each phase.
256 tasks, 25600000 files
SUMMARY rate: (of 1 iterations)
Operation Max Min Mean Std Dev
--------- --- --- ---- -------
File creation : 0.000 0.000 0.000 0.000
File stat : 0.000 0.000 0.000 0.000
File read : 76116.245 76116.027 76116.136 0.055
File removal : 0.000 0.000 0.000 0.000
Tree creation : 0.000 0.000 0.000 0.000
Tree removal : 0.000 0.000 0.000 0.000
SUMMARY time: (of 1 iterations)
Operation Max Min Mean Std Dev
--------- --- --- ---- -------
File creation : 0.000 0.000 0.000 0.000
File stat : 0.000 0.000 0.000 0.000
File read : 79.491 79.491 79.491 0.000
File removal : 0.000 0.000 0.000 0.000
Tree creation : 0.000 0.000 0.000 0.000
Tree removal : 0.000 0.000 0.000 0.000
-- finished at 07/08/2020 17:43:27 --
- mdtest_hard_stat
-
-- started at 07/08/2020 17:32:46 --
mdtest-3.3.0+dev was launched with 256 total task(s) on 8 node(s)
Command line used: /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/bin/mdtest '-T' '-t' '-F' '-P' '-w' '3901' '-e' '3901' '-d' '/tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.08-16.53.40-scr/mdt_hard' '-n' '100000' '-x' '/tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.08-16.53.40-scr/mdt_hard-stonewall' '-a' 'CEPHFS' '--cephfs.user=admin' '--cephfs.conf=/etc/ceph/ceph.conf' '--cephfs.prefix=/tmp/cbt/mnt/cbt-cephfs-kernel/0' '-N' '1'
Path: /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.08-16.53.40-scr
FS: 429.7 TiB Used FS: 7.0% Inodes: 0.0 Mi Used Inodes: 100.0%
Nodemap: 1111111111111111111111111111111100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
V-0: Rank 0 Line 2166 Shifting ranks by 32 for each phase.
256 tasks, 25600000 files
SUMMARY rate: (of 1 iterations)
Operation Max Min Mean Std Dev
--------- --- --- ---- -------
File creation : 0.000 0.000 0.000 0.000
File stat : 77547.678 77544.354 77547.287 0.558
File read : 0.000 0.000 0.000 0.000
File removal : 0.000 0.000 0.000 0.000
Tree creation : 0.000 0.000 0.000 0.000
Tree removal : 0.000 0.000 0.000 0.000
SUMMARY time: (of 1 iterations)
Operation Max Min Mean Std Dev
--------- --- --- ---- -------
File creation : 0.000 0.000 0.000 0.000
File stat : 78.027 78.024 78.024 0.001
File read : 0.000 0.000 0.000 0.000
File removal : 0.000 0.000 0.000 0.000
Tree creation : 0.000 0.000 0.000 0.000
Tree removal : 0.000 0.000 0.000 0.000
-- finished at 07/08/2020 17:34:04 --
- mdtest_hard_write
-
-- started at 07/08/2020 17:12:36 --
mdtest-3.3.0+dev was launched with 256 total task(s) on 8 node(s)
Command line used: /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/bin/mdtest '-Y' '-C' '-t' '-F' '-P' '-w' '3901' '-e' '3901' '-d' '/tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.08-16.53.40-scr/mdt_hard' '-n' '100000' '-x' '/tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.08-16.53.40-scr/mdt_hard-stonewall' '-a' 'CEPHFS' '--cephfs.user=admin' '--cephfs.conf=/etc/ceph/ceph.conf' '--cephfs.prefix=/tmp/cbt/mnt/cbt-cephfs-kernel/0' '-N' '1' '-W' '300'
Path: /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.08-16.53.40-scr
FS: 429.8 TiB Used FS: 7.0% Inodes: 0.0 Mi Used Inodes: 100.0%
Nodemap: 1111111111111111111111111111111100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
V-0: Rank 0 Line 2166 Shifting ranks by 32 for each phase.
256 tasks, 25600000 files
Continue stonewall hit min: 18976 max: 23635 avg: 20927.1
SUMMARY rate: (of 1 iterations)
Operation Max Min Mean Std Dev
--------- --- --- ---- -------
File creation : 16946.325 16946.151 16946.296 0.031
File stat : 0.000 0.000 0.000 0.000
File read : 0.000 0.000 0.000 0.000
File removal : 0.000 0.000 0.000 0.000
File create (stonewall) : NA NA 17801.252 NA
Tree creation : 9.429 9.429 9.429 0.000
Tree removal : 0.000 0.000 0.000 0.000
SUMMARY time: (of 1 iterations)
Operation Max Min Mean Std Dev
--------- --- --- ---- -------
File creation : 357.046 357.043 357.043 0.001
File stat : 0.000 0.000 0.000 0.000
File read : 0.000 0.000 0.000 0.000
File removal : 0.000 0.000 0.000 0.000
File create (stonewall) : NA NA 300.954 NA
Tree creation : 0.106 0.106 0.106 0.000
Tree removal : 0.000 0.000 0.000 0.000
-- finished at 07/08/2020 17:18:34 --
- result_summary
-
[RESULT] BW phase 1 ior_easy_write 58.448 GiB/s : time 463.93 seconds
[RESULT] IOPS phase 1 mdtest_easy_write 187.382 kiops : time 327.89 seconds
[RESULT] BW phase 2 ior_hard_write 10.761 GiB/s : time 332.06 seconds
[RESULT] IOPS phase 2 mdtest_hard_write 16.946 kiops : time 357.05 seconds
[RESULT] IOPS phase 3 find 671.710 kiops : time 100.48 seconds
[RESULT] BW phase 3 ior_easy_read 68.431 GiB/s : time 397.03 seconds
[RESULT] IOPS phase 4 mdtest_easy_stat 726.508 kiops : time 84.57 seconds
[RESULT] BW phase 4 ior_hard_read 13.612 GiB/s : time 258.18 seconds
[RESULT] IOPS phase 5 mdtest_hard_stat 77.548 kiops : time 78.03 seconds
[RESULT] IOPS phase 6 mdtest_easy_delete 136.790 kiops : time 449.16 seconds
[RESULT] IOPS phase 7 mdtest_hard_read 76.116 kiops : time 79.49 seconds
[RESULT] IOPS phase 8 mdtest_hard_delete 23.616 kiops : time 287.90 seconds
[SCORE] Bandwidth 27.6661 GiB/s : IOPS 114.503 kiops : TOTAL 56.2837