Officinalis (1 Client)

Institution:
Client Procs Per Node:
Client Operating System: CentOS
Client Operating System Version: CentOS Linux release 8.0.1905 (Core)
Client Kernel Version: 4.18.0-147.6.el8.x86_64

DATA SERVER

Storage Type: NVMe
Volatile Memory: 384GB
Storage Interface: NVMe
Network: 4x25GbE
Software Version:
OS Version: CentOS Linux release 8.0.1905 (Core)

INFORMATION

Client Nodes: 1
Client Total Procs: 112
Metadata Nodes: 8
Metadata Storage Devices: 8
Data Nodes: 8
Data Storage Devices: 8

METADATA

Easy Write: 123.68 kIOP/s
Easy Stat: 359.29 kIOP/s
Easy Delete: 100.24 kIOP/s
Hard Write: 16.19 kIOP/s
Hard Read: 92.43 kIOP/s
Hard Stat: 101.85 kIOP/s
Hard Delete: 16.42 kIOP/s

Submitted Files

io500
#!/bin/bash
#
# INSTRUCTIONS:
# This script takes its parameters from the same .ini file as the io500 binary.
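#
# For reference, a minimal ini file might look like the sketch below
# (hypothetical values; the actual parameters for this run came from the
# submitted ini file):
#
#   [global]
#   datadir = ./datafiles
#   resultdir = ./results
#   drop-caches = False
#
#   [debug]
#   stonewall-time = 300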

function setup_paths {
  # Set the paths to the binaries and how to launch MPI jobs.
  # If you ran ./utilities/prepare.sh successfully, then binaries are in ./bin/
  io500_ior_cmd=$PWD/bin/ior
  io500_mdtest_cmd=$PWD/bin/mdtest
  io500_mdreal_cmd=$PWD/bin/md-real-io
  io500_mpirun="mpirun"
  io500_mpiargs="-npernode 112 --hostfile /home/nhm/io500-app/hosts"
}
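
# With the settings above, each benchmark phase is launched roughly as
#   mpirun -npernode 112 --hostfile /home/nhm/io500-app/hosts ./bin/ior ...
# i.e. 112 MPI ranks on the single client node, matching the 112 total client
# procs reported for this submission.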

function setup_directories {
  local workdir
  local resultdir
  local ts

  # set directories where benchmark files are created and where the results go
  # If you want to set up stripe tuning on your output directories or anything
  # similar, then this is the right place to do it.  This creates the output
  # directories for both the app run and the script run.

  # Ceph Stuff
  MAX_MDS=65
  MPI_RANKS=112
  CEPH_MDTEST_EASY_PINNING=distributed # none, export-rr, distributed, random
  CEPH_MDTEST_EASY_PINNING_RANDOM=1.0 

  MDTEST_EASY_FILES_PER_PROC=400000
  MDTEST_HARD_FILES_PER_PROC=100000
  MDTEST_HARD_FILES=$(( ${MPI_RANKS} * ${MDTEST_HARD_FILES_PER_PROC} ))
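  # With the values above this is 112 * 100000 = 11200000 files, matching the
  # "112 tasks, 11200000 files" line in the mdtest_hard output below.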


  timestamp=$(date +%Y.%m.%d-%H.%M.%S)           # create a uniquifier
  [ $(get_ini_global_param timestamp-datadir True) != "False" ] &&
	ts="$timestamp" || ts="io500"
  # directory where the data will be stored
  workdir=$(get_ini_global_param datadir $PWD/datafiles)/$ts
  io500_workdir=$workdir-scr
  [ $(get_ini_global_param timestamp-resultdir True) != "False" ] &&
	ts="$timestamp" || ts="io500"
  # the directory where the output results will be kept
  resultdir=$(get_ini_global_param resultdir $PWD/results)/$ts
  io500_result_dir=$resultdir-scr

  mkdir -p $workdir-{scr,app} $resultdir-{scr,app}

  for pf in scr app
  do
    wd="$workdir-${pf}"
    rd="$resultdir-${pf}"

    mdt_easy_parent="${wd}/mdt_easy/test-dir.0-0"
    mdt_hard_parent="${wd}/mdt_hard/test-dir.0-0"
    mdt_hard_dir="${wd}/mdt_hard/test-dir.0-0/mdtest_tree.0"
    if [ $pf == "app" ]
    then
      mdt_easy_parent="${wd}/mdtest-easy/test-dir.0-0"
      mdt_hard_parent="${wd}/mdtest-hard/test-dir.0-0"
      mdt_hard_dir="${wd}/mdtest-hard/test-dir.0-0/mdtest_tree.0"
    fi

    # Create the result directory and "top-level" mdt parent directories
    # (Should be legal based on io500 submission rules #11)
    mkdir -p "${rd}"
    mkdir -p "${mdt_easy_parent}"
    mkdir -p "${mdt_hard_parent}"

    # *** Per-Directory Round-Robin Pinning (Minus Auth MDS) ***
    # This option likely violates rule #11 by creating the mdtest directories
    # (but directory creation time doesn't actually affect the score?)
    # Only use for testing currently.
    if [[ "${CEPH_MDTEST_EASY_PINNING}" == "export-rr" ]]
    then
      setfattr -n ceph.dir.pin -v 0 "${mdt_easy_parent}"
      MOD=$(( ${MAX_MDS} - 1 ))
      MAXRANK=$(( ${MPI_RANKS} ))
      for (( RANK=0; RANK<${MAXRANK}; RANK++ ))
      do
        n=0
        if [ ${MOD} -gt 0 ]
        then
          n=$(( ${RANK}%${MOD} + 1 ))
        fi
        echo "MPI rank ${RANK} assigned to mds rank ${n}"
        mkdir -p "${mdt_easy_parent}/mdtest_tree.${RANK}.0"
        setfattr -n ceph.dir.pin -v ${n} "${mdt_easy_parent}/mdtest_tree.${RANK}.0"
#        setfattr -n ceph.dir.expected_files -v ${MDTEST_EASY_FILES_PER_PROC} "${mdt_easy_parent}/mdtest_tree.${RANK}.0"
      done
    # *** Distributed Pinning ***
    # Shouldn't violate rule #11 because we are working on the parent dir
    elif [[ "${CEPH_MDTEST_EASY_PINNING}" == "distributed" ]]
    then
      setfattr -n ceph.dir.pin.distributed -v 1 ${mdt_easy_parent}
    # *** Random Pinning ***
    # Shouldn't violate rule #11 because we are working on the parent dir
    elif [[ "${CEPH_MDTEST_EASY_PINNING}" == "random" ]]
    then
      setfattr -n ceph.dir.pin.random -v ${CEPH_MDTEST_EASY_PINNING_RANDOM} ${mdt_easy_parent}
    fi;
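
    # To verify that a pin was applied, the xattr can be read back with
    # getfattr (from the attr package), e.g.:
    #   getfattr -n ceph.dir.pin.distributed "${mdt_easy_parent}"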

    # *** Experimental Hard Directory expected_files and "hot" hints ***
    # This option likely violates rule #11 because it pre-creates the (single!)
    # mdtest subdirectory workdir.
    # Only use for testing currently with https://github.com/ceph/ceph/pull/34574.
#    echo "Setting expected mdtest hard files to ${MDTEST_HARD_FILES}"
#    mkdir -p "${mdt_hard_dir}"
#    setfattr -n ceph.dir.expected_files -v ${MDTEST_HARD_FILES} "${mdt_hard_dir}"
#    setfattr -n ceph.dir.hot -v 1 "${mdt_hard_dir}"
  done
}

# you should not edit anything below this line
set -eo pipefail  # better error handling

io500_ini="${1:-""}"
if [[ -z "$io500_ini" ]]; then
  echo "error: ini file must be specified.  usage: $0 "
  exit 1
fi
if [[ ! -s "$io500_ini" ]]; then
  echo "error: ini file '$io500_ini' not found or empty"
  exit 2
fi

function get_ini_section_param() {
  local section="$1"
  local param="$2"
  local inside=false

  while read LINE; do
    LINE=$(sed -e 's/ *#.*//' -e '1s/ *= */=/' <<<$LINE)
    # a new "[section]" header line ends the current section
    $inside && [[ "$LINE" =~ ^\[.*\] ]] && inside=false && break
    [[ -n "$section" && "$LINE" =~ "[$section]" ]] && inside=true && continue
    ! $inside && continue
    #echo $LINE | awk -F = "/^$param/ { print \$2 }"
    if [[ $(echo $LINE | grep "^$param *=" ) != "" ]] ; then
      # echo "$section : $param : $inside : $LINE" >> parsed.txt # debugging
      echo $LINE | sed -e "s/[^=]*=[ \t]*\(.*\)/\1/"
      return
    fi
  done < $io500_ini
  echo ""
}
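
# Example for the parser above: given an ini file containing
#   [global]
#   datadir = /mnt/cephfs/io500     # hypothetical value
# `get_ini_section_param global datadir` prints "/mnt/cephfs/io500"; inline
# "#" comments and whitespace around the first "=" are stripped beforehand.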

function get_ini_param() {
  local section="$1"
  local param="$2"
  local default="$3"

  # try and get the most-specific param first, then more generic params
  val=$(get_ini_section_param $section $param)
  [ -n "$val" ] || val="$(get_ini_section_param ${section%-*} $param)"
  [ -n "$val" ] || val="$(get_ini_section_param global $param)"

  echo "${val:-$default}" |
  	sed -e 's/[Ff][Aa][Ll][Ss][Ee]/False/' -e 's/[Tt][Rr][Uu][Ee]/True/'
}
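
# Example of the fallback above: `get_ini_param ior-easy-read transferSize 1m`
# first checks [ior-easy-read], then [ior-easy] (via the "${section%-*}"
# strip), then [global], and finally returns the default ("1m" here is a
# hypothetical value).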

function get_ini_run_param() {
  local section="$1"
  local default="$2"
  local val

  val=$(get_ini_section_param $section noRun)

  # logic is reversed from "noRun=TRUE" to "run=False"
  [[ $val = [Tt][Rr][Uu][Ee] ]] && echo "False" || echo "$default"
}
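
# Example of the reversed logic: with
#   [ior-easy]
#   noRun = TRUE
# in the ini file, `get_ini_run_param ior-easy True` prints "False" and the
# ior-easy phase is skipped.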

function get_ini_global_param() {
  local param="$1"
  local default="$2"
  local val

  val=$(get_ini_section_param global $param |
  	sed -e 's/[Ff][Aa][Ll][Ss][Ee]/False/' -e 's/[Tt][Rr][Uu][Ee]/True/')

  echo "${val:-$default}"
}

# does the write phase and enables the subsequent read
io500_run_ior_easy="$(get_ini_run_param ior-easy True)"
# does the creat phase and enables the subsequent stat
io500_run_md_easy="$(get_ini_run_param mdtest-easy True)"
# does the write phase and enables the subsequent read
io500_run_ior_hard="$(get_ini_run_param ior-hard True)"
# does the creat phase and enables the subsequent read
io500_run_md_hard="$(get_ini_run_param mdtest-hard True)"
io500_run_find="$(get_ini_run_param find True)"
io500_run_ior_easy_read="$(get_ini_run_param ior-easy-read True)"
io500_run_md_easy_stat="$(get_ini_run_param mdtest-easy-stat True)"
io500_run_ior_hard_read="$(get_ini_run_param ior-hard-read True)"
io500_run_md_hard_stat="$(get_ini_run_param mdtest-hard-stat True)"
io500_run_md_hard_read="$(get_ini_run_param mdtest-hard-read True)"
# turn this off if you want to just run find by itself
io500_run_md_easy_delete="$(get_ini_run_param mdtest-easy-delete True)"
# turn this off if you want to just run find by itself
io500_run_md_hard_delete="$(get_ini_run_param mdtest-hard-delete True)"
io500_run_mdreal="$(get_ini_run_param mdreal False)"
# Attempt to clean the cache after every benchmark; this is useful for
# validating the performance results and for testing with a local node.  It
# uses io500_clean_cache_cmd (can be overwritten); make sure the user can
# write to /proc/sys/vm/drop_caches.
io500_clean_cache="$(get_ini_global_param drop-caches False)"
io500_clean_cache_cmd="$(get_ini_global_param drop-caches-cmd)"
io500_cleanup_workdir="$(get_ini_run_param cleanup)"
# Stonewalling timer: set to 300 for an official run, or to 0 if you never want to abort.
io500_stonewall_timer=$(get_ini_param debug stonewall-time 300)
# Choose "regular" for an official regular submission, or "scc" for a Student
# Cluster Competition submission (test cases run for 30 seconds instead of 300).
io500_rules="regular"

# to run this benchmark, find and edit each of these functions.  Please also
# edit the 'extra_description' function to help us collect the required data.
function main {
  setup_directories
  setup_paths
  setup_ior_easy # required if you want a complete score
  setup_ior_hard # required if you want a complete score
  setup_mdt_easy # required if you want a complete score
  setup_mdt_hard # required if you want a complete score
  setup_find     # required if you want a complete score
  setup_mdreal   # optional

  run_benchmarks

  if [[ ! -s "system-information.txt" ]]; then
    echo "Warning: please create a system-information.txt description by"
    echo "copying the information from https://vi4io.org/io500-info-creator/"
  else
    cp "system-information.txt" $io500_result_dir
  fi

  create_tarball
}

function setup_ior_easy {
  local params

  io500_ior_easy_size=$(get_ini_param ior-easy blockSize 9920000m | tr -d m)
  val=$(get_ini_param ior-easy API POSIX)
  [ -n "$val" ] && params+=" -a $val"
  val="$(get_ini_param ior-easy transferSize)"
  [ -n "$val" ] && params+=" -t $val"
  val="$(get_ini_param ior-easy hintsFileName)"
  [ -n "$val" ] && params+=" -U $val"
  val="$(get_ini_param ior-easy posix.odirect)"
  [ "$val" = "True" ] && params+=" --posix.odirect"
  val="$(get_ini_param ior-easy verbosity)"
  if [ -n "$val" ]; then
    for i in $(seq $val); do
      params+=" -v"
    done
  fi
  io500_ior_easy_params="$params"
  echo -n ""
}

function setup_mdt_easy {
  io500_mdtest_easy_params="-u -L" # unique dir per thread, files only at leaves

  val=$(get_ini_param mdtest-easy n 1000000)
  [ -n "$val" ] && io500_mdtest_easy_files_per_proc="$val"
  val=$(get_ini_param mdtest-easy API POSIX)
  [ -n "$val" ] && io500_mdtest_easy_params+=" -a $val"
  val=$(get_ini_param mdtest-easy posix.odirect)
  [ "$val" = "True" ] && io500_mdtest_easy_params+=" --posix.odirect"
  echo -n ""
}

function setup_ior_hard {
  local params

  io500_ior_hard_api=$(get_ini_param ior-hard API POSIX)
  io500_ior_hard_writes_per_proc="$(get_ini_param ior-hard segmentCount 10000000)"
  val="$(get_ini_param ior-hard hintsFileName)"
  [ -n "$val" ] && params+=" -U $val"
  val="$(get_ini_param ior-hard posix.odirect)"
  [ "$val" = "True" ] && params+=" --posix.odirect"
  val="$(get_ini_param ior-easy verbosity)"
  if [ -n "$val" ]; then
    for i in $(seq $val); do
      params+=" -v"
    done
  fi
  io500_ior_hard_api_specific_options="$params"
  echo -n ""
}

function setup_mdt_hard {
  val=$(get_ini_param mdtest-hard n 1000000)
  [ -n "$val" ] && io500_mdtest_hard_files_per_proc="$val"
  io500_mdtest_hard_api="$(get_ini_param mdtest-hard API POSIX)"
  io500_mdtest_hard_api_specific_options=""
  echo -n ""
}

function setup_find {
  val="$(get_ini_param find external-script)"
  [ -z "$val" ] && io500_find_mpi="True" && io500_find_cmd="$PWD/bin/pfind" ||
    io500_find_cmd="$val"
  # uses stonewalling, run pfind
  io500_find_cmd_args="$(get_ini_param find external-extra-args)"
  echo -n ""
}

function setup_mdreal {
  echo -n ""
}

function run_benchmarks {
  local app_first=$((RANDOM % 100))
  local app_rc=0

  # run the app and C version in random order to try and avoid bias
  (( app_first >= 50 )) && $io500_mpirun $io500_mpiargs $PWD/io500 $io500_ini --timestamp $timestamp || app_rc=$?

  # Important: source the io500_fixed.sh script.  Do not change it. If you
  # discover a need to change it, please email the mailing list to discuss.
  source build/io500-dev/utilities/io500_fixed.sh 2>&1 |
    tee $io500_result_dir/io-500-summary.$timestamp.txt

  (( $app_first >= 50 )) && return $app_rc

  echo "The io500.sh was run"
  echo
  echo "Running the C version of the benchmark now"
  # run the app and C version in random order to try and avoid bias
  $io500_mpirun $io500_mpiargs $PWD/io500 $io500_ini --timestamp $timestamp
}

function create_tarball {
  local sourcedir=$(dirname $io500_result_dir)
  local fname=$(basename ${io500_result_dir%-scr})
  local tarball=$sourcedir/io500-$HOSTNAME-$fname.tgz

  cp -v $0 $io500_ini $io500_result_dir
  tar czf $tarball -C $sourcedir $fname-{app,scr}
  echo "Created result tarball $tarball"
}
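
# For this run the tarball name works out to something like
#   <resultdir>/io500-o01-2020.07.13-17.30.55.tgz
# since $HOSTNAME is the client node ("o01" per the logs below) and the "-scr"
# suffix is stripped from the timestamped result directory.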

# Information fields; these provide information about your system hardware
# Use https://vi4io.org/io500-info-creator/ to generate information about
# your hardware that you want to include publicly!
function extra_description {
  # UPDATE: Please add your information into "system-information.txt" pasting the output of the info-creator
  # EXAMPLE:
  # io500_info_system_name='xxx'
  # DO NOT ADD IT HERE
  :
}

main
ior_easy_read
IOR-3.3.0+dev: MPI Coordinated Test of Parallel I/O
Began               : Mon Jul 13 18:01:04 2020
Command line        : /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/bin/ior -r -R -a CEPHFS --cephfs.user=admin --cephfs.conf=/etc/ceph/ceph.conf --cephfs.prefix=/tmp/cbt/mnt/cbt-cephfs-kernel/0 -t 2m -b 9920000m -F -i 1 -C -Q 1 -g -G 27 -k -e -o /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.13-17.30.55-scr/ior_easy/ior_file_easy -O stoneWallingStatusFile=/tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.13-17.30.55-scr/ior_easy/stonewall
Machine             : Linux o01
TestID              : 0
StartTime           : Mon Jul 13 18:01:04 2020
Path                : /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.13-17.30.55-scr/ior_easy
FS                  : 430.8 TiB   Used FS: 1.9%   Inodes: 49.6 Mi   Used Inodes: 100.0%

Options: 
api                 : CEPHFS
apiVersion          : 
test filename       : /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.13-17.30.55-scr/ior_easy/ior_file_easy
access              : file-per-process
type                : independent
segments            : 1
ordering in a file  : sequential
ordering inter file : constant task offset
task offset         : 1
nodes               : 1
tasks               : 112
clients per node    : 112
repetitions         : 1
xfersize            : 2 MiB
blocksize           : 9.46 TiB
aggregate filesize  : 1059.57 TiB

Results: 

access    bw(MiB/s)  IOPS       Latency(s)  block(KiB) xfer(KiB)  open(s)    wr/rd(s)   close(s)   total(s)   iter
------    ---------  ----       ----------  ---------- ---------  --------   --------   --------   --------   ----
WARNING: Expected aggregate file size       = 1165009879040000.
WARNING: Stat() of aggregate file size      = 6638677262336.
WARNING: Using actual aggregate bytes moved = 6638677262336.
read      10474      5238       0.000122    10158080000 2048.00    0.099868   604.36     0.001199   604.46     0   
Max Read:  10474.09 MiB/sec (10982.88 MB/sec)

Summary of all tests:
Operation   Max(MiB)   Min(MiB)  Mean(MiB)     StdDev   Max(OPs)   Min(OPs)  Mean(OPs)     StdDev    Mean(s) Stonewall(s) Stonewall(MiB) Test# #Tasks tPN reps fPP reord reordoff reordrand seed segcnt   blksiz    xsize aggs(MiB)   API RefNum
read        10474.09   10474.09   10474.09       0.00    5237.05    5237.05    5237.05       0.00  604.45679         NA            NA     0    112 112    1   1     1        1         0    0      1 10401873920000  2097152 6331136.0 CEPHFS      0
Finished            : Mon Jul 13 18:11:09 2020
ior_easy_write
IOR-3.3.0+dev: MPI Coordinated Test of Parallel I/O
Began               : Mon Jul 13 17:31:03 2020
Command line        : /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/bin/ior -w -a CEPHFS --cephfs.user=admin --cephfs.conf=/etc/ceph/ceph.conf --cephfs.prefix=/tmp/cbt/mnt/cbt-cephfs-kernel/0 -t 2m -b 9920000m -F -i 1 -C -Q 1 -g -G 27 -k -e -o /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.13-17.30.55-scr/ior_easy/ior_file_easy -O stoneWallingStatusFile=/tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.13-17.30.55-scr/ior_easy/stonewall -O stoneWallingWearOut=1 -D 300
Machine             : Linux o01
TestID              : 0
StartTime           : Mon Jul 13 17:31:03 2020
Path                : /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.13-17.30.55-scr/ior_easy
FS                  : 430.7 TiB   Used FS: 0.0%   Inodes: 0.0 Mi   Used Inodes: 100.0%

Options: 
api                 : CEPHFS
apiVersion          : 
test filename       : /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.13-17.30.55-scr/ior_easy/ior_file_easy
access              : file-per-process
type                : independent
segments            : 1
ordering in a file  : sequential
ordering inter file : constant task offset
task offset         : 1
nodes               : 1
tasks               : 112
clients per node    : 112
repetitions         : 1
xfersize            : 2 MiB
blocksize           : 9.46 TiB
aggregate filesize  : 1059.57 TiB
stonewallingTime    : 300
stoneWallingWearOut : 1

Results: 

access    bw(MiB/s)  IOPS       Latency(s)  block(KiB) xfer(KiB)  open(s)    wr/rd(s)   close(s)   total(s)   iter
------    ---------  ----       ----------  ---------- ---------  --------   --------   --------   --------   ----
stonewalling pairs accessed min: 7478 max: 28264 -- min data: 14.6 GiB mean data: 27.5 GiB time: 300.7s
WARNING: Expected aggregate file size       = 1165009879040000.
WARNING: Stat() of aggregate file size      = 6638677262336.
WARNING: Using actual aggregate bytes moved = 6638677262336.
WARNING: maybe caused by deadlineForStonewalling
write     10499      5250       0.000101    10158080000 2048.00    0.039853   602.95     0.012662   603.01     0   
Max Write: 10499.31 MiB/sec (11009.32 MB/sec)

Summary of all tests:
Operation   Max(MiB)   Min(MiB)  Mean(MiB)     StdDev   Max(OPs)   Min(OPs)  Mean(OPs)     StdDev    Mean(s) Stonewall(s) Stonewall(MiB) Test# #Tasks tPN reps fPP reord reordoff reordrand seed segcnt   blksiz    xsize aggs(MiB)   API RefNum
write       10499.31   10499.31   10499.31       0.00    5249.65    5249.65    5249.65       0.00  603.00507     300.73      10478.19     0    112 112    1   1     1        1         0    0      1 10401873920000  2097152 6331136.0 CEPHFS      0
Finished            : Mon Jul 13 17:41:07 2020
ior_hard_read
IOR-3.3.0+dev: MPI Coordinated Test of Parallel I/O
Began               : Mon Jul 13 18:14:59 2020
Command line        : /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/bin/ior -r -R -s 1000000 -a CEPHFS --cephfs.user=admin --cephfs.conf=/etc/ceph/ceph.conf --cephfs.prefix=/tmp/cbt/mnt/cbt-cephfs-kernel/0 -i 1 -C -Q 1 -g -G 27 -k -e -t 47008 -b 47008 -o /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.13-17.30.55-scr/ior_hard/IOR_file -O stoneWallingStatusFile=/tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.13-17.30.55-scr/ior_hard/stonewall
Machine             : Linux o01
TestID              : 0
StartTime           : Mon Jul 13 18:14:59 2020
Path                : /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.13-17.30.55-scr/ior_hard
FS                  : 430.8 TiB   Used FS: 1.9%   Inodes: 49.6 Mi   Used Inodes: 100.0%

Options: 
api                 : CEPHFS
apiVersion          : 
test filename       : /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.13-17.30.55-scr/ior_hard/IOR_file
access              : single-shared-file
type                : independent
segments            : 1000000
ordering in a file  : sequential
ordering inter file : constant task offset
task offset         : 1
nodes               : 1
tasks               : 112
clients per node    : 112
repetitions         : 1
xfersize            : 47008 bytes
blocksize           : 47008 bytes
aggregate filesize  : 4.79 TiB

Results: 

access    bw(MiB/s)  IOPS       Latency(s)  block(KiB) xfer(KiB)  open(s)    wr/rd(s)   close(s)   total(s)   iter
------    ---------  ----       ----------  ---------- ---------  --------   --------   --------   --------   ----
WARNING: Expected aggregate file size       = 5264896000000.
WARNING: Stat() of aggregate file size      = 2461386264064.
WARNING: Using actual aggregate bytes moved = 2461386264064.
read      7692       171597     305.13      45.91      45.91      0.046746   305.14     0.001318   305.19     0   
Max Read:  7691.54 MiB/sec (8065.16 MB/sec)

Summary of all tests:
Operation   Max(MiB)   Min(MiB)  Mean(MiB)     StdDev   Max(OPs)   Min(OPs)  Mean(OPs)     StdDev    Mean(s) Stonewall(s) Stonewall(MiB) Test# #Tasks tPN reps fPP reord reordoff reordrand seed segcnt   blksiz    xsize aggs(MiB)   API RefNum
read         7691.54    7691.54    7691.54       0.00  171569.97  171569.97  171569.97       0.00  305.18749         NA            NA     0    112 112    1   0     1        1         0    0 1000000    47008    47008 2347360.8 CEPHFS      0
Finished            : Mon Jul 13 18:20:04 2020
ior_hard_write
IOR-3.3.0+dev: MPI Coordinated Test of Parallel I/O
Began               : Mon Jul 13 17:47:21 2020
Command line        : /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/bin/ior -w -s 1000000 -a CEPHFS --cephfs.user=admin --cephfs.conf=/etc/ceph/ceph.conf --cephfs.prefix=/tmp/cbt/mnt/cbt-cephfs-kernel/0 -i 1 -C -Q 1 -g -G 27 -k -e -t 47008 -b 47008 -o /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.13-17.30.55-scr/ior_hard/IOR_file -O stoneWallingStatusFile=/tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.13-17.30.55-scr/ior_hard/stonewall -O stoneWallingWearOut=1 -D 300
Machine             : Linux o01
TestID              : 0
StartTime           : Mon Jul 13 17:47:21 2020
Path                : /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.13-17.30.55-scr/ior_hard
FS                  : 430.8 TiB   Used FS: 1.4%   Inodes: 41.4 Mi   Used Inodes: 100.0%

Options: 
api                 : CEPHFS
apiVersion          : 
test filename       : /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.13-17.30.55-scr/ior_hard/IOR_file
access              : single-shared-file
type                : independent
segments            : 1000000
ordering in a file  : sequential
ordering inter file : constant task offset
task offset         : 1
nodes               : 1
tasks               : 112
clients per node    : 112
repetitions         : 1
xfersize            : 47008 bytes
blocksize           : 47008 bytes
aggregate filesize  : 4.79 TiB
stonewallingTime    : 300
stoneWallingWearOut : 1

Results: 

access    bw(MiB/s)  IOPS       Latency(s)  block(KiB) xfer(KiB)  open(s)    wr/rd(s)   close(s)   total(s)   iter
------    ---------  ----       ----------  ---------- ---------  --------   --------   --------   --------   ----
stonewalling pairs accessed min: 467272 max: 467509 -- min data: 20.5 GiB mean data: 20.5 GiB time: 300.0s
WARNING: Expected aggregate file size       = 5264896000000.
WARNING: Stat() of aggregate file size      = 2461386264064.
WARNING: Using actual aggregate bytes moved = 2461386264064.
WARNING: maybe caused by deadlineForStonewalling
write     7813       174323     300.12      45.91      45.91      0.072054   300.37     0.010930   300.45     0   
Max Write: 7812.81 MiB/sec (8192.33 MB/sec)

Summary of all tests:
Operation   Max(MiB)   Min(MiB)  Mean(MiB)     StdDev   Max(OPs)   Min(OPs)  Mean(OPs)     StdDev    Mean(s) Stonewall(s) Stonewall(MiB) Test# #Tasks tPN reps fPP reord reordoff reordrand seed segcnt   blksiz    xsize aggs(MiB)   API RefNum
write        7812.81    7812.81    7812.81       0.00  174275.22  174275.22  174275.22       0.00  300.45010     300.02       7822.41     0    112 112    1   0     1        1         0    0 1000000    47008    47008 2347360.8 CEPHFS      0
Finished            : Mon Jul 13 17:52:21 2020
mdtest_easy_delete
-- started at 07/13/2020 18:21:06 --

mdtest-3.3.0+dev was launched with 112 total task(s) on 1 node(s)
Command line used: /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/bin/mdtest '-r' '-F' '-P' '-d' '/tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.13-17.30.55-scr/mdt_easy' '-n' '400000' '-u' '-L' '-a' 'CEPHFS' '--cephfs.user=admin' '--cephfs.conf=/etc/ceph/ceph.conf' '--cephfs.prefix=/tmp/cbt/mnt/cbt-cephfs-kernel/0' '-x' '/tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.13-17.30.55-scr/mdt_easy-stonewall' '-N' '1'
Path: /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.13-17.30.55-scr
FS: 430.8 TiB   Used FS: 1.9%   Inodes: 1.3 Mi   Used Inodes: 100.0%

Nodemap: 1111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111
V-0: Rank   0 Line  2166 Shifting ranks by 1 for each phase.
112 tasks, 44800000 files

SUMMARY rate: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation             :          0.000          0.000          0.000          0.000
   File stat                 :          0.000          0.000          0.000          0.000
   File read                 :          0.000          0.000          0.000          0.000
   File removal              :     100239.187     100235.551     100238.818          0.593
   Tree creation             :          0.000          0.000          0.000          0.000
   Tree removal              :          0.578          0.578          0.578          0.000

SUMMARY time: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation             :          0.000          0.000          0.000          0.000
   File stat                 :          0.000          0.000          0.000          0.000
   File read                 :          0.000          0.000          0.000          0.000
   File removal              :        446.947        446.931        446.933          0.003
   Tree creation             :          0.000          0.000          0.000          0.000
   Tree removal              :          1.730          1.730          1.730          0.000
-- finished at 07/13/2020 18:28:35 --

mdtest_easy_stat
-- started at 07/13/2020 18:12:51 --

mdtest-3.3.0+dev was launched with 112 total task(s) on 1 node(s)
Command line used: /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/bin/mdtest '-T' '-F' '-P' '-d' '/tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.13-17.30.55-scr/mdt_easy' '-n' '400000' '-u' '-L' '-a' 'CEPHFS' '--cephfs.user=admin' '--cephfs.conf=/etc/ceph/ceph.conf' '--cephfs.prefix=/tmp/cbt/mnt/cbt-cephfs-kernel/0' '-x' '/tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.13-17.30.55-scr/mdt_easy-stonewall' '-N' '1'
Path: /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.13-17.30.55-scr
FS: 430.8 TiB   Used FS: 1.9%   Inodes: 1.3 Mi   Used Inodes: 100.0%

Nodemap: 1111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111
V-0: Rank   0 Line  2166 Shifting ranks by 1 for each phase.
112 tasks, 44800000 files

SUMMARY rate: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation             :          0.000          0.000          0.000          0.000
   File stat                 :     359293.840     359230.817     359290.573          6.288
   File read                 :          0.000          0.000          0.000          0.000
   File removal              :          0.000          0.000          0.000          0.000
   Tree creation             :          0.000          0.000          0.000          0.000
   Tree removal              :          0.000          0.000          0.000          0.000

SUMMARY time: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation             :          0.000          0.000          0.000          0.000
   File stat                 :        124.711        124.689        124.690          0.002
   File read                 :          0.000          0.000          0.000          0.000
   File removal              :          0.000          0.000          0.000          0.000
   Tree creation             :          0.000          0.000          0.000          0.000
   Tree removal              :          0.000          0.000          0.000          0.000
-- finished at 07/13/2020 18:14:56 --

mdtest_easy_write
-- started at 07/13/2020 17:41:13 --

mdtest-3.3.0+dev was launched with 112 total task(s) on 1 node(s)
Command line used: /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/bin/mdtest '-Y' '-C' '-F' '-P' '-d' '/tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.13-17.30.55-scr/mdt_easy' '-n' '400000' '-u' '-L' '-a' 'CEPHFS' '--cephfs.user=admin' '--cephfs.conf=/etc/ceph/ceph.conf' '--cephfs.prefix=/tmp/cbt/mnt/cbt-cephfs-kernel/0' '-x' '/tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.13-17.30.55-scr/mdt_easy-stonewall' '-N' '1' '-W' '300'
Path: /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.13-17.30.55-scr
FS: 430.8 TiB   Used FS: 1.4%   Inodes: 0.0 Mi   Used Inodes: 100.0%

Nodemap: 1111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111
V-0: Rank   0 Line  2166 Shifting ranks by 1 for each phase.
112 tasks, 44800000 files
Continue stonewall hit min: 314501 max: 400000 avg: 384594.8 


SUMMARY rate: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation             :     123683.495     123679.038     123683.357          0.583
   File stat                 :          0.000          0.000          0.000          0.000
   File read                 :          0.000          0.000          0.000          0.000
   File removal              :          0.000          0.000          0.000          0.000
   File create (stonewall)   :             NA             NA     143397.994             NA
   Tree creation             :         18.894         18.894         18.894          0.000
   Tree removal              :          0.000          0.000          0.000          0.000

SUMMARY time: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation             :        362.228        362.215        362.215          0.002
   File stat                 :          0.000          0.000          0.000          0.000
   File read                 :          0.000          0.000          0.000          0.000
   File removal              :          0.000          0.000          0.000          0.000
   File create (stonewall)   :             NA             NA        300.385             NA
   Tree creation             :          0.053          0.053          0.053          0.000
   Tree removal              :          0.000          0.000          0.000          0.000
-- finished at 07/13/2020 17:47:16 --

mdtest_hard_delete
-- started at 07/13/2020 18:30:07 --

mdtest-3.3.0+dev was launched with 112 total task(s) on 1 node(s)
Command line used: /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/bin/mdtest '-r' '-t' '-F' '-P' '-w' '3901' '-e' '3901' '-d' '/tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.13-17.30.55-scr/mdt_hard' '-n' '100000' '-x' '/tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.13-17.30.55-scr/mdt_hard-stonewall' '-a' 'CEPHFS' '--cephfs.user=admin' '--cephfs.conf=/etc/ceph/ceph.conf' '--cephfs.prefix=/tmp/cbt/mnt/cbt-cephfs-kernel/0' '-N' '1'
Path: /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.13-17.30.55-scr
FS: 430.8 TiB   Used FS: 1.9%   Inodes: 1.3 Mi   Used Inodes: 100.0%

Nodemap: 1111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111
V-0: Rank   0 Line  2166 Shifting ranks by 1 for each phase.
112 tasks, 11200000 files

SUMMARY rate: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation             :          0.000          0.000          0.000          0.000
   File stat                 :          0.000          0.000          0.000          0.000
   File read                 :          0.000          0.000          0.000          0.000
   File removal              :      16422.277      16422.255      16422.266          0.006
   Tree creation             :          0.000          0.000          0.000          0.000
   Tree removal              :          3.404          3.404          3.404          0.000

SUMMARY time: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation             :          0.000          0.000          0.000          0.000
   File stat                 :          0.000          0.000          0.000          0.000
   File read                 :          0.000          0.000          0.000          0.000
   File removal              :        316.251        316.250        316.251          0.000
   Tree creation             :          0.000          0.000          0.000          0.000
   Tree removal              :          0.294          0.294          0.294          0.000
-- finished at 07/13/2020 18:35:25 --

mdtest_hard_read
-- started at 07/13/2020 18:29:08 --

mdtest-3.3.0+dev was launched with 112 total task(s) on 1 node(s)
Command line used: /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/bin/mdtest '-X' '-E' '-t' '-F' '-P' '-w' '3901' '-e' '3901' '-d' '/tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.13-17.30.55-scr/mdt_hard' '-n' '100000' '-x' '/tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.13-17.30.55-scr/mdt_hard-stonewall' '-a' 'CEPHFS' '--cephfs.user=admin' '--cephfs.conf=/etc/ceph/ceph.conf' '--cephfs.prefix=/tmp/cbt/mnt/cbt-cephfs-kernel/0' '-N' '1'
Path: /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.13-17.30.55-scr
FS: 430.8 TiB   Used FS: 1.9%   Inodes: 1.3 Mi   Used Inodes: 100.0%

Nodemap: 1111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111
V-0: Rank   0 Line  2166 Shifting ranks by 1 for each phase.
112 tasks, 11200000 files

SUMMARY rate: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation             :          0.000          0.000          0.000          0.000
   File stat                 :          0.000          0.000          0.000          0.000
   File read                 :      92433.744      92430.277      92431.939          1.516
   File removal              :          0.000          0.000          0.000          0.000
   Tree creation             :          0.000          0.000          0.000          0.000
   Tree removal              :          0.000          0.000          0.000          0.000

SUMMARY time: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation             :          0.000          0.000          0.000          0.000
   File stat                 :          0.000          0.000          0.000          0.000
   File read                 :         56.189         56.187         56.188          0.001
   File removal              :          0.000          0.000          0.000          0.000
   Tree creation             :          0.000          0.000          0.000          0.000
   Tree removal              :          0.000          0.000          0.000          0.000
-- finished at 07/13/2020 18:30:04 --

mdtest_hard_stat
-- started at 07/13/2020 18:20:11 --

mdtest-3.3.0+dev was launched with 112 total task(s) on 1 node(s)
Command line used: /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/bin/mdtest '-T' '-t' '-F' '-P' '-w' '3901' '-e' '3901' '-d' '/tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.13-17.30.55-scr/mdt_hard' '-n' '100000' '-x' '/tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.13-17.30.55-scr/mdt_hard-stonewall' '-a' 'CEPHFS' '--cephfs.user=admin' '--cephfs.conf=/etc/ceph/ceph.conf' '--cephfs.prefix=/tmp/cbt/mnt/cbt-cephfs-kernel/0' '-N' '1'
Path: /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.13-17.30.55-scr
FS: 430.8 TiB   Used FS: 1.9%   Inodes: 1.3 Mi   Used Inodes: 100.0%

Nodemap: 1111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111
V-0: Rank   0 Line  2166 Shifting ranks by 1 for each phase.
112 tasks, 11200000 files

SUMMARY rate: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation             :          0.000          0.000          0.000          0.000
   File stat                 :     101851.659     101830.396     101851.186          1.996
   File read                 :          0.000          0.000          0.000          0.000
   File removal              :          0.000          0.000          0.000          0.000
   Tree creation             :          0.000          0.000          0.000          0.000
   Tree removal              :          0.000          0.000          0.000          0.000

SUMMARY time: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation             :          0.000          0.000          0.000          0.000
   File stat                 :         51.002         50.991         50.992          0.001
   File read                 :          0.000          0.000          0.000          0.000
   File removal              :          0.000          0.000          0.000          0.000
   Tree creation             :          0.000          0.000          0.000          0.000
   Tree removal              :          0.000          0.000          0.000          0.000
-- finished at 07/13/2020 18:21:02 --

mdtest_hard_write
-- started at 07/13/2020 17:52:23 --

mdtest-3.3.0+dev was launched with 112 total task(s) on 1 node(s)
Command line used: /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/bin/mdtest '-Y' '-C' '-t' '-F' '-P' '-w' '3901' '-e' '3901' '-d' '/tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.13-17.30.55-scr/mdt_hard' '-n' '100000' '-x' '/tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.13-17.30.55-scr/mdt_hard-stonewall' '-a' 'CEPHFS' '--cephfs.user=admin' '--cephfs.conf=/etc/ceph/ceph.conf' '--cephfs.prefix=/tmp/cbt/mnt/cbt-cephfs-kernel/0' '-N' '1' '-W' '300'
Path: /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.13-17.30.55-scr
FS: 430.8 TiB   Used FS: 1.9%   Inodes: 1.3 Mi   Used Inodes: 100.0%

Nodemap: 1111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111
V-0: Rank   0 Line  2166 Shifting ranks by 1 for each phase.
112 tasks, 11200000 files
Continue stonewall hit min: 40050 max: 46371 avg: 43219.7 


SUMMARY rate: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation             :      16188.114      16188.063      16188.089          0.021
   File stat                 :          0.000          0.000          0.000          0.000
   File read                 :          0.000          0.000          0.000          0.000
   File removal              :          0.000          0.000          0.000          0.000
   File create (stonewall)   :             NA             NA      16136.334             NA
   Tree creation             :         22.845         22.845         22.845          0.000
   Tree removal              :          0.000          0.000          0.000          0.000

SUMMARY time: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation             :        320.826        320.825        320.826          0.000
   File stat                 :          0.000          0.000          0.000          0.000
   File read                 :          0.000          0.000          0.000          0.000
   File removal              :          0.000          0.000          0.000          0.000
   File create (stonewall)   :             NA             NA        299.981             NA
   Tree creation             :          0.044          0.044          0.044          0.000
   Tree removal              :          0.000          0.000          0.000          0.000
-- finished at 07/13/2020 17:57:44 --

result_summary
[RESULT] BW   phase 1            ior_easy_write               10.253 GiB/s : time 602.95 seconds
[RESULT] IOPS phase 1         mdtest_easy_write              123.683 kiops : time 362.23 seconds
[RESULT] BW   phase 2            ior_hard_write                7.630 GiB/s : time 300.37 seconds
[RESULT] IOPS phase 2         mdtest_hard_write               16.188 kiops : time 320.83 seconds
[RESULT] IOPS phase 3                      find              255.650 kiops : time 195.55 seconds
[RESULT] BW   phase 3             ior_easy_read               10.228 GiB/s : time 604.36 seconds
[RESULT] IOPS phase 4          mdtest_easy_stat              359.294 kiops : time 124.71 seconds
[RESULT] BW   phase 4             ior_hard_read                7.512 GiB/s : time 305.14 seconds
[RESULT] IOPS phase 5          mdtest_hard_stat              101.852 kiops : time  51.00 seconds
[RESULT] IOPS phase 6        mdtest_easy_delete              100.239 kiops : time 446.95 seconds
[RESULT] IOPS phase 7          mdtest_hard_read               92.434 kiops : time  56.19 seconds
[RESULT] IOPS phase 8        mdtest_hard_delete               16.422 kiops : time 348.49 seconds
[SCORE] Bandwidth 8.80499 GiB/s : IOPS 85.4789 kiops : TOTAL 27.4343
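
The [SCORE] line follows the IO500 scoring formula: the bandwidth score is the
geometric mean of the four BW phases, the IOPS score is the geometric mean of
the eight IOPS phases, and the total is the geometric mean of those two
scores. A quick sanity check with awk reproduces the reported values to
within rounding:

#!/bin/bash
# Recompute the IO500 score from the per-phase [RESULT] lines above.
awk 'BEGIN {
  bw   = (10.253 * 7.630 * 10.228 * 7.512)^0.25             # 4 BW phases, GiB/s
  iops = 123.683 * 16.188 * 255.650 * 359.294
  iops = (iops * 101.852 * 100.239 * 92.434 * 16.422)^0.125 # 8 IOPS phases, kiops
  printf "BW %.5f GiB/s : IOPS %.4f kiops : TOTAL %.4f\n", bw, iops, sqrt(bw * iops)
}'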