Officinalis

Institution
Client Procs Per Node
Client Operating System CentOS
Client Operating System Version CentOS Linux release 8.0.1905 (Core)
Client Kernel Version 4.18.0-147.6.el8.x86_64

DATA SERVER

Storage Type NVMe
Volatile Memory 384GB
Storage Interface NVMe
Network 4x25GbE
Software Version
OS Version CentOS Linux release 8.0.1905 (Core)

INFORMATION

Client Nodes 8
Client Total Procs 256
Metadata Nodes 8
Metadata Storage Devices 8
Data Nodes 8
Data Storage Devices 8

METADATA

Easy Write 157.85 kIOP/s
Easy Stat 665.21 kIOP/s
Easy Delete 117.31 kIOP/s
Hard Write 18.13 kIOP/s
Hard Read 69.56 kIOP/s
Hard Stat 66.61 kIOP/s
Hard Delete 15.29 kIOP/s

Submitted Files

io500
#!/bin/bash
#
# INSTRUCTIONS:
# This script takes its parameters from the same .ini file as io500 binary.

function setup_paths {
  # Locations of the benchmark binaries plus the MPI launcher settings.
  # ./utilities/prepare.sh installs all three binaries into ./bin/.
  io500_mpirun="mpirun"
  io500_mpiargs="-npernode 32 --hostfile /home/nhm/io500-app/hosts"
  io500_ior_cmd="${PWD}/bin/ior"
  io500_mdtest_cmd="${PWD}/bin/mdtest"
  io500_mdreal_cmd="${PWD}/bin/md-real-io"
}

function setup_directories {
  # Create the data and result directory trees for both the "-scr"
  # (script-driven) and "-app" (C application) runs, then apply the
  # selected CephFS subtree-pinning strategy to the mdtest-easy parent
  # directory via ceph.dir.* vxattrs.
  local workdir
  local resultdir
  local ts

  # set directories where benchmark files are created and where the results go
  # If you want to set up stripe tuning on your output directories or anything
  # similar, then this is the right place to do it.  This creates the output
  # directories for both the app run and the script run.

  # Ceph Stuff
  # NOTE(review): MAX_MDS and MPI_RANKS are hard-coded; they presumably must
  # match the cluster's max_mds setting and the mpirun rank count -- confirm.
  MAX_MDS=65
  MPI_RANKS=256
  CEPH_MDTEST_EASY_PINNING=distributed # none, export-rr, distributed, random
  CEPH_MDTEST_EASY_PINNING_RANDOM=1.0 

#  MDTEST_EASY_FILES_PER_PROC=400000
#  MDTEST_HARD_FILES_PER_PROC=100000
#  MDTEST_HARD_FILES=$(( ${MPI_RANKS} * ${MDTEST_HARD_FILES_PER_PROC} ))


  timestamp=$(date +%Y.%m.%d-%H.%M.%S)           # create a uniquifier
  # Use the timestamp as the directory suffix unless the ini explicitly
  # sets timestamp-datadir/timestamp-resultdir to False.
  [ $(get_ini_global_param timestamp-datadir True) != "False" ] &&
	ts="$timestamp" || ts="io500"
  # directory where the data will be stored
  workdir=$(get_ini_global_param datadir $PWD/datafiles)/$ts
  io500_workdir=$workdir-scr
  [ $(get_ini_global_param timestamp-resultdir True) != "False" ] &&
	ts="$timestamp" || ts="io500"
  # the directory where the output results will be kept
  resultdir=$(get_ini_global_param resultdir $PWD/results)/$ts
  io500_result_dir=$resultdir-scr

  mkdir -p $workdir-{scr,app} $resultdir-{scr,app}

  # Prepare both run flavours; the app run uses "mdtest-easy"/"mdtest-hard"
  # directory names while the script run uses "mdt_easy"/"mdt_hard".
  for pf in scr app
  do
    wd="$workdir-${pf}"
    rd="$resultdir-${pf}"

    mdt_easy_parent="${wd}/mdt_easy/test-dir.0-0"
    mdt_hard_parent="${wd}/mdt_hard/test-dir.0-0"
    mdt_hard_dir="${wd}/mdt_hard/test-dir.0-0/mdtest_tree.0"
    if [ $pf == "app" ]
    then
      mdt_easy_parent="${wd}/mdtest-easy/test-dir.0-0"
      mdt_hard_parent="${wd}/mdtest-hard/test-dir.0-0"
      mdt_hard_dir="${wd}/mdtest-hard/test-dir.0-0/mdtest_tree.0"
    fi

    # Create the result directory and "top-level" mdt parent directories
    # (Should be legal based on io500 submission rules #11)
    mkdir -p "${rd}"
    mkdir -p "${mdt_easy_parent}"
    mkdir -p "${mdt_hard_parent}"

    # *** Per-Directory Round-Robin Pinning (Minus Auth MDS) ***
    # This option likely violates rule #11 by creating the mdtest directories
    # (but directory creation time doesn't actually affect the score?)
    # Only use for testing currently.
    if [[ "${CEPH_MDTEST_EASY_PINNING}" == "export-rr" ]]
    then
      # Pin the parent to MDS rank 0, then spread each per-rank subtree
      # across the remaining MDS ranks 1..MAX_MDS-1 round-robin.
      setfattr -n ceph.dir.pin -v 0 "${mdt_easy_parent}"
      MOD=$(( ${MAX_MDS} - 1 ))
      MAXRANK=$(( ${MPI_RANKS} ))
      for (( RANK=0; RANK<${MAXRANK}; RANK++ ))
      do
        n=0
        if [ ${MOD} -gt 0 ]
        then
          n=$(( ${RANK}%${MOD} + 1 ))
        fi
        echo "MPI rank ${RANK} assigned to mds rank ${n}"
        mkdir -p "${mdt_easy_parent}/mdtest_tree.${RANK}.0"
        setfattr -n ceph.dir.pin -v ${n} "${mdt_easy_parent}/mdtest_tree.${RANK}.0"
#        setfattr -n ceph.dir.expected_files -v ${MDTEST_EASY_FILES_PER_PROC} "${mdt_easy_parent}/mdtest_tree.${RANK}.0"
      done
    # *** Distributed Pinning ***
    # Shouldn't violate rule #11 because we are working on the parent dir
    elif [[ "${CEPH_MDTEST_EASY_PINNING}" == "distributed" ]]
    then
      setfattr -n ceph.dir.pin.distributed -v 1 ${mdt_easy_parent}
    # *** Random Pinning ***
    # Shouldn't violate rule #11 because we are working on the parent dir
    elif [[ "${CEPH_MDTEST_EASY_PINNING}" == "random" ]]
    then
      setfattr -n ceph.dir.pin.random -v ${CEPH_MDTEST_EASY_PINNING_RANDOM} ${mdt_easy_parent}
    fi;

    # *** Experimental Hard Directory expected_files and "hot" hints ***
    # This option likely violates rule #11 because it pre-creates the (single!)
    # mdtest subdirectory workdir.
    # Only use for testing currently with https://github.com/ceph/ceph/pull/34574.
#    echo "Setting expected mdtest hard files to ${MDTEST_HARD_FILES}"
#    mkdir -p "${mdt_hard_dir}"
#    setfattr -n ceph.dir.expected_files -v ${MDTEST_HARD_FILES} "${mdt_hard_dir}"
#    setfattr -n ceph.dir.hot -v 1 "${mdt_hard_dir}"
  done
}

# you should not edit anything below this line
set -eo pipefail  # better error handling

# First (and only) argument: path to the .ini configuration file shared
# with the io500 binary.
io500_ini="${1:-""}"
if [[ -z "$io500_ini" ]]; then
  # BUGFIX: the usage string previously ended with a dangling space and
  # never showed the required argument.
  echo "error: ini file must be specified.  usage: $0 <config.ini>"
  exit 1
fi
if [[ ! -s "$io500_ini" ]]; then
  echo "error: ini file '$io500_ini' not found or empty"
  exit 2
fi

function get_ini_section_param() {
  # Print the value of parameter $2 found inside ini section $1 of the
  # file named by the global $io500_ini; prints an empty line when the
  # parameter is absent from that section.
  local section="$1"
  local param="$2"
  local inside=false

  while read -r LINE; do
    # strip trailing comments and normalize the first " = " to "="
    LINE=$(sed -e 's/ *#.*//' -e '1s/ *= */=/' <<<$LINE)
    # BUGFIX: the pattern must be unquoted so it is treated as a regex.
    # The original quoted "[.*]" matched only the literal string "[.*]",
    # so the scan never stopped at the next section header and could
    # return a same-named parameter from a later section.
    $inside && [[ "$LINE" =~ ^\[.*\] ]] && inside=false && break
    # quoted pattern: literal substring match of "[$section]"
    [[ -n "$section" && "$LINE" =~ "[$section]" ]] && inside=true && continue
    ! $inside && continue
    if [[ $(echo $LINE | grep "^$param *=" ) != "" ]] ; then
      # emit everything after the first '=' (leading whitespace stripped)
      echo $LINE | sed -e "s/[^=]*=[ \t]*\(.*\)/\1/"
      return
    fi
  done < $io500_ini
  echo ""
}

function get_ini_param() {
  # Resolve a parameter with fallback: try the exact section first, then
  # its parent section (name before the last '-'), then [global], and
  # finally the caller-supplied default.  Boolean spellings are
  # canonicalized to "True"/"False".
  local section="$1"
  local param="$2"
  local default="$3"

  val=$(get_ini_section_param "$section" "$param")
  if [ -z "$val" ]; then
    val="$(get_ini_section_param "${section%-*}" "$param")"
  fi
  if [ -z "$val" ]; then
    val="$(get_ini_section_param global "$param")"
  fi

  echo "${val:-$default}" |
  	sed -e 's/[Ff][Aa][Ll][Ss][Ee]/False/' -e 's/[Tt][Rr][Uu][Ee]/True/'
}

function get_ini_run_param() {
  # Decide whether a benchmark phase should run.  A section's
  # "noRun = true" flag disables it (prints "False"); any other value
  # passes the caller's default through unchanged.
  local section="$1"
  local default="$2"
  local val

  val=$(get_ini_section_param "$section" noRun)

  # logic is reversed from "noRun=TRUE" to "run=False"
  if [[ $val = [Tt][Rr][Uu][Ee] ]]; then
    echo "False"
  else
    echo "$default"
  fi
}

function get_ini_global_param() {
  # Fetch a [global] ini parameter, canonicalizing boolean spellings to
  # "True"/"False"; fall back to the supplied default when unset.
  local param="$1"
  local default="$2"
  local val

  val=$(get_ini_section_param global "$param" |
  	sed -e 's/[Ff][Aa][Ll][Ss][Ee]/False/' -e 's/[Tt][Rr][Uu][Ee]/True/')

  if [ -n "$val" ]; then
    echo "$val"
  else
    echo "$default"
  fi
}

# Decide which benchmark phases run, driven by per-phase "noRun" ini flags.
# does the write phase and enables the subsequent read
io500_run_ior_easy="$(get_ini_run_param ior-easy True)"
# does the creat phase and enables the subsequent stat
io500_run_md_easy="$(get_ini_run_param mdtest-easy True)"
# does the write phase and enables the subsequent read
io500_run_ior_hard="$(get_ini_run_param ior-hard True)"
# does the creat phase and enables the subsequent read
io500_run_md_hard="$(get_ini_run_param mdtest-hard True)"
io500_run_find="$(get_ini_run_param find True)"
io500_run_ior_easy_read="$(get_ini_run_param ior-easy-read True)"
io500_run_md_easy_stat="$(get_ini_run_param mdtest-easy-stat True)"
io500_run_ior_hard_read="$(get_ini_run_param ior-hard-read True)"
# BUGFIX: these two previously queried mdtest-easy-stat (copy-paste error),
# so the [mdtest-hard-stat] and [mdtest-hard-read] noRun flags were ignored.
io500_run_md_hard_stat="$(get_ini_run_param mdtest-hard-stat True)"
io500_run_md_hard_read="$(get_ini_run_param mdtest-hard-read True)"
# turn this off if you want to just run find by itself
io500_run_md_easy_delete="$(get_ini_run_param mdtest-easy-delete True)"
# turn this off if you want to just run find by itself
# (a duplicate of this assignment was removed)
io500_run_md_hard_delete="$(get_ini_run_param mdtest-hard-delete True)"
io500_run_mdreal="$(get_ini_run_param mdreal False)"
# attempt to clean the cache after every benchmark, useful for validating the performance results and for testing with a local node; it uses the io500_clean_cache_cmd (can be overwritten); make sure the user can write to /proc/sys/vm/drop_caches
io500_clean_cache="$(get_ini_global_param drop-caches False)"
io500_clean_cache_cmd="$(get_ini_global_param drop-caches-cmd)"
# NOTE(review): no default is passed here, so cleanup resolves to "" unless
# [cleanup] sets noRun -- confirm whether a "True"/"False" default was intended.
io500_cleanup_workdir="$(get_ini_run_param cleanup)"
# Stonewalling timer, set to 300 to be an official run; set to 0, if you never want to abort...
io500_stonewall_timer=$(get_ini_param debug stonewall-time 300)
# Choose regular for an official regular submission or scc for a Student Cluster Competition submission to execute the test cases for 30 seconds instead of 300 seconds
io500_rules="regular"

# to run this benchmark, find and edit each of these functions.  Please also
# also edit 'extra_description' function to help us collect the required data.
function main {
  # Prepare directories and binary paths, configure every phase, run the
  # benchmarks, then package the results.
  setup_directories
  setup_paths
  setup_ior_easy # required if you want a complete score
  setup_ior_hard # required if you want a complete score
  setup_mdt_easy # required if you want a complete score
  setup_mdt_hard # required if you want a complete score
  setup_find     # required if you want a complete score
  setup_mdreal   # optional

  run_benchmarks

  # Ship the system description alongside the results when it exists.
  if [[ -s "system-information.txt" ]]; then
    cp "system-information.txt" $io500_result_dir
  else
    echo "Warning: please create a system-information.txt description by"
    echo "copying the information from https://vi4io.org/io500-info-creator/"
  fi

  create_tarball
}

function setup_ior_easy {
  # Derive the ior-easy block size (in MiB, 'm' suffix stripped) and the
  # extra ior command-line flags from the [ior-easy] ini section.
  local params

  io500_ior_easy_size=$(get_ini_param ior-easy blockSize 9920000m | tr -d m)

  val=$(get_ini_param ior-easy API POSIX)
  if [ -n "$val" ]; then
    params+=" -a $val"
  fi
  val="$(get_ini_param ior-easy transferSize)"
  if [ -n "$val" ]; then
    params+=" -t $val"
  fi
  val="$(get_ini_param ior-easy hintsFileName)"
  if [ -n "$val" ]; then
    params+=" -U $val"
  fi
  val="$(get_ini_param ior-easy posix.odirect)"
  if [ "$val" = "True" ]; then
    params+=" --posix.odirect"
  fi
  val="$(get_ini_param ior-easy verbosity)"
  if [ -n "$val" ]; then
    # one -v flag per requested verbosity level
    for i in $(seq $val); do
      params+=" -v"
    done
  fi
  io500_ior_easy_params="$params"
  echo -n ""
}

function setup_mdt_easy {
  # mdtest-easy flags: unique directory per task (-u), files only at the
  # leaves (-L); per-process file count and API come from the ini.
  io500_mdtest_easy_params="-u -L"

  val=$(get_ini_param mdtest-easy n 1000000)
  if [ -n "$val" ]; then
    io500_mdtest_easy_files_per_proc="$val"
  fi
  val=$(get_ini_param mdtest-easy API POSIX)
  if [ -n "$val" ]; then
    io500_mdtest_easy_params+=" -a $val"
  fi
  val=$(get_ini_param mdtest-easy posix.odirect)
  if [ "$val" = "True" ]; then
    io500_mdtest_easy_params+=" --posix.odirect"
  fi
  echo -n ""
}

function setup_ior_hard {
  # Configure the ior-hard phase: API, per-process segment count, and
  # API-specific options, all read from the [ior-hard] ini section.
  local params

  io500_ior_hard_api=$(get_ini_param ior-hard API POSIX)
  io500_ior_hard_writes_per_proc="$(get_ini_param ior-hard segmentCount 10000000)"
  val="$(get_ini_param ior-hard hintsFileName)"
  [ -n "$val" ] && params+=" -U $val"
  val="$(get_ini_param ior-hard posix.odirect)"
  [ "$val" = "True" ] && params+=" --posix.odirect"
  # BUGFIX: read verbosity from [ior-hard]; this previously queried
  # [ior-easy] by copy-paste mistake, so ior-hard verbosity was ignored.
  val="$(get_ini_param ior-hard verbosity)"
  if [ -n "$val" ]; then
    # one -v flag per requested verbosity level
    for i in $(seq $val); do
      params+=" -v"
    done
  fi
  io500_ior_hard_api_specific_options="$params"
  echo -n ""
}

function setup_mdt_hard {
  # mdtest-hard: per-process file count and API from the ini; no extra
  # API-specific options are used.
  val=$(get_ini_param mdtest-hard n 1000000)
  if [ -n "$val" ]; then
    io500_mdtest_hard_files_per_proc="$val"
  fi
  io500_mdtest_hard_api="$(get_ini_param mdtest-hard API POSIX)"
  io500_mdtest_hard_api_specific_options=""
  echo -n ""
}

function setup_find {
  # Use the bundled parallel pfind (run under MPI) unless the ini names
  # an external find script; pass through any extra arguments either way.
  val="$(get_ini_param find external-script)"
  if [ -z "$val" ]; then
    io500_find_mpi="True"
    io500_find_cmd="$PWD/bin/pfind"
  else
    io500_find_cmd="$val"
  fi
  # uses stonewalling, run pfind
  io500_find_cmd_args="$(get_ini_param find external-extra-args)"
  echo -n ""
}

function setup_mdreal {
  # md-real-io is optional and off by default; nothing to configure.
  :
}

function run_benchmarks {
  # Run the shell-driven benchmark (io500_fixed.sh) and the C application
  # in a random order to avoid systematic bias between the two runs.
  local app_first=$((RANDOM % 100))
  local app_rc=0

  # run the app and C version in random order to try and avoid bias
  # NOTE(review): when app_first < 50 the failed (( )) test makes
  # '|| app_rc=$?' assign 1, but that value is only returned on the
  # app-first path below, so it appears harmless -- confirm.
  (( app_first >= 50 )) && $io500_mpirun $io500_mpiargs $PWD/io500 $io500_ini --timestamp $timestamp || app_rc=$?

  # Important: source the io500_fixed.sh script.  Do not change it. If you
  # discover a need to change it, please email the mailing list to discuss.
  # NOTE(review): sourcing inside a pipeline runs it in a subshell, so any
  # variables io500_fixed.sh sets do not persist here -- confirm intended.
  source build/io500-dev/utilities/io500_fixed.sh 2>&1 |
    tee $io500_result_dir/io-500-summary.$timestamp.txt

  # If the app already ran first, propagate its exit status and stop.
  (( $app_first >= 50 )) && return $app_rc

  echo "The io500.sh was run"
  echo
  echo "Running the C version of the benchmark now"
  # run the app and C version in random order to try and avoid bias
  $io500_mpirun $io500_mpiargs $PWD/io500 $io500_ini --timestamp $timestamp
}

create_tarball() {
  # Bundle the -app and -scr result trees, together with this script and
  # the ini file, into a single .tgz next to the result directories.
  local sourcedir
  local fname
  local tarball

  sourcedir=$(dirname "$io500_result_dir")
  fname=$(basename "${io500_result_dir%-scr}")
  tarball="$sourcedir/io500-$HOSTNAME-$fname.tgz"

  cp -v "$0" "$io500_ini" "$io500_result_dir"
  tar czf "$tarball" -C "$sourcedir" "$fname-app" "$fname-scr"
  echo "Created result tarball $tarball"
}

# Information fields; these provide information about your system hardware
# Use https://vi4io.org/io500-info-creator/ to generate information about
# your hardware that you want to include publicly!
function extra_description {
  # UPDATE: Please add your information into "system-information.txt" pasting the output of the info-creator
  # EXAMPLE:
  # io500_info_system_name='xxx'
  # DO NOT ADD IT HERE
  # ':' is the shell no-op builtin; this hook intentionally does nothing.
  :
}

# Entry point: run the full benchmark workflow defined above.
main
ior_easy_read
IOR-3.3.0+dev: MPI Coordinated Test of Parallel I/O
Began               : Wed Jul  8 14:47:49 2020
Command line        : /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/bin/ior -r -R -a CEPHFS --cephfs.user=admin --cephfs.conf=/etc/ceph/ceph.conf --cephfs.prefix=/tmp/cbt/mnt/cbt-cephfs-kernel/0 -t 2m -b 9920000m -F -i 1 -C -Q 1 -g -G 27 -k -e -o /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.08-13.32.59-scr/ior_easy/ior_file_easy -O stoneWallingStatusFile=/tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.08-13.32.59-scr/ior_easy/stonewall
Machine             : Linux o03
TestID              : 0
StartTime           : Wed Jul  8 14:47:49 2020
Path                : /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.08-13.32.59-scr/ior_easy
FS                  : 429.8 TiB   Used FS: 7.2%   Inodes: 72.2 Mi   Used Inodes: 100.0%

Options: 
api                 : CEPHFS
apiVersion          : 
test filename       : /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.08-13.32.59-scr/ior_easy/ior_file_easy
access              : file-per-process
type                : independent
segments            : 1
ordering in a file  : sequential
ordering inter file : constant task offset
task offset         : 1
nodes               : 8
tasks               : 256
clients per node    : 32
repetitions         : 1
xfersize            : 2 MiB
blocksize           : 9.46 TiB
aggregate filesize  : 2421.88 TiB

Results: 

access    bw(MiB/s)  IOPS       Latency(s)  block(KiB) xfer(KiB)  open(s)    wr/rd(s)   close(s)   total(s)   iter
------    ---------  ----       ----------  ---------- ---------  --------   --------   --------   --------   ----
WARNING: Expected aggregate file size       = 2662879723520000.
WARNING: Stat() of aggregate file size      = 30101278294016.
WARNING: Using actual aggregate bytes moved = 30101278294016.
read      69860      34947      0.000083    10158080000 2048.00    0.193327   410.72     0.000712   410.92     0   
Max Read:  69860.27 MiB/sec (73253.80 MB/sec)

Summary of all tests:
Operation   Max(MiB)   Min(MiB)  Mean(MiB)     StdDev   Max(OPs)   Min(OPs)  Mean(OPs)     StdDev    Mean(s) Stonewall(s) Stonewall(MiB) Test# #Tasks tPN reps fPP reord reordoff reordrand seed segcnt   blksiz    xsize aggs(MiB)   API RefNum
read        69860.27   69860.27   69860.27       0.00   34930.14   34930.14   34930.14       0.00  410.91761         NA            NA     0    256  32    1   1     1        1         0    0      1 10401873920000  2097152 28706816.0 CEPHFS      0
Finished            : Wed Jul  8 14:54:40 2020
ior_easy_write
IOR-3.3.0+dev: MPI Coordinated Test of Parallel I/O
Began               : Wed Jul  8 14:20:55 2020
Command line        : /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/bin/ior -w -a CEPHFS --cephfs.user=admin --cephfs.conf=/etc/ceph/ceph.conf --cephfs.prefix=/tmp/cbt/mnt/cbt-cephfs-kernel/0 -t 2m -b 9920000m -F -i 1 -C -Q 1 -g -G 27 -k -e -o /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.08-13.32.59-scr/ior_easy/ior_file_easy -O stoneWallingStatusFile=/tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.08-13.32.59-scr/ior_easy/stonewall -O stoneWallingWearOut=1 -D 300
Machine             : Linux o03
TestID              : 0
StartTime           : Wed Jul  8 14:20:55 2020
Path                : /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.08-13.32.59-scr/ior_easy
FS                  : 429.9 TiB   Used FS: 7.0%   Inodes: 7.7 Mi   Used Inodes: 100.0%

Options: 
api                 : CEPHFS
apiVersion          : 
test filename       : /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.08-13.32.59-scr/ior_easy/ior_file_easy
access              : file-per-process
type                : independent
segments            : 1
ordering in a file  : sequential
ordering inter file : constant task offset
task offset         : 1
nodes               : 8
tasks               : 256
clients per node    : 32
repetitions         : 1
xfersize            : 2 MiB
blocksize           : 9.46 TiB
aggregate filesize  : 2421.88 TiB
stonewallingTime    : 300
stoneWallingWearOut : 1

Results: 

access    bw(MiB/s)  IOPS       Latency(s)  block(KiB) xfer(KiB)  open(s)    wr/rd(s)   close(s)   total(s)   iter
------    ---------  ----       ----------  ---------- ---------  --------   --------   --------   --------   ----
stonewalling pairs accessed min: 23104 max: 56068 -- min data: 45.1 GiB mean data: 79.9 GiB time: 301.5s
WARNING: Expected aggregate file size       = 2662879723520000.
WARNING: Stat() of aggregate file size      = 30101278294016.
WARNING: Using actual aggregate bytes moved = 30101278294016.
WARNING: maybe caused by deadlineForStonewalling
write     61852      30928      0.000075    10158080000 2048.00    0.029429   464.09     0.000762   464.12     0   
Max Write: 61851.84 MiB/sec (64856.36 MB/sec)

Summary of all tests:
Operation   Max(MiB)   Min(MiB)  Mean(MiB)     StdDev   Max(OPs)   Min(OPs)  Mean(OPs)     StdDev    Mean(s) Stonewall(s) Stonewall(MiB) Test# #Tasks tPN reps fPP reord reordoff reordrand seed segcnt   blksiz    xsize aggs(MiB)   API RefNum
write       61851.84   61851.84   61851.84       0.00   30925.92   30925.92   30925.92       0.00  464.12226     301.53      69424.53     0    256  32    1   1     1        1         0    0      1 10401873920000  2097152 28706816.0 CEPHFS      0
Finished            : Wed Jul  8 14:28:39 2020
ior_hard_read
IOR-3.3.0+dev: MPI Coordinated Test of Parallel I/O
Began               : Wed Jul  8 14:56:15 2020
Command line        : /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/bin/ior -r -R -s 1000000 -a CEPHFS --cephfs.user=admin --cephfs.conf=/etc/ceph/ceph.conf --cephfs.prefix=/tmp/cbt/mnt/cbt-cephfs-kernel/0 -i 1 -C -Q 1 -g -G 27 -k -e -t 47008 -b 47008 -o /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.08-13.32.59-scr/ior_hard/IOR_file -O stoneWallingStatusFile=/tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.08-13.32.59-scr/ior_hard/stonewall
Machine             : Linux o03
TestID              : 0
StartTime           : Wed Jul  8 14:56:15 2020
Path                : /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.08-13.32.59-scr/ior_hard
FS                  : 429.8 TiB   Used FS: 7.2%   Inodes: 72.2 Mi   Used Inodes: 100.0%

Options: 
api                 : CEPHFS
apiVersion          : 
test filename       : /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.08-13.32.59-scr/ior_hard/IOR_file
access              : single-shared-file
type                : independent
segments            : 1000000
ordering in a file  : sequential
ordering inter file : constant task offset
task offset         : 1
nodes               : 8
tasks               : 256
clients per node    : 32
repetitions         : 1
xfersize            : 47008 bytes
blocksize           : 47008 bytes
aggregate filesize  : 10.94 TiB

Results: 

access    bw(MiB/s)  IOPS       Latency(s)  block(KiB) xfer(KiB)  open(s)    wr/rd(s)   close(s)   total(s)   iter
------    ---------  ----       ----------  ---------- ---------  --------   --------   --------   --------   ----
WARNING: Expected aggregate file size       = 12034048000000.
WARNING: Stat() of aggregate file size      = 3798980476928.
WARNING: Using actual aggregate bytes moved = 3798980476928.
read      14011      314057     257.33      45.91      45.91      1.26       257.33     0.000264   258.59     0   
Max Read:  14010.77 MiB/sec (14691.35 MB/sec)

Summary of all tests:
Operation   Max(MiB)   Min(MiB)  Mean(MiB)     StdDev   Max(OPs)   Min(OPs)  Mean(OPs)     StdDev    Mean(s) Stonewall(s) Stonewall(MiB) Test# #Tasks tPN reps fPP reord reordoff reordrand seed segcnt   blksiz    xsize aggs(MiB)   API RefNum
read        14010.77   14010.77   14010.77       0.00  312528.80  312528.80  312528.80       0.00  258.58614         NA            NA     0    256  32    1   0     1        1         0    0 1000000    47008    47008 3622990.0 CEPHFS      0
Finished            : Wed Jul  8 15:00:33 2020
ior_hard_write
IOR-3.3.0+dev: MPI Coordinated Test of Parallel I/O
Began               : Wed Jul  8 14:35:10 2020
Command line        : /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/bin/ior -w -s 1000000 -a CEPHFS --cephfs.user=admin --cephfs.conf=/etc/ceph/ceph.conf --cephfs.prefix=/tmp/cbt/mnt/cbt-cephfs-kernel/0 -i 1 -C -Q 1 -g -G 27 -k -e -t 47008 -b 47008 -o /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.08-13.32.59-scr/ior_hard/IOR_file -O stoneWallingStatusFile=/tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.08-13.32.59-scr/ior_hard/stonewall -O stoneWallingWearOut=1 -D 300
Machine             : Linux o03
TestID              : 0
StartTime           : Wed Jul  8 14:35:10 2020
Path                : /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.08-13.32.59-scr/ior_hard
FS                  : 429.9 TiB   Used FS: 6.4%   Inodes: 62.6 Mi   Used Inodes: 100.0%

Options: 
api                 : CEPHFS
apiVersion          : 
test filename       : /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.08-13.32.59-scr/ior_hard/IOR_file
access              : single-shared-file
type                : independent
segments            : 1000000
ordering in a file  : sequential
ordering inter file : constant task offset
task offset         : 1
nodes               : 8
tasks               : 256
clients per node    : 32
repetitions         : 1
xfersize            : 47008 bytes
blocksize           : 47008 bytes
aggregate filesize  : 10.94 TiB
stonewallingTime    : 300
stoneWallingWearOut : 1

Results: 

access    bw(MiB/s)  IOPS       Latency(s)  block(KiB) xfer(KiB)  open(s)    wr/rd(s)   close(s)   total(s)   iter
------    ---------  ----       ----------  ---------- ---------  --------   --------   --------   --------   ----
stonewalling pairs accessed min: 293913 max: 315686 -- min data: 12.9 GiB mean data: 13.7 GiB time: 300.1s
WARNING: Expected aggregate file size       = 12034048000000.
WARNING: Stat() of aggregate file size      = 3798980476928.
WARNING: Using actual aggregate bytes moved = 3798980476928.
WARNING: maybe caused by deadlineForStonewalling
write     11324      252813     300.66      45.91      45.91      0.259972   319.67     0.000547   319.93     0   
Max Write: 11324.47 MiB/sec (11874.56 MB/sec)

Summary of all tests:
Operation   Max(MiB)   Min(MiB)  Mean(MiB)     StdDev   Max(OPs)   Min(OPs)  Mean(OPs)     StdDev    Mean(s) Stonewall(s) Stonewall(MiB) Test# #Tasks tPN reps fPP reord reordoff reordrand seed segcnt   blksiz    xsize aggs(MiB)   API RefNum
write       11324.47   11324.47   11324.47       0.00  252607.27  252607.27  252607.27       0.00  319.92593     300.14      11933.52     0    256  32    1   0     1        1         0    0 1000000    47008    47008 3622990.0 CEPHFS      0
Finished            : Wed Jul  8 14:40:31 2020
mdtest_easy_delete
-- started at 07/08/2020 15:02:09 --

mdtest-3.3.0+dev was launched with 256 total task(s) on 8 node(s)
Command line used: /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/bin/mdtest '-r' '-F' '-P' '-d' '/tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.08-13.32.59-scr/mdt_easy' '-n' '240000' '-u' '-L' '-a' 'CEPHFS' '--cephfs.user=admin' '--cephfs.conf=/etc/ceph/ceph.conf' '--cephfs.prefix=/tmp/cbt/mnt/cbt-cephfs-kernel/0' '-x' '/tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.08-13.32.59-scr/mdt_easy-stonewall' '-N' '1'
Path: /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.08-13.32.59-scr
FS: 429.8 TiB   Used FS: 7.2%   Inodes: 0.1 Mi   Used Inodes: 100.0%

Nodemap: 1111111111111111111111111111111100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
V-0: Rank   0 Line  2166 Shifting ranks by 32 for each phase.
256 tasks, 61440000 files

SUMMARY rate: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation             :          0.000          0.000          0.000          0.000
   File stat                 :          0.000          0.000          0.000          0.000
   File read                 :          0.000          0.000          0.000          0.000
   File removal              :     117305.025     117304.459     117304.925          0.095
   Tree creation             :          0.000          0.000          0.000          0.000
   Tree removal              :          0.379          0.379          0.379          0.000

SUMMARY time: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation             :          0.000          0.000          0.000          0.000
   File stat                 :          0.000          0.000          0.000          0.000
   File read                 :          0.000          0.000          0.000          0.000
   File removal              :        523.765        523.763        523.763          0.000
   Tree creation             :          0.000          0.000          0.000          0.000
   Tree removal              :          2.637          2.637          2.637          0.000
-- finished at 07/08/2020 15:10:55 --

mdtest_easy_stat
-- started at 07/08/2020 14:54:41 --

mdtest-3.3.0+dev was launched with 256 total task(s) on 8 node(s)
Command line used: /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/bin/mdtest '-T' '-F' '-P' '-d' '/tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.08-13.32.59-scr/mdt_easy' '-n' '240000' '-u' '-L' '-a' 'CEPHFS' '--cephfs.user=admin' '--cephfs.conf=/etc/ceph/ceph.conf' '--cephfs.prefix=/tmp/cbt/mnt/cbt-cephfs-kernel/0' '-x' '/tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.08-13.32.59-scr/mdt_easy-stonewall' '-N' '1'
Path: /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.08-13.32.59-scr
FS: 429.8 TiB   Used FS: 7.2%   Inodes: 0.1 Mi   Used Inodes: 100.0%

Nodemap: 1111111111111111111111111111111100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
V-0: Rank   0 Line  2166 Shifting ranks by 32 for each phase.
256 tasks, 61440000 files

SUMMARY rate: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation             :          0.000          0.000          0.000          0.000
   File stat                 :     665211.877     665167.687     665205.449          9.911
   File read                 :          0.000          0.000          0.000          0.000
   File removal              :          0.000          0.000          0.000          0.000
   Tree creation             :          0.000          0.000          0.000          0.000
   Tree removal              :          0.000          0.000          0.000          0.000

SUMMARY time: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation             :          0.000          0.000          0.000          0.000
   File stat                 :         92.368         92.362         92.362          0.001
   File read                 :          0.000          0.000          0.000          0.000
   File removal              :          0.000          0.000          0.000          0.000
   Tree creation             :          0.000          0.000          0.000          0.000
   Tree removal              :          0.000          0.000          0.000          0.000
-- finished at 07/08/2020 14:56:14 --

mdtest_easy_write
-- started at 07/08/2020 14:28:40 --

mdtest-3.3.0+dev was launched with 256 total task(s) on 8 node(s)
Command line used: /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/bin/mdtest '-Y' '-C' '-F' '-P' '-d' '/tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.08-13.32.59-scr/mdt_easy' '-n' '240000' '-u' '-L' '-a' 'CEPHFS' '--cephfs.user=admin' '--cephfs.conf=/etc/ceph/ceph.conf' '--cephfs.prefix=/tmp/cbt/mnt/cbt-cephfs-kernel/0' '-x' '/tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.08-13.32.59-scr/mdt_easy-stonewall' '-N' '1' '-W' '300'
Path: /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.08-13.32.59-scr
FS: 429.9 TiB   Used FS: 6.4%   Inodes: 0.0 Mi   Used Inodes: 100.0%

Nodemap: 1111111111111111111111111111111100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
V-0: Rank   0 Line  2166 Shifting ranks by 32 for each phase.
256 tasks, 61440000 files
Continue stonewall hit min: 185472 max: 240000 avg: 235462.2 


SUMMARY rate: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation             :     157848.753     157848.576     157848.697          0.035
   File stat                 :          0.000          0.000          0.000          0.000
   File read                 :          0.000          0.000          0.000          0.000
   File removal              :          0.000          0.000          0.000          0.000
   File create (stonewall)   :             NA             NA     200812.753             NA
   Tree creation             :         13.727         13.727         13.727          0.000
   Tree removal              :          0.000          0.000          0.000          0.000

SUMMARY time: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation             :        389.234        389.233        389.233          0.000
   File stat                 :          0.000          0.000          0.000          0.000
   File read                 :          0.000          0.000          0.000          0.000
   File removal              :          0.000          0.000          0.000          0.000
   File create (stonewall)   :             NA             NA        300.172             NA
   Tree creation             :          0.073          0.073          0.073          0.000
   Tree removal              :          0.000          0.000          0.000          0.000
-- finished at 07/08/2020 14:35:09 --

mdtest_hard_delete
-- started at 07/08/2020 15:12:56 --

mdtest-3.3.0+dev was launched with 256 total task(s) on 8 node(s)
Command line used: /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/bin/mdtest '-r' '-t' '-F' '-P' '-w' '3901' '-e' '3901' '-d' '/tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.08-13.32.59-scr/mdt_hard' '-n' '100000' '-x' '/tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.08-13.32.59-scr/mdt_hard-stonewall' '-a' 'CEPHFS' '--cephfs.user=admin' '--cephfs.conf=/etc/ceph/ceph.conf' '--cephfs.prefix=/tmp/cbt/mnt/cbt-cephfs-kernel/0' '-N' '1'
Path: /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.08-13.32.59-scr
FS: 429.8 TiB   Used FS: 7.2%   Inodes: 0.1 Mi   Used Inodes: 100.0%

Nodemap: 1111111111111111111111111111111100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
V-0: Rank   0 Line  2166 Shifting ranks by 32 for each phase.
256 tasks, 25600000 files

SUMMARY rate: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation             :          0.000          0.000          0.000          0.000
   File stat                 :          0.000          0.000          0.000          0.000
   File read                 :          0.000          0.000          0.000          0.000
   File removal              :      15295.214      15295.200      15295.208          0.003
   Tree creation             :          0.000          0.000          0.000          0.000
   Tree removal              :          0.344          0.344          0.344          0.000

SUMMARY time: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation             :          0.000          0.000          0.000          0.000
   File stat                 :          0.000          0.000          0.000          0.000
   File read                 :          0.000          0.000          0.000          0.000
   File removal              :        405.209        405.209        405.209          0.000
   Tree creation             :          0.000          0.000          0.000          0.000
   Tree removal              :          2.904          2.904          2.904          0.000
-- finished at 07/08/2020 15:19:45 --

mdtest_hard_read
-- started at 07/08/2020 15:11:26 --

mdtest-3.3.0+dev was launched with 256 total task(s) on 8 node(s)
Command line used: /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/bin/mdtest '-X' '-E' '-t' '-F' '-P' '-w' '3901' '-e' '3901' '-d' '/tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.08-13.32.59-scr/mdt_hard' '-n' '100000' '-x' '/tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.08-13.32.59-scr/mdt_hard-stonewall' '-a' 'CEPHFS' '--cephfs.user=admin' '--cephfs.conf=/etc/ceph/ceph.conf' '--cephfs.prefix=/tmp/cbt/mnt/cbt-cephfs-kernel/0' '-N' '1'
Path: /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.08-13.32.59-scr
FS: 429.8 TiB   Used FS: 7.2%   Inodes: 0.1 Mi   Used Inodes: 100.0%

Nodemap: 1111111111111111111111111111111100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
V-0: Rank   0 Line  2166 Shifting ranks by 32 for each phase.
256 tasks, 25600000 files

SUMMARY rate: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation             :          0.000          0.000          0.000          0.000
   File stat                 :          0.000          0.000          0.000          0.000
   File read                 :      69562.931      69562.522      69562.834          0.077
   File removal              :          0.000          0.000          0.000          0.000
   Tree creation             :          0.000          0.000          0.000          0.000
   Tree removal              :          0.000          0.000          0.000          0.000

SUMMARY time: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation             :          0.000          0.000          0.000          0.000
   File stat                 :          0.000          0.000          0.000          0.000
   File read                 :         89.096         89.096         89.096          0.000
   File removal              :          0.000          0.000          0.000          0.000
   Tree creation             :          0.000          0.000          0.000          0.000
   Tree removal              :          0.000          0.000          0.000          0.000
-- finished at 07/08/2020 15:12:55 --

mdtest_hard_stat
-- started at 07/08/2020 15:00:35 --

mdtest-3.3.0+dev was launched with 256 total task(s) on 8 node(s)
Command line used: /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/bin/mdtest '-T' '-t' '-F' '-P' '-w' '3901' '-e' '3901' '-d' '/tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.08-13.32.59-scr/mdt_hard' '-n' '100000' '-x' '/tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.08-13.32.59-scr/mdt_hard-stonewall' '-a' 'CEPHFS' '--cephfs.user=admin' '--cephfs.conf=/etc/ceph/ceph.conf' '--cephfs.prefix=/tmp/cbt/mnt/cbt-cephfs-kernel/0' '-N' '1'
Path: /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.08-13.32.59-scr
FS: 429.8 TiB   Used FS: 7.2%   Inodes: 0.1 Mi   Used Inodes: 100.0%

Nodemap: 1111111111111111111111111111111100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
V-0: Rank   0 Line  2166 Shifting ranks by 32 for each phase.
256 tasks, 25600000 files

SUMMARY rate: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation             :          0.000          0.000          0.000          0.000
   File stat                 :      66614.771      66612.546      66614.328          0.513
   File read                 :          0.000          0.000          0.000          0.000
   File removal              :          0.000          0.000          0.000          0.000
   Tree creation             :          0.000          0.000          0.000          0.000
   Tree removal              :          0.000          0.000          0.000          0.000

SUMMARY time: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation             :          0.000          0.000          0.000          0.000
   File stat                 :         93.042         93.039         93.039          0.001
   File read                 :          0.000          0.000          0.000          0.000
   File removal              :          0.000          0.000          0.000          0.000
   Tree creation             :          0.000          0.000          0.000          0.000
   Tree removal              :          0.000          0.000          0.000          0.000
-- finished at 07/08/2020 15:02:08 --

mdtest_hard_write
-- started at 07/08/2020 14:40:32 --

mdtest-3.3.0+dev was launched with 256 total task(s) on 8 node(s)
Command line used: /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/bin/mdtest '-Y' '-C' '-t' '-F' '-P' '-w' '3901' '-e' '3901' '-d' '/tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.08-13.32.59-scr/mdt_hard' '-n' '100000' '-x' '/tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.08-13.32.59-scr/mdt_hard-stonewall' '-a' 'CEPHFS' '--cephfs.user=admin' '--cephfs.conf=/etc/ceph/ceph.conf' '--cephfs.prefix=/tmp/cbt/mnt/cbt-cephfs-kernel/0' '-N' '1' '-W' '300'
Path: /tmp/cbt/mnt/cbt-cephfs-kernel/0/io500/io500-app/datafiles/2020.07.08-13.32.59-scr
FS: 429.8 TiB   Used FS: 7.2%   Inodes: 0.1 Mi   Used Inodes: 100.0%

Nodemap: 1111111111111111111111111111111100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
V-0: Rank   0 Line  2166 Shifting ranks by 32 for each phase.
256 tasks, 25600000 files
Continue stonewall hit min: 19264 max: 24210 avg: 21938.9 


SUMMARY rate: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation             :      18133.744      18133.463      18133.697          0.056
   File stat                 :          0.000          0.000          0.000          0.000
   File read                 :          0.000          0.000          0.000          0.000
   File removal              :          0.000          0.000          0.000          0.000
   File create (stonewall)   :             NA             NA      18727.261             NA
   Tree creation             :          9.277          9.277          9.277          0.000
   Tree removal              :          0.000          0.000          0.000          0.000

SUMMARY time: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation             :        341.786        341.780        341.781          0.001
   File stat                 :          0.000          0.000          0.000          0.000
   File read                 :          0.000          0.000          0.000          0.000
   File removal              :          0.000          0.000          0.000          0.000
   File create (stonewall)   :             NA             NA        299.903             NA
   Tree creation             :          0.108          0.108          0.108          0.000
   Tree removal              :          0.000          0.000          0.000          0.000
-- finished at 07/08/2020 14:46:14 --

result_summary
[RESULT] BW   phase 1            ior_easy_write               60.402 GiB/s : time 464.09 seconds
[RESULT] IOPS phase 1         mdtest_easy_write              157.849 kiops : time 389.23 seconds
[RESULT] BW   phase 2            ior_hard_write               11.059 GiB/s : time 319.67 seconds
[RESULT] IOPS phase 2         mdtest_hard_write               18.134 kiops : time 341.79 seconds
[RESULT] IOPS phase 3                      find              718.830 kiops : time  94.09 seconds
[RESULT] BW   phase 3             ior_easy_read               68.223 GiB/s : time 410.72 seconds
[RESULT] IOPS phase 4          mdtest_easy_stat              665.212 kiops : time  92.37 seconds
[RESULT] BW   phase 4             ior_hard_read               13.683 GiB/s : time 257.33 seconds
[RESULT] IOPS phase 5          mdtest_hard_stat               66.615 kiops : time  93.04 seconds
[RESULT] IOPS phase 6        mdtest_easy_delete              117.305 kiops : time 523.77 seconds
[RESULT] IOPS phase 7          mdtest_hard_read               69.563 kiops : time  89.10 seconds
[RESULT] IOPS phase 8        mdtest_hard_delete               15.295 kiops : time 410.63 seconds
[SCORE] Bandwidth 28.1004 GiB/s : IOPS 101.629 kiops : TOTAL 53.4398