Oakforest-PACS

Institution JCAHPC
Client Procs Per Node
Client Operating System
Client Operating System Version
Client Kernel Version

DATA SERVER

Storage Type
Volatile Memory
Storage Interface
Network
Software Version
OS Version

INFORMATION

Client Nodes 256
Client Total Procs 8,192
Metadata Nodes 0
Metadata Storage Devices 0
Data Nodes 0
Data Storage Devices 0

METADATA

Easy Write 56.39 kIOP/s
Easy Stat 157.21 kIOP/s
Easy Delete 36.12 kIOP/s
Hard Write 39.43 kIOP/s
Hard Read 87.17 kIOP/s
Hard Stat 100.47 kIOP/s
Hard Delete 63.62 kIOP/s

Submitted Files

io500
#!/bin/bash
# PJM directives for the Fujitsu batch scheduler: 256 flat-mode nodes,
# 8192 MPI processes, 2-hour wall limit, group xg17i000, merged stderr (-j).
#PJM -L rscgrp=regular-flat
#PJM -L node=256
#PJM --mpi proc=8192
#PJM -L elapse=2:00:00
#PJM -g xg17i000
#PJM -j
#
# INSTRUCTIONS:
# Edit this file as needed for your machine.
# This simplified version is just for running on a single node.
# It is a simplified version of the site-configs/sandia/startup.sh which include SLURM directives.
# Most of the variables set in here are needed for io500_fixed.sh which gets sourced at the end of this.

set -euo pipefail  # better error handling

# Base directory of the io-500-dev checkout; binaries, data and results live under it.
DIR=/work/xg17i000/x10007/io-500-dev
# Exclude these logical cores from Intel MPI process pinning
# (presumably reserved for OS/system threads on each node — confirm for this machine).
export I_MPI_PIN_PROCESSOR_EXCLUDE_LIST=0,68,136,204

# turn these to True successively while you debug and tune this benchmark.
# for each one that you turn to true, go and edit the appropriate function.
# to find the function name, see the 'main' function.
# These are listed in the order that they run.
io500_run_ior_easy="True" # does the write phase and enables the subsequent read
io500_run_md_easy="True"  # does the creat phase and enables the subsequent stat
io500_run_ior_hard="True" # does the write phase and enables the subsequent read
io500_run_md_hard="True"  # does the creat phase and enables the subsequent read
io500_run_find="True"
io500_run_ior_easy_read="True"
io500_run_md_easy_stat="True"
io500_run_ior_hard_read="True"
io500_run_md_hard_stat="True"
io500_run_md_hard_read="True"
io500_run_md_easy_delete="True" # turn this off if you want to just run find by itself
io500_run_md_hard_delete="True" # turn this off if you want to just run find by itself
io500_run_mdreal="False"  # this one is optional
io500_cleanup_workdir="False"  # this flag is currently ignored. You'll need to clean up your data files manually if you want to.

function main {
  # Run every setup step in order, then launch the benchmark phases.
  # Order matters: setup_find reads io500_result_dir, which is created
  # by setup_directories.
  local step
  for step in \
      setup_directories \
      setup_paths \
      setup_ior_easy \
      setup_ior_hard \
      setup_mdt_easy \
      setup_mdt_hard \
      setup_find \
      setup_mdreal; do
    "$step"   # setup_mdreal is optional; the rest are required for a complete score
  done
  run_benchmarks
}

function setup_directories {
  # Create the per-run working and results directories, keyed by a timestamp
  # so repeated runs never collide, and apply Lustre striping to the IOR
  # subdirectories (narrow for file-per-process "easy", wide for the
  # single-shared-file "hard" workload).
  timestamp=$(date +%Y.%m.%d-%H.%M.%S)             # create a uniquifier
  io500_workdir="$DIR/datafiles/io500.$timestamp"  # directory where the data will be stored
  io500_result_dir="$DIR/results/$timestamp"       # the directory where the output results will be kept
  mkdir -p -- "$io500_workdir" "$io500_result_dir"
  mkdir -p -- "$io500_workdir/ior_easy"
  lfs setstripe --stripe-count 2 "$io500_workdir/ior_easy"
  mkdir -p -- "$io500_workdir/ior_hard"
  lfs setstripe --stripe-count 100 "$io500_workdir/ior_hard"
}

function setup_paths {
  # Locations of the benchmark binaries (placed in ./bin/ by
  # ./utilities/prepare.sh) plus the MPI launcher and its arguments.
  local bindir="$DIR/bin"
  io500_ior_cmd="$bindir/ior"
  io500_mdtest_cmd="$bindir/mdtest"
  io500_mdreal_cmd="$bindir/md-real-io"
  io500_mpirun="mpiexec.hydra"
  io500_mpiargs="-n ${PJM_MPI_PROC}"
}

function setup_ior_easy {
  # IOR "easy" parameters: 2 MiB transfers (-t 2048k), 4 GiB block per
  # process (-b 4g), file per process (-F).
  # NOTE(review): the original comment said "2 GB per proc", but -b 4g
  # requests 4 GiB per process — corrected below.
  io500_ior_easy_params="-t 2048k -b 4g -F" # 2M writes, 4 GiB per proc, file per proc
}

function setup_mdt_easy {
  # mdtest "easy" settings: each rank uses a unique directory (-u) and
  # creates files only at the leaves of the tree (-L).
  io500_mdtest_easy_files_per_proc=2500
  io500_mdtest_easy_params="-u -L"
}

function setup_ior_hard {
  # Number of segments each rank writes to the shared "hard" file.
  io500_ior_hard_writes_per_proc=2200
}

function setup_mdt_hard {
  # Files created per rank in the mdtest "hard" (shared directory) phase.
  io500_mdtest_hard_files_per_proc=1000
}

function setup_find {
  #
  # setup the find command. This is an area where innovation is allowed.
  #    There are three default options provided. One is a serial find, one is python
  #    parallel version, one is C parallel version.  This run uses the C parallel
  #    version (pfind); the serial option is very slow.
  #    If a custom approach is used, please provide enough info so others can reproduce.

  # the serial version that should run (SLOWLY) without modification.
  # Disabled: the original script assigned these three variables and then
  # immediately overwrote them with the pfind settings below, so they were
  # dead assignments.
  #io500_find_mpi="False"
  #io500_find_cmd=$DIR/bin/sfind.sh
  #io500_find_cmd_args=""

  # a parallel version in C, the -s adds a stonewall
  #   for a real run, turn -s (stonewall) off or set it at 300 or more
  #   to prepare this (assuming you've run ./utilities/prepare.sh already):
  #   > cd build/pfind
  #   > ./prepare.sh
  #   > ./compile.sh
  #   > cp pfind ../../bin/
  #   If you use io500_find_mpi="True", then this will run with the same
  #   number of MPI nodes and ranks as the other phases.
  #   If you prefer another number, and fewer might be better here,
  #   Then you can set io500_find_mpi to be "False" and write a wrapper
  #   script for this which sets up MPI as you would like.  Then change
  #   io500_find_cmd to point to your wrapper script.
  io500_find_mpi="True"
  io500_find_cmd="$DIR/bin/pfind"
  io500_find_cmd_args="-s 300 -r $io500_result_dir/pfind_results"


  # a parallel version that might require some work, it is a python3 program
  # if you used utilities/prepare.sh, it should already be there.
  # change the stonewall to 300 to get a valid score
  #set +u
  #export PYTHONPATH=$PYTHONPATH:$PWD/bin/lib
  #io500_find_mpi="True"
  #io500_find_cmd="$PWD/bin/pfind -stonewall 1"
  #io500_find_cmd_args=""
}

function setup_mdreal {
  # Parameters for the optional md-real-io phase (precreated objects and
  # iterations); only used when io500_run_mdreal is "True".
  io500_mdreal_params='-P=5000 -I=1000'
}

function run_benchmarks {
  # Important: source the io500_fixed.sh script.  Do not change it. If you discover
  # a need to change it, please email the mailing list to discuss.
  # All output is mirrored via tee into a timestamped summary file in the
  # results directory; the tee target is quoted to survive any unusual path.
  source ./bin/io500_fixed.sh 2>&1 | tee "$io500_result_dir/io-500-summary.$timestamp.txt"
}

# Add key/value pairs defining your system if you want
# This function needs to exist although it doesn't have to output anything if you don't want
function extra_description {
  # Emit optional key/value pairs describing the system; picked up by the
  # results machinery. Must exist even if it prints nothing.
  printf '%s\n' "System_name='JCAHPC Oakforest-PACS'"
}

main
# Clean up the benchmark data after a successful run. The ':?' guard aborts
# if io500_workdir is unset/empty (preventing a catastrophic 'rm -rf' of the
# wrong path), quoting prevents word-splitting, and '--' stops option parsing.
rm -rf -- "${io500_workdir:?}"
ior_easy_read
IOR-3.1.0: MPI Coordinated Test of Parallel I/O

ior WARNING: fsync() only available in POSIX.  Using value of 0.
Began: Thu Nov 23 19:35:07 2017
Command line used: /work/xg17i000/x10007/io-500-dev/bin/ior -r -R -C -Q 1 -g -G 27 -k -e -t 2048k -b 4g -F -o /work/xg17i000/x10007/io-500-dev/datafiles/io500.2017.11.23-19.12.08/ior_easy/ior_file_easy
Machine: Linux c1282.ofp

Test 0 started: Thu Nov 23 19:35:07 2017
Summary:
	api                = MPIIO (version=3, subversion=1)
	test filename      = /work/xg17i000/x10007/io-500-dev/datafiles/io500.2017.11.23-19.12.08/ior_easy/ior_file_easy
	access             = file-per-process
	ordering in a file = sequential offsets
	ordering inter file= constant task offsets = 1
	clients            = 8192 (32 per node)
	repetitions        = 1
	xfersize           = 2 MiB
	blocksize          = 4 GiB
	aggregate filesize = 32768 GiB

access    bw(MiB/s)  block(KiB) xfer(KiB)  open(s)    wr/rd(s)   close(s)   total(s)   iter
------    ---------  ---------- ---------  --------   --------   --------   --------   ----
read      115909     4194304    2048.00    0.249012   289.22     0.023074   289.49     0   

Max Read:  115908.90 MiB/sec (121539.29 MB/sec)

Summary of all tests:
Operation   Max(MiB)   Min(MiB)  Mean(MiB)     StdDev    Mean(s) Test# #Tasks tPN reps fPP reord reordoff reordrand seed segcnt blksiz xsize aggsize API RefNum
read       115908.90  115908.90  115908.90       0.00  289.48969 0 8192 32 1 1 1 1 0 0 1 4294967296 2097152 35184372088832 MPIIO 0

Finished: Thu Nov 23 19:39:57 2017
ior_easy_write
IOR-3.1.0: MPI Coordinated Test of Parallel I/O

ior WARNING: fsync() only available in POSIX.  Using value of 0.
Began: Thu Nov 23 19:12:17 2017
Command line used: /work/xg17i000/x10007/io-500-dev/bin/ior -w -C -Q 1 -g -G 27 -k -e -t 2048k -b 4g -F -o /work/xg17i000/x10007/io-500-dev/datafiles/io500.2017.11.23-19.12.08/ior_easy/ior_file_easy
Machine: Linux c1282.ofp

Test 0 started: Thu Nov 23 19:12:17 2017
Summary:
	api                = MPIIO (version=3, subversion=1)
	test filename      = /work/xg17i000/x10007/io-500-dev/datafiles/io500.2017.11.23-19.12.08/ior_easy/ior_file_easy
	access             = file-per-process
	ordering in a file = sequential offsets
	ordering inter file= constant task offsets = 1
	clients            = 8192 (32 per node)
	repetitions        = 1
	xfersize           = 2 MiB
	blocksize          = 4 GiB
	aggregate filesize = 32768 GiB

access    bw(MiB/s)  block(KiB) xfer(KiB)  open(s)    wr/rd(s)   close(s)   total(s)   iter
------    ---------  ---------- ---------  --------   --------   --------   --------   ----
write     89116      4194304    2048.00    0.423151   376.08     0.024756   376.52     0   

Max Write: 89116.26 MiB/sec (93445.18 MB/sec)

Summary of all tests:
Operation   Max(MiB)   Min(MiB)  Mean(MiB)     StdDev    Mean(s) Test# #Tasks tPN reps fPP reord reordoff reordrand seed segcnt blksiz xsize aggsize API RefNum
write       89116.26   89116.26   89116.26       0.00  376.52422 0 8192 32 1 1 1 1 0 0 1 4294967296 2097152 35184372088832 MPIIO 0

Finished: Thu Nov 23 19:18:34 2017
ior_hard_read
IOR-3.1.0: MPI Coordinated Test of Parallel I/O

ior WARNING: fsync() only available in POSIX.  Using value of 0.
Began: Thu Nov 23 19:42:24 2017
Command line used: /work/xg17i000/x10007/io-500-dev/bin/ior -r -R -C -Q 1 -g -G 27 -k -e -t 47008 -b 47008 -s 2200 -o /work/xg17i000/x10007/io-500-dev/datafiles/io500.2017.11.23-19.12.08/ior_hard/IOR_file
Machine: Linux c1282.ofp

Test 0 started: Thu Nov 23 19:42:24 2017
Summary:
	api                = MPIIO (version=3, subversion=1)
	test filename      = /work/xg17i000/x10007/io-500-dev/datafiles/io500.2017.11.23-19.12.08/ior_hard/IOR_file
	access             = single-shared-file
	ordering in a file = sequential offsets
	ordering inter file= constant task offsets = 1
	clients            = 8192 (32 per node)
	repetitions        = 1
	xfersize           = 47008 bytes
	blocksize          = 47008 bytes
	aggregate filesize = 789.01 GiB

access    bw(MiB/s)  block(KiB) xfer(KiB)  open(s)    wr/rd(s)   close(s)   total(s)   iter
------    ---------  ---------- ---------  --------   --------   --------   --------   ----
read      7119       45.91      45.91      0.873628   112.58     0.032957   113.49     0   

Max Read:  7119.38 MiB/sec (7465.21 MB/sec)

Summary of all tests:
Operation   Max(MiB)   Min(MiB)  Mean(MiB)     StdDev    Mean(s) Test# #Tasks tPN reps fPP reord reordoff reordrand seed segcnt blksiz xsize aggsize API RefNum
read         7119.38    7119.38    7119.38       0.00  113.48600 0 8192 32 1 0 1 1 0 0 2200 47008 47008 847196979200 MPIIO 0

Finished: Thu Nov 23 19:44:18 2017
ior_hard_write
IOR-3.1.0: MPI Coordinated Test of Parallel I/O

ior WARNING: fsync() only available in POSIX.  Using value of 0.
Began: Thu Nov 23 19:24:54 2017
Command line used: /work/xg17i000/x10007/io-500-dev/bin/ior -w -C -Q 1 -g -G 27 -k -e -t 47008 -b 47008 -s 2200 -o /work/xg17i000/x10007/io-500-dev/datafiles/io500.2017.11.23-19.12.08/ior_hard/IOR_file
Machine: Linux c1282.ofp

Test 0 started: Thu Nov 23 19:24:54 2017
Summary:
	api                = MPIIO (version=3, subversion=1)
	test filename      = /work/xg17i000/x10007/io-500-dev/datafiles/io500.2017.11.23-19.12.08/ior_hard/IOR_file
	access             = single-shared-file
	ordering in a file = sequential offsets
	ordering inter file= constant task offsets = 1
	clients            = 8192 (32 per node)
	repetitions        = 1
	xfersize           = 47008 bytes
	blocksize          = 47008 bytes
	aggregate filesize = 789.01 GiB

access    bw(MiB/s)  block(KiB) xfer(KiB)  open(s)    wr/rd(s)   close(s)   total(s)   iter
------    ---------  ---------- ---------  --------   --------   --------   --------   ----
write     2413.35    45.91      45.91      1.26       333.49     0.032164   334.78     0   

Max Write: 2413.35 MiB/sec (2530.58 MB/sec)

Summary of all tests:
Operation   Max(MiB)   Min(MiB)  Mean(MiB)     StdDev    Mean(s) Test# #Tasks tPN reps fPP reord reordoff reordrand seed segcnt blksiz xsize aggsize API RefNum
write        2413.35    2413.35    2413.35       0.00  334.78363 0 8192 32 1 0 1 1 0 0 2200 47008 47008 847196979200 MPIIO 0

Finished: Thu Nov 23 19:30:30 2017
mdtest_easy_delete
-- started at 11/23/2017 19:45:56 --

mdtest-1.9.3 was launched with 8192 total task(s) on 256 node(s)
Command line used: /work/xg17i000/x10007/io-500-dev/bin/mdtest -r -F -d /work/xg17i000/x10007/io-500-dev/datafiles/io500.2017.11.23-19.12.08/mdt_easy -n 2500 -u -L
Path: 
FS: 23597.0 TiB   Used FS: 9.5%   Inodes: 12312.0 Mi   Used Inodes: 2.1%

8192 tasks, 20480000 files

SUMMARY: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation     :          0.000          0.000          0.000          0.000
   File stat         :          0.000          0.000          0.000          0.000
   File read         :          0.000          0.000          0.000          0.000
   File removal      :      36116.359      36116.359      36116.359          0.000
   Tree creation     :          0.000          0.000          0.000          0.000
   Tree removal      :          0.893          0.893          0.893          0.000

-- finished at 11/23/2017 19:55:25 --
mdtest_easy_stat
-- started at 11/23/2017 19:40:05 --

mdtest-1.9.3 was launched with 8192 total task(s) on 256 node(s)
Command line used: /work/xg17i000/x10007/io-500-dev/bin/mdtest -T -F -d /work/xg17i000/x10007/io-500-dev/datafiles/io500.2017.11.23-19.12.08/mdt_easy -n 2500 -u -L
Path: 
FS: 23597.0 TiB   Used FS: 9.5%   Inodes: 12312.0 Mi   Used Inodes: 2.1%

8192 tasks, 20480000 files

SUMMARY: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation     :          0.000          0.000          0.000          0.000
   File stat         :     157208.126     157208.126     157208.126          0.000
   File read         :          0.000          0.000          0.000          0.000
   File removal      :          0.000          0.000          0.000          0.000
   Tree creation     :          0.000          0.000          0.000          0.000
   Tree removal      :          0.000          0.000          0.000          0.000

-- finished at 11/23/2017 19:42:16 --
mdtest_easy_write
-- started at 11/23/2017 19:18:42 --

mdtest-1.9.3 was launched with 8192 total task(s) on 256 node(s)
Command line used: /work/xg17i000/x10007/io-500-dev/bin/mdtest -C -F -d /work/xg17i000/x10007/io-500-dev/datafiles/io500.2017.11.23-19.12.08/mdt_easy -n 2500 -u -L
Path: 
FS: 23597.0 TiB   Used FS: 9.5%   Inodes: 12312.0 Mi   Used Inodes: 1.9%

8192 tasks, 20480000 files

SUMMARY: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation     :      56394.151      56394.151      56394.151          0.000
   File stat         :          0.000          0.000          0.000          0.000
   File read         :          0.000          0.000          0.000          0.000
   File removal      :          0.000          0.000          0.000          0.000
   Tree creation     :          4.704          4.704          4.704          0.000
   Tree removal      :          0.000          0.000          0.000          0.000

-- finished at 11/23/2017 19:24:46 --
mdtest_hard_delete
-- started at 11/23/2017 19:57:15 --

mdtest-1.9.3 was launched with 8192 total task(s) on 256 node(s)
Command line used: /work/xg17i000/x10007/io-500-dev/bin/mdtest -r -t -F -w 3901 -e 3901 -d /work/xg17i000/x10007/io-500-dev/datafiles/io500.2017.11.23-19.12.08/mdt_hard -n 1000
Path: 
FS: 23597.0 TiB   Used FS: 9.5%   Inodes: 12312.0 Mi   Used Inodes: 1.9%

8192 tasks, 8192000 files

SUMMARY: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation     :          0.000          0.000          0.000          0.000
   File stat         :          0.000          0.000          0.000          0.000
   File read         :          0.000          0.000          0.000          0.000
   File removal      :      63617.150      63617.150      63617.150          0.000
   Tree creation     :          0.000          0.000          0.000          0.000
   Tree removal      :          4.225          4.225          4.225          0.000

-- finished at 11/23/2017 19:59:24 --
mdtest_hard_read
-- started at 11/23/2017 19:55:33 --

mdtest-1.9.3 was launched with 8192 total task(s) on 256 node(s)
Command line used: /work/xg17i000/x10007/io-500-dev/bin/mdtest -E -t -F -w 3901 -e 3901 -d /work/xg17i000/x10007/io-500-dev/datafiles/io500.2017.11.23-19.12.08/mdt_hard -n 1000
Path: 
FS: 23597.0 TiB   Used FS: 9.5%   Inodes: 12312.0 Mi   Used Inodes: 1.9%

8192 tasks, 8192000 files

SUMMARY: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation     :          0.000          0.000          0.000          0.000
   File stat         :          0.000          0.000          0.000          0.000
   File read         :      87168.179      87168.179      87168.179          0.000
   File removal      :          0.000          0.000          0.000          0.000
   Tree creation     :          0.000          0.000          0.000          0.000
   Tree removal      :          0.000          0.000          0.000          0.000

-- finished at 11/23/2017 19:57:07 --
mdtest_hard_stat
-- started at 11/23/2017 19:44:26 --

mdtest-1.9.3 was launched with 8192 total task(s) on 256 node(s)
Command line used: /work/xg17i000/x10007/io-500-dev/bin/mdtest -T -t -F -w 3901 -e 3901 -d /work/xg17i000/x10007/io-500-dev/datafiles/io500.2017.11.23-19.12.08/mdt_hard -n 1000
Path: 
FS: 23597.0 TiB   Used FS: 9.5%   Inodes: 12312.0 Mi   Used Inodes: 2.1%

8192 tasks, 8192000 files

SUMMARY: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation     :          0.000          0.000          0.000          0.000
   File stat         :     100467.989     100467.989     100467.989          0.000
   File read         :          0.000          0.000          0.000          0.000
   File removal      :          0.000          0.000          0.000          0.000
   Tree creation     :          0.000          0.000          0.000          0.000
   Tree removal      :          0.000          0.000          0.000          0.000

-- finished at 11/23/2017 19:45:48 --
mdtest_hard_write
-- started at 11/23/2017 19:30:38 --

mdtest-1.9.3 was launched with 8192 total task(s) on 256 node(s)
Command line used: /work/xg17i000/x10007/io-500-dev/bin/mdtest -C -t -F -w 3901 -e 3901 -d /work/xg17i000/x10007/io-500-dev/datafiles/io500.2017.11.23-19.12.08/mdt_hard -n 1000
Path: 
FS: 23597.0 TiB   Used FS: 9.5%   Inodes: 12312.0 Mi   Used Inodes: 2.0%

8192 tasks, 8192000 files

SUMMARY: (of 1 iterations)
   Operation                      Max            Min           Mean        Std Dev
   ---------                      ---            ---           ----        -------
   File creation     :      39432.544      39432.544      39432.544          0.000
   File stat         :          0.000          0.000          0.000          0.000
   File read         :          0.000          0.000          0.000          0.000
   File removal      :          0.000          0.000          0.000          0.000
   Tree creation     :        455.160        455.160        455.160          0.000
   Tree removal      :          0.000          0.000          0.000          0.000

-- finished at 11/23/2017 19:34:06 --
result_summary
[RESULT] BW   phase 1            ior_easy_write               87.027 GB/s : time 376.52 seconds
[RESULT] IOPS phase 1         mdtest_easy_write               56.394 kiops : time 371.57 seconds
[RESULT] BW   phase 2            ior_hard_write                2.357 GB/s : time 334.78 seconds
[RESULT] IOPS phase 2         mdtest_hard_write               39.432 kiops : time 216.03 seconds
[RESULT] IOPS phase 3                      find              548.670 kiops : time  52.27 seconds
[RESULT] BW   phase 3             ior_easy_read              113.192 GB/s : time 289.49 seconds
[RESULT] IOPS phase 4          mdtest_easy_stat              157.208 kiops : time 138.49 seconds
[RESULT] BW   phase 4             ior_hard_read                6.952 GB/s : time 113.49 seconds
[RESULT] IOPS phase 5          mdtest_hard_stat              100.468 kiops : time  89.83 seconds
[RESULT] IOPS phase 6        mdtest_easy_delete               36.116 kiops : time 576.52 seconds
[RESULT] IOPS phase 7          mdtest_hard_read               87.168 kiops : time 102.24 seconds
[RESULT] IOPS phase 8        mdtest_hard_delete               63.617 kiops : time 137.37 seconds
[SCORE] Bandwidth 20.0437 GB/s : IOPS 88.7805 kiops : TOTAL 1779.4897