Remove bad output files for NN:16
This commit is contained in:
parent a1c4bcdeab
commit c5fe0d9f2f
@@ -15,7 +15,7 @@ TESTDIR="$(dirname $LUSTRE_TESTFILE)"
 mkdir $TESTDIR
 lfs setstripe -c $(($NN * 2)) $TESTDIR
 ITERATIONS=1
-IOR="$(which ior) -i $ITERATIONS -s 1 -t $((16 * 1024 * 1024)) -b $((132 * 1024 * 1024 * 1020)) -o $LUSTRE_TESTFILE -a POSIX -F -e -g -k -w"
+IOR="$(which ior) -i $ITERATIONS -s 1 -t $((16 * 1024 * 1024)) -b $((4800 * 1024 * 1024 * 32)) -o $LUSTRE_TESTFILE -a POSIX -F -e -g -k -w"
 ENVVAR="-genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter"
 MPIEXEC="/opt/ddn/mvapich/bin/mpiexec -ppn 8 -np $((8*$NN)) $ENVVAR -hosts isc17-c04,isc17-c05"
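Note on the failed runs recorded below: the -t 102400 MPIIO runs all abort with "ior ERROR: block size must be a multiple of transfer size". The block size -b 141180272640 is an exact multiple of 1048576 and of 10485760, but not of 102400, which is why only the 102400-byte runs die while the 1 MiB and 10 MiB runs complete. A minimal pre-flight check, sketched in plain bash (BLOCK/XFER are illustrative names, not taken from the harness):

# Sketch, assuming plain bash arithmetic; names are illustrative.
BLOCK=141180272640
for XFER in 102400 1048576 10485760; do
  if [ $(( BLOCK % XFER )) -ne 0 ]; then
    echo "-t $XFER would abort: $BLOCK is not a multiple of $XFER"
  else
    echo "-t $XFER ok"
  fi
done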
@@ -0,0 +1,137 @@
+ /opt/ddn/mvapich/bin/mpiexec -ppn 1 -np 16 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c01,isc17-c02,isc17-c03,isc17-c04,isc17-c05,isc17-c06,isc17-c07,isc17-c08,isc17-c09,isc17-c11,isc17-c12,isc17-c13,isc17-c14,isc17-c15,isc17-c18,isc17-c22 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 141180272640 -D 120 -a MPIIO -e -g -z -k -o /esfs/jtacquaviva/ioperf/file_write -w
+ tee -a ./output/COUNT:1#NN:16#PPN:1#API:MPIIO#T:102400.txt
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
[cli_0]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 0
|
||||
[cli_8]: ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 8
|
||||
[cli_1]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 1
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
[cli_12]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 12
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
[cli_13]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 13
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
[cli_4]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 4
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
[cli_2]: ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
[cli_6]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 6
|
||||
[cli_5]: [cli_14]: [cli_15]: ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 2
|
||||
[cli_7]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 5
|
||||
[cli_9]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 14
|
||||
aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 15
|
||||
[cli_3]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 3
|
||||
aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 7
|
||||
aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 9
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
[cli_10]: ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 10
|
||||
[cli_11]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 11
|
||||
|
||||
===================================================================================
|
||||
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
|
||||
= PID 48355 RUNNING AT isc17-c07
|
||||
= EXIT CODE: 255
|
||||
= CLEANING UP REMAINING PROCESSES
|
||||
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
|
||||
===================================================================================
|
||||
[proxy:0:3@isc17-c04] HYDU_sock_write (utils/sock/sock.c:286): write error (Broken pipe)
|
||||
[proxy:0:3@isc17-c04] main (pm/pmiserv/pmip.c:265): unable to send EXIT_STATUS command upstream
|
||||
[mpiexec@isc17-c04] HYDT_bscu_wait_for_completion (tools/bootstrap/utils/bscu_wait.c:76): one of the processes terminated badly; aborting
|
||||
[mpiexec@isc17-c04] HYDT_bsci_wait_for_completion (tools/bootstrap/src/bsci_wait.c:23): launcher returned error waiting for completion
|
||||
[mpiexec@isc17-c04] HYD_pmci_wait_for_completion (pm/pmiserv/pmiserv_pmci.c:218): launcher returned error waiting for completion
|
||||
[mpiexec@isc17-c04] main (ui/mpich/mpiexec.c:344): process manager error waiting for completion
+ /opt/ddn/mvapich/bin/mpiexec -ppn 1 -np 16 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c01,isc17-c02,isc17-c03,isc17-c04,isc17-c05,isc17-c06,isc17-c07,isc17-c08,isc17-c09,isc17-c11,isc17-c12,isc17-c13,isc17-c14,isc17-c15,isc17-c18,isc17-c22 /esfs/jtacquaviva/git/ime-evaluation/drop_caches.sh
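Between the write and read phases the harness runs drop_caches.sh on every node. The script itself is not part of this commit; a typical implementation (pure assumption, the real /esfs/jtacquaviva/git/ime-evaluation/drop_caches.sh may differ) would be:

#!/bin/bash
# Assumption: flush dirty pages and drop the page cache on each client node
# so the read phase cannot be served from local memory. Requires root.
sync
echo 3 > /proc/sys/vm/drop_caches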
+ tee -a ./output/COUNT:1#NN:16#PPN:1#API:MPIIO#T:102400.txt
|
||||
+ /opt/ddn/mvapich/bin/mpiexec -ppn 1 -np 16 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c01,isc17-c02,isc17-c03,isc17-c04,isc17-c05,isc17-c06,isc17-c07,isc17-c08,isc17-c09,isc17-c11,isc17-c12,isc17-c13,isc17-c14,isc17-c15,isc17-c18,isc17-c22 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 141180272640 -D 120 -a MPIIO -e -g -z -k -o /esfs/jtacquaviva/file_read -r
|
||||
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
|
||||
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
[cli_8]: ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
[cli_10]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 10
|
||||
[cli_7]: [cli_0]: [cli_14]: [cli_6]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 6
|
||||
[cli_11]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 11
|
||||
[cli_12]: [cli_4]: [cli_5]: [cli_9]: [cli_2]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 8
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 7
|
||||
aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 0
|
||||
aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 14
|
||||
aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 12
|
||||
aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 4
|
||||
[cli_15]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 15
|
||||
aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 5
|
||||
aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 9
|
||||
[cli_13]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 2
|
||||
aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 13
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
[cli_1]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 1
|
||||
[cli_3]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 3
|
||||
|
||||
===================================================================================
|
||||
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
|
||||
= PID 35109 RUNNING AT isc17-c09
|
||||
= EXIT CODE: 255
|
||||
= CLEANING UP REMAINING PROCESSES
|
||||
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
|
||||
===================================================================================
|
||||
[proxy:0:9@isc17-c11] HYD_pmcd_pmip_control_cmd_cb (pm/pmiserv/pmip_cb.c:912): assert (!closed) failed
|
||||
[proxy:0:9@isc17-c11] HYDT_dmxu_poll_wait_for_event (tools/demux/demux_poll.c:76): callback returned error status
|
||||
[proxy:0:9@isc17-c11] main (pm/pmiserv/pmip.c:206): demux engine error waiting for event
|
||||
[mpiexec@isc17-c04] HYDT_bscu_wait_for_completion (tools/bootstrap/utils/bscu_wait.c:76): one of the processes terminated badly; aborting
|
||||
[mpiexec@isc17-c04] HYDT_bsci_wait_for_completion (tools/bootstrap/src/bsci_wait.c:23): launcher returned error waiting for completion
|
||||
[mpiexec@isc17-c04] HYD_pmci_wait_for_completion (pm/pmiserv/pmiserv_pmci.c:218): launcher returned error waiting for completion
|
||||
[mpiexec@isc17-c04] main (ui/mpich/mpiexec.c:344): process manager error waiting for completion
|
||||
+ set +x
/esfs/jtacquaviva/ioperf
stripe_count: 32 stripe_size: 1048576 stripe_offset: -1
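The trailer of each output file is a Lustre layout dump of the test directory (and, in later files, of the file_write* files left behind). A sketch of how such a dump can be reproduced with the standard Lustre client tools (the loop is an assumption; the exact reporting script is not shown in this commit):

# Directory default layout, then per-file lmm_* layout and obdidx/objid table.
TESTDIR=/esfs/jtacquaviva/ioperf
lfs getstripe -d "$TESTDIR"
for f in "$TESTDIR"/file_write*; do
  lfs getstripe "$f"
done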
@@ -0,0 +1,110 @@
+ /opt/ddn/mvapich/bin/mpiexec -ppn 1 -np 16 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c01,isc17-c02,isc17-c03,isc17-c04,isc17-c05,isc17-c06,isc17-c07,isc17-c08,isc17-c09,isc17-c11,isc17-c12,isc17-c13,isc17-c14,isc17-c15,isc17-c18,isc17-c22 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 1048576 -b 141180272640 -D 120 -a MPIIO -e -g -z -k -o /esfs/jtacquaviva/ioperf/file_write -w
|
||||
+ tee -a ./output/COUNT:1#NN:16#PPN:1#API:MPIIO#T:1048576.txt
|
||||
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
|
||||
|
||||
ior WARNING: fsync() only available in POSIX. Using value of 0.
|
||||
Began: Tue Oct 23 10:26:26 2018
|
||||
Command line used: /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 1048576 -b 141180272640 -D 120 -a MPIIO -e -g -z -k -o /esfs/jtacquaviva/ioperf/file_write -w
|
||||
Machine: Linux isc17-c01
|
||||
|
||||
Test 0 started: Tue Oct 23 10:26:26 2018
|
||||
Summary:
|
||||
api = MPIIO (version=3, subversion=0)
|
||||
test filename = /esfs/jtacquaviva/ioperf/file_write
|
||||
access = single-shared-file
|
||||
ordering in a file = random offsets
|
||||
ordering inter file= no tasks offsets
|
||||
clients = 16 (1 per node)
|
||||
repetitions = 3
|
||||
xfersize = 1 MiB
|
||||
blocksize = 131.48 GiB
|
||||
aggregate filesize = 2103.75 GiB
|
||||
Using stonewalling = 120 second(s)
|
||||
|
||||
access bw(MiB/s) block(KiB) xfer(KiB) open(s) wr/rd(s) close(s) total(s) iter
|
||||
------ --------- ---------- --------- -------- -------- -------- -------- ----
|
||||
WARNING: Expected aggregate file size = 2258884362240.
|
||||
WARNING: Stat() of aggregate file size = 2258884362240.
|
||||
WARNING: Using actual aggregate bytes moved = 322012446720.
|
||||
write 2555.15 137871360 1024.00 0.014606 120.17 0.000808 120.19 0
|
||||
WARNING: Expected aggregate file size = 2258884362240.
|
||||
WARNING: Stat() of aggregate file size = 2258878070784.
|
||||
WARNING: Using actual aggregate bytes moved = 318062460928.
|
||||
write 2523.54 137871360 1024.00 0.012289 120.19 0.000852 120.20 1
|
||||
WARNING: Expected aggregate file size = 2258884362240.
|
||||
WARNING: Stat() of aggregate file size = 2258873876480.
|
||||
WARNING: Using actual aggregate bytes moved = 318693703680.
|
||||
write 2529.36 137871360 1024.00 0.008500 120.15 0.001109 120.16 2
|
||||
|
||||
Max Write: 2555.15 MiB/sec (2679.27 MB/sec)
|
||||
|
||||
Summary of all tests:
|
||||
Operation Max(MiB) Min(MiB) Mean(MiB) StdDev Mean(s) Test# #Tasks tPN reps fPP reord reordoff reordrand seed segcnt blksiz xsize aggsize API RefNum
|
||||
write 2555.15 2523.54 2536.02 13.74 120.18223 0 16 1 3 0 0 1 0 0 1 141180272640 1048576 322012446720 MPIIO 0
|
||||
Finished: Tue Oct 23 10:32:30 2018
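The per-iteration WARNING lines are expected with -D 120 (stonewalling): each task stops issuing I/O after roughly 120 seconds, so fewer bytes move than -b implies, and IOR reports bandwidth against the bytes actually moved. A quick cross-check of iteration 0 above, using numbers copied from the log (plain shell):

# 322012446720 B moved in ~120.19 s: 322012446720 / 2^20 = 307095 MiB,
# and 307095 / 120.19 ~ 2555 MiB/s, matching the reported "write 2555.15".
awk 'BEGIN { printf "%.2f MiB/s\n", 322012446720 / 1048576 / 120.19 }'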
+ /opt/ddn/mvapich/bin/mpiexec -ppn 1 -np 16 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c01,isc17-c02,isc17-c03,isc17-c04,isc17-c05,isc17-c06,isc17-c07,isc17-c08,isc17-c09,isc17-c11,isc17-c12,isc17-c13,isc17-c14,isc17-c15,isc17-c18,isc17-c22 /esfs/jtacquaviva/git/ime-evaluation/drop_caches.sh
|
||||
+ /opt/ddn/mvapich/bin/mpiexec -ppn 1 -np 16 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c01,isc17-c02,isc17-c03,isc17-c04,isc17-c05,isc17-c06,isc17-c07,isc17-c08,isc17-c09,isc17-c11,isc17-c12,isc17-c13,isc17-c14,isc17-c15,isc17-c18,isc17-c22 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 1048576 -b 141180272640 -D 120 -a MPIIO -e -g -z -k -o /esfs/jtacquaviva/file_read -r
|
||||
+ tee -a ./output/COUNT:1#NN:16#PPN:1#API:MPIIO#T:1048576.txt
|
||||
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
|
||||
|
||||
ior WARNING: fsync() only available in POSIX. Using value of 0.
|
||||
Began: Tue Oct 23 10:32:40 2018
|
||||
Command line used: /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 1048576 -b 141180272640 -D 120 -a MPIIO -e -g -z -k -o /esfs/jtacquaviva/file_read -r
|
||||
Machine: Linux isc17-c01
|
||||
|
||||
Test 0 started: Tue Oct 23 10:32:40 2018
|
||||
Summary:
|
||||
api = MPIIO (version=3, subversion=0)
|
||||
test filename = /esfs/jtacquaviva/file_read
|
||||
access = single-shared-file
|
||||
ordering in a file = random offsets
|
||||
ordering inter file= no tasks offsets
|
||||
clients = 16 (1 per node)
|
||||
repetitions = 3
|
||||
xfersize = 1 MiB
|
||||
blocksize = 131.48 GiB
|
||||
aggregate filesize = 2103.75 GiB
|
||||
Using stonewalling = 120 second(s)
|
||||
|
||||
access bw(MiB/s) block(KiB) xfer(KiB) open(s) wr/rd(s) close(s) total(s) iter
|
||||
------ --------- ---------- --------- -------- -------- -------- -------- ----
|
||||
WARNING: Expected aggregate file size = 2258884362240.
|
||||
WARNING: Stat() of aggregate file size = 4517768724480.
|
||||
WARNING: Using actual aggregate bytes moved = 97859403776.
|
||||
read 776.45 137871360 1024.00 0.047178 120.15 0.000699 120.20 0
|
||||
WARNING: Expected aggregate file size = 2258884362240.
|
||||
WARNING: Stat() of aggregate file size = 4517768724480.
|
||||
WARNING: Using actual aggregate bytes moved = 99297001472.
|
||||
read 788.17 137871360 1024.00 0.001091 120.15 0.000940 120.15 1
|
||||
WARNING: Expected aggregate file size = 2258884362240.
|
||||
WARNING: Stat() of aggregate file size = 4517768724480.
|
||||
WARNING: Using actual aggregate bytes moved = 99388227584.
|
||||
read 788.91 137871360 1024.00 0.001376 120.14 0.001174 120.15 2
|
||||
|
||||
Max Read: 788.91 MiB/sec (827.23 MB/sec)
|
||||
|
||||
Summary of all tests:
|
||||
Operation Max(MiB) Min(MiB) Mean(MiB) StdDev Mean(s) Test# #Tasks tPN reps fPP reord reordoff reordrand seed segcnt blksiz xsize aggsize API RefNum
|
||||
read 788.91 776.45 784.51 5.71 120.16321 0 16 1 3 0 0 1 0 0 1 141180272640 1048576 97859403776 MPIIO 0
|
||||
|
||||
Finished: Tue Oct 23 10:38:41 2018
|
||||
+ set +x
|
||||
/esfs/jtacquaviva/ioperf
|
||||
stripe_count: 32 stripe_size: 1048576 stripe_offset: -1
|
||||
/esfs/jtacquaviva/ioperf/file_write
|
||||
lmm_stripe_count: 8
|
||||
lmm_stripe_size: 1048576
|
||||
lmm_pattern: 1
|
||||
lmm_layout_gen: 0
|
||||
lmm_stripe_offset: 3
|
||||
obdidx objid objid group
|
||||
3 12488837 0xbe9085 0
|
||||
5 12380898 0xbceae2 0
|
||||
6 12594159 0xc02bef 0
|
||||
0 12422549 0xbd8d95 0
|
||||
2 12568891 0xbfc93b 0
|
||||
4 12510627 0xbee5a3 0
|
||||
7 12459260 0xbe1cfc 0
|
||||
1 12640534 0xc0e116 0
|
||||
@@ -0,0 +1,110 @@
+ /opt/ddn/mvapich/bin/mpiexec -ppn 1 -np 16 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c01,isc17-c02,isc17-c03,isc17-c04,isc17-c05,isc17-c06,isc17-c07,isc17-c08,isc17-c09,isc17-c11,isc17-c12,isc17-c13,isc17-c14,isc17-c15,isc17-c18,isc17-c22 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 10485760 -b 141180272640 -D 120 -a MPIIO -e -g -z -k -o /esfs/jtacquaviva/ioperf/file_write -w
|
||||
+ tee -a ./output/COUNT:1#NN:16#PPN:1#API:MPIIO#T:10485760.txt
|
||||
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
|
||||
|
||||
ior WARNING: fsync() only available in POSIX. Using value of 0.
|
||||
Began: Tue Oct 23 08:17:28 2018
|
||||
Command line used: /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 10485760 -b 141180272640 -D 120 -a MPIIO -e -g -z -k -o /esfs/jtacquaviva/ioperf/file_write -w
|
||||
Machine: Linux isc17-c01
|
||||
|
||||
Test 0 started: Tue Oct 23 08:17:28 2018
|
||||
Summary:
|
||||
api = MPIIO (version=3, subversion=0)
|
||||
test filename = /esfs/jtacquaviva/ioperf/file_write
|
||||
access = single-shared-file
|
||||
ordering in a file = random offsets
|
||||
ordering inter file= no tasks offsets
|
||||
clients = 16 (1 per node)
|
||||
repetitions = 3
|
||||
xfersize = 10 MiB
|
||||
blocksize = 131.48 GiB
|
||||
aggregate filesize = 2103.75 GiB
|
||||
Using stonewalling = 120 second(s)
|
||||
|
||||
access bw(MiB/s) block(KiB) xfer(KiB) open(s) wr/rd(s) close(s) total(s) iter
|
||||
------ --------- ---------- --------- -------- -------- -------- -------- ----
|
||||
WARNING: Expected aggregate file size = 2258884362240.
|
||||
WARNING: Stat() of aggregate file size = 2258727075840.
|
||||
WARNING: Using actual aggregate bytes moved = 235510169600.
|
||||
write 1869.22 137871360 10240 0.014852 120.14 0.000794 120.16 0
|
||||
WARNING: Expected aggregate file size = 2258884362240.
|
||||
WARNING: Stat() of aggregate file size = 2258789990400.
|
||||
WARNING: Using actual aggregate bytes moved = 244339179520.
|
||||
write 1936.52 137871360 10240 0.002699 120.33 0.000907 120.33 1
|
||||
WARNING: Expected aggregate file size = 2258884362240.
|
||||
WARNING: Stat() of aggregate file size = 2258884362240.
|
||||
WARNING: Using actual aggregate bytes moved = 241560453120.
|
||||
write 1912.19 137871360 10240 0.039775 120.43 0.001066 120.47 2
|
||||
|
||||
Max Write: 1936.52 MiB/sec (2030.59 MB/sec)
|
||||
|
||||
Summary of all tests:
|
||||
Operation Max(MiB) Min(MiB) Mean(MiB) StdDev Mean(s) Test# #Tasks tPN reps fPP reord reordoff reordrand seed segcnt blksiz xsize aggsize API RefNum
|
||||
write 1936.52 1869.22 1905.98 27.82 120.32012 0 16 1 3 0 0 1 0 0 1 141180272640 10485760 235510169600 MPIIO 0
|
||||
|
||||
Finished: Tue Oct 23 08:23:33 2018
|
||||
+ /opt/ddn/mvapich/bin/mpiexec -ppn 1 -np 16 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c01,isc17-c02,isc17-c03,isc17-c04,isc17-c05,isc17-c06,isc17-c07,isc17-c08,isc17-c09,isc17-c11,isc17-c12,isc17-c13,isc17-c14,isc17-c15,isc17-c18,isc17-c22 /esfs/jtacquaviva/git/ime-evaluation/drop_caches.sh
|
||||
+ tee -a ./output/COUNT:1#NN:16#PPN:1#API:MPIIO#T:10485760.txt
|
||||
+ /opt/ddn/mvapich/bin/mpiexec -ppn 1 -np 16 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c01,isc17-c02,isc17-c03,isc17-c04,isc17-c05,isc17-c06,isc17-c07,isc17-c08,isc17-c09,isc17-c11,isc17-c12,isc17-c13,isc17-c14,isc17-c15,isc17-c18,isc17-c22 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 10485760 -b 141180272640 -D 120 -a MPIIO -e -g -z -k -o /esfs/jtacquaviva/file_read -r
|
||||
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
|
||||
|
||||
ior WARNING: fsync() only available in POSIX. Using value of 0.
|
||||
Began: Tue Oct 23 08:23:43 2018
|
||||
Command line used: /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 10485760 -b 141180272640 -D 120 -a MPIIO -e -g -z -k -o /esfs/jtacquaviva/file_read -r
|
||||
Machine: Linux isc17-c01
|
||||
|
||||
Test 0 started: Tue Oct 23 08:23:43 2018
|
||||
Summary:
|
||||
api = MPIIO (version=3, subversion=0)
|
||||
test filename = /esfs/jtacquaviva/file_read
|
||||
access = single-shared-file
|
||||
ordering in a file = random offsets
|
||||
ordering inter file= no tasks offsets
|
||||
clients = 16 (1 per node)
|
||||
repetitions = 3
|
||||
xfersize = 10 MiB
|
||||
blocksize = 131.48 GiB
|
||||
aggregate filesize = 2103.75 GiB
|
||||
Using stonewalling = 120 second(s)
|
||||
|
||||
access bw(MiB/s) block(KiB) xfer(KiB) open(s) wr/rd(s) close(s) total(s) iter
|
||||
------ --------- ---------- --------- -------- -------- -------- -------- ----
|
||||
WARNING: Expected aggregate file size = 2258884362240.
|
||||
WARNING: Stat() of aggregate file size = 4517768724480.
|
||||
WARNING: Using actual aggregate bytes moved = 267911168000.
|
||||
read 2126.81 137871360 10240 0.056208 120.08 0.007795 120.13 0
|
||||
WARNING: Expected aggregate file size = 2258884362240.
|
||||
WARNING: Stat() of aggregate file size = 4517768724480.
|
||||
WARNING: Using actual aggregate bytes moved = 271193210880.
|
||||
read 2153.64 137871360 10240 0.008455 120.09 0.007829 120.09 1
|
||||
WARNING: Expected aggregate file size = 2258884362240.
|
||||
WARNING: Stat() of aggregate file size = 4517768724480.
|
||||
WARNING: Using actual aggregate bytes moved = 271402926080.
|
||||
read 2155.45 137871360 10240 0.008286 120.08 0.007919 120.08 2
|
||||
|
||||
Max Read: 2155.45 MiB/sec (2260.15 MB/sec)
|
||||
|
||||
Summary of all tests:
|
||||
Operation Max(MiB) Min(MiB) Mean(MiB) StdDev Mean(s) Test# #Tasks tPN reps fPP reord reordoff reordrand seed segcnt blksiz xsize aggsize API RefNum
|
||||
read 2155.45 2126.81 2145.30 13.09 120.10137 0 16 1 3 0 0 1 0 0 1 141180272640 10485760 267911168000 MPIIO 0
|
||||
|
||||
Finished: Tue Oct 23 08:29:43 2018
|
||||
+ set +x
|
||||
/esfs/jtacquaviva/ioperf
|
||||
stripe_count: 32 stripe_size: 1048576 stripe_offset: -1
|
||||
/esfs/jtacquaviva/ioperf/file_write
|
||||
lmm_stripe_count: 8
|
||||
lmm_stripe_size: 1048576
|
||||
lmm_pattern: 1
|
||||
lmm_layout_gen: 0
|
||||
lmm_stripe_offset: 7
|
||||
obdidx objid objid group
|
||||
7 12458248 0xbe1908 0
|
||||
1 12639520 0xc0dd20 0
|
||||
3 12487830 0xbe8c96 0
|
||||
5 12379889 0xbce6f1 0
|
||||
6 12593149 0xc027fd 0
|
||||
0 12421542 0xbd89a6 0
|
||||
2 12567888 0xbfc550 0
|
||||
4 12509619 0xbee1b3 0
|
||||
@@ -0,0 +1,346 @@
+ /opt/ddn/mvapich/bin/mpiexec -ppn 1 -np 16 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c01,isc17-c02,isc17-c03,isc17-c04,isc17-c05,isc17-c06,isc17-c07,isc17-c08,isc17-c09,isc17-c11,isc17-c12,isc17-c13,isc17-c14,isc17-c15,isc17-c18,isc17-c22 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 1048576 -b 141180272640 -D 120 -a POSIX -F -e -g -z -k -o /esfs/jtacquaviva/ioperf/file_write -w
|
||||
+ tee -a ./output/COUNT:1#NN:16#PPN:1#API:POSIX#T:1048576.txt
|
||||
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
|
||||
|
||||
Began: Tue Oct 23 10:13:02 2018
|
||||
Command line used: /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 1048576 -b 141180272640 -D 120 -a POSIX -F -e -g -z -k -o /esfs/jtacquaviva/ioperf/file_write -w
|
||||
Machine: Linux isc17-c01
|
||||
|
||||
Test 0 started: Tue Oct 23 10:13:02 2018
|
||||
Summary:
|
||||
api = POSIX
|
||||
test filename = /esfs/jtacquaviva/ioperf/file_write
|
||||
access = file-per-process
|
||||
ordering in a file = random offsets
|
||||
ordering inter file= no tasks offsets
|
||||
clients = 16 (1 per node)
|
||||
repetitions = 3
|
||||
xfersize = 1 MiB
|
||||
blocksize = 131.48 GiB
|
||||
aggregate filesize = 2103.75 GiB
|
||||
Using stonewalling = 120 second(s)
|
||||
|
||||
access bw(MiB/s) block(KiB) xfer(KiB) open(s) wr/rd(s) close(s) total(s) iter
|
||||
------ --------- ---------- --------- -------- -------- -------- -------- ----
|
||||
WARNING: Expected aggregate file size = 2258884362240.
|
||||
WARNING: Stat() of aggregate file size = 2258846613504.
|
||||
WARNING: Using actual aggregate bytes moved = 673961738240.
|
||||
write 4819 137871360 1024.00 0.000930 133.37 0.000665 133.37 0
|
||||
WARNING: Expected aggregate file size = 2258884362240.
|
||||
WARNING: Stat() of aggregate file size = 2258867585024.
|
||||
WARNING: Using actual aggregate bytes moved = 617881796608.
|
||||
write 4363 137871360 1024.00 0.006670 135.07 0.000829 135.07 1
|
||||
WARNING: Expected aggregate file size = 2258884362240.
|
||||
WARNING: Stat() of aggregate file size = 2258884362240.
|
||||
WARNING: Using actual aggregate bytes moved = 678933037056.
|
||||
write 4745 137871360 1024.00 0.001755 136.44 0.001155 136.45 2
|
||||
|
||||
Max Write: 4819.17 MiB/sec (5053.27 MB/sec)
|
||||
|
||||
Summary of all tests:
|
||||
Operation Max(MiB) Min(MiB) Mean(MiB) StdDev Mean(s) Test# #Tasks tPN reps fPP reord reordoff reordrand seed segcnt blksiz xsize aggsize API RefNum
|
||||
write 4819.17 4362.52 4642.34 200.15 134.96339 0 16 1 3 1 0 1 0 0 1 141180272640 1048576 673961738240 POSIX 0
|
||||
|
||||
Finished: Tue Oct 23 10:20:00 2018
|
||||
+ /opt/ddn/mvapich/bin/mpiexec -ppn 1 -np 16 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c01,isc17-c02,isc17-c03,isc17-c04,isc17-c05,isc17-c06,isc17-c07,isc17-c08,isc17-c09,isc17-c11,isc17-c12,isc17-c13,isc17-c14,isc17-c15,isc17-c18,isc17-c22 /esfs/jtacquaviva/git/ime-evaluation/drop_caches.sh
|
||||
+ /opt/ddn/mvapich/bin/mpiexec -ppn 1 -np 16 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c01,isc17-c02,isc17-c03,isc17-c04,isc17-c05,isc17-c06,isc17-c07,isc17-c08,isc17-c09,isc17-c11,isc17-c12,isc17-c13,isc17-c14,isc17-c15,isc17-c18,isc17-c22 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 1048576 -b 141180272640 -D 120 -a POSIX -F -e -g -z -k -o /esfs/jtacquaviva/indread16/file -r
|
||||
+ tee -a ./output/COUNT:1#NN:16#PPN:1#API:POSIX#T:1048576.txt
|
||||
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
|
||||
|
||||
Began: Tue Oct 23 10:20:25 2018
|
||||
Command line used: /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 1048576 -b 141180272640 -D 120 -a POSIX -F -e -g -z -k -o /esfs/jtacquaviva/indread16/file -r
|
||||
Machine: Linux isc17-c01
|
||||
|
||||
Test 0 started: Tue Oct 23 10:20:25 2018
|
||||
Summary:
|
||||
api = POSIX
|
||||
test filename = /esfs/jtacquaviva/indread16/file
|
||||
access = file-per-process
|
||||
ordering in a file = random offsets
|
||||
ordering inter file= no tasks offsets
|
||||
clients = 16 (1 per node)
|
||||
repetitions = 3
|
||||
xfersize = 1 MiB
|
||||
blocksize = 131.48 GiB
|
||||
aggregate filesize = 2103.75 GiB
|
||||
Using stonewalling = 120 second(s)
|
||||
|
||||
access bw(MiB/s) block(KiB) xfer(KiB) open(s) wr/rd(s) close(s) total(s) iter
|
||||
------ --------- ---------- --------- -------- -------- -------- -------- ----
|
||||
WARNING: Expected aggregate file size = 2258884362240.
|
||||
WARNING: Stat() of aggregate file size = 2258884362240.
|
||||
WARNING: Using actual aggregate bytes moved = 153187516416.
|
||||
read 1217.24 137871360 1024.00 0.000789 120.02 0.000565 120.02 0
|
||||
WARNING: Expected aggregate file size = 2258884362240.
|
||||
WARNING: Stat() of aggregate file size = 2258884362240.
|
||||
WARNING: Using actual aggregate bytes moved = 164603363328.
|
||||
read 1307.91 137871360 1024.00 0.000597 120.02 0.000861 120.02 1
|
||||
WARNING: Expected aggregate file size = 2258884362240.
|
||||
WARNING: Stat() of aggregate file size = 2258884362240.
|
||||
WARNING: Using actual aggregate bytes moved = 178045059072.
|
||||
read 1414.67 137871360 1024.00 0.000901 120.02 0.001137 120.03 2
|
||||
|
||||
Max Read: 1414.67 MiB/sec (1483.39 MB/sec)
|
||||
|
||||
Summary of all tests:
|
||||
Operation Max(MiB) Min(MiB) Mean(MiB) StdDev Mean(s) Test# #Tasks tPN reps fPP reord reordoff reordrand seed segcnt blksiz xsize aggsize API RefNum
|
||||
read 1414.67 1217.24 1313.28 80.69 120.02189 0 16 1 3 1 0 1 0 0 1 141180272640 1048576 153187516416 POSIX 0
|
||||
|
||||
Finished: Tue Oct 23 10:26:25 2018
|
||||
+ set +x
|
||||
/esfs/jtacquaviva/ioperf
|
||||
stripe_count: 32 stripe_size: 1048576 stripe_offset: -1
|
||||
/esfs/jtacquaviva/ioperf/file_write.00000006
|
||||
lmm_stripe_count: 8
|
||||
lmm_stripe_size: 1048576
|
||||
lmm_pattern: 1
|
||||
lmm_layout_gen: 0
|
||||
lmm_stripe_offset: 3
|
||||
obdidx objid objid group
|
||||
3 12488825 0xbe9079 0
|
||||
7 12459248 0xbe1cf0 0
|
||||
0 12422541 0xbd8d8d 0
|
||||
6 12594150 0xc02be6 0
|
||||
4 12510618 0xbee59a 0
|
||||
1 12640530 0xc0e112 0
|
||||
2 12568887 0xbfc937 0
|
||||
5 12380894 0xbceade 0
|
||||
|
||||
/esfs/jtacquaviva/ioperf/file_write.00000002
|
||||
lmm_stripe_count: 8
|
||||
lmm_stripe_size: 1048576
|
||||
lmm_pattern: 1
|
||||
lmm_layout_gen: 0
|
||||
lmm_stripe_offset: 2
|
||||
obdidx objid objid group
|
||||
2 12568873 0xbfc929 0
|
||||
1 12640516 0xc0e104 0
|
||||
4 12510610 0xbee592 0
|
||||
5 12380880 0xbcead0 0
|
||||
7 12459243 0xbe1ceb 0
|
||||
6 12594142 0xc02bde 0
|
||||
0 12422532 0xbd8d84 0
|
||||
3 12488821 0xbe9075 0
|
||||
|
||||
/esfs/jtacquaviva/ioperf/file_write.00000000
|
||||
lmm_stripe_count: 8
|
||||
lmm_stripe_size: 1048576
|
||||
lmm_pattern: 1
|
||||
lmm_layout_gen: 0
|
||||
lmm_stripe_offset: 2
|
||||
obdidx objid objid group
|
||||
2 12568884 0xbfc934 0
|
||||
4 12510620 0xbee59c 0
|
||||
7 12459255 0xbe1cf7 0
|
||||
1 12640531 0xc0e113 0
|
||||
3 12488834 0xbe9082 0
|
||||
5 12380895 0xbceadf 0
|
||||
6 12594155 0xc02beb 0
|
||||
0 12422546 0xbd8d92 0
|
||||
|
||||
/esfs/jtacquaviva/ioperf/file_write.00000004
|
||||
lmm_stripe_count: 8
|
||||
lmm_stripe_size: 1048576
|
||||
lmm_pattern: 1
|
||||
lmm_layout_gen: 0
|
||||
lmm_stripe_offset: 2
|
||||
obdidx objid objid group
|
||||
2 12568875 0xbfc92b 0
|
||||
3 12488822 0xbe9076 0
|
||||
6 12594145 0xc02be1 0
|
||||
4 12510613 0xbee595 0
|
||||
1 12640523 0xc0e10b 0
|
||||
5 12380890 0xbceada 0
|
||||
0 12422542 0xbd8d8e 0
|
||||
7 12459253 0xbe1cf5 0
|
||||
|
||||
/esfs/jtacquaviva/ioperf/file_write.00000007
|
||||
lmm_stripe_count: 8
|
||||
lmm_stripe_size: 1048576
|
||||
lmm_pattern: 1
|
||||
lmm_layout_gen: 0
|
||||
lmm_stripe_offset: 6
|
||||
obdidx objid objid group
|
||||
6 12594146 0xc02be2 0
|
||||
5 12380887 0xbcead7 0
|
||||
2 12568880 0xbfc930 0
|
||||
4 12510616 0xbee598 0
|
||||
3 12488830 0xbe907e 0
|
||||
1 12640529 0xc0e111 0
|
||||
0 12422545 0xbd8d91 0
|
||||
7 12459256 0xbe1cf8 0
|
||||
|
||||
/esfs/jtacquaviva/ioperf/file_write.00000012
|
||||
lmm_stripe_count: 8
|
||||
lmm_stripe_size: 1048576
|
||||
lmm_pattern: 1
|
||||
lmm_layout_gen: 0
|
||||
lmm_stripe_offset: 0
|
||||
obdidx objid objid group
|
||||
0 12422535 0xbd8d87 0
|
||||
1 12640524 0xc0e10c 0
|
||||
7 12459249 0xbe1cf1 0
|
||||
6 12594149 0xc02be5 0
|
||||
3 12488829 0xbe907d 0
|
||||
2 12568883 0xbfc933 0
|
||||
5 12380892 0xbceadc 0
|
||||
4 12510622 0xbee59e 0
|
||||
|
||||
/esfs/jtacquaviva/ioperf/file_write.00000013
|
||||
lmm_stripe_count: 8
|
||||
lmm_stripe_size: 1048576
|
||||
lmm_pattern: 1
|
||||
lmm_layout_gen: 0
|
||||
lmm_stripe_offset: 5
|
||||
obdidx objid objid group
|
||||
5 12380885 0xbcead5 0
|
||||
0 12422537 0xbd8d89 0
|
||||
1 12640526 0xc0e10e 0
|
||||
7 12459250 0xbe1cf2 0
|
||||
3 12488827 0xbe907b 0
|
||||
6 12594152 0xc02be8 0
|
||||
2 12568882 0xbfc932 0
|
||||
4 12510619 0xbee59b 0
|
||||
|
||||
/esfs/jtacquaviva/ioperf/file_write.00000003
|
||||
lmm_stripe_count: 8
|
||||
lmm_stripe_size: 1048576
|
||||
lmm_pattern: 1
|
||||
lmm_layout_gen: 0
|
||||
lmm_stripe_offset: 3
|
||||
obdidx objid objid group
|
||||
3 12488824 0xbe9078 0
|
||||
2 12568879 0xbfc92f 0
|
||||
5 12380889 0xbcead9 0
|
||||
1 12640527 0xc0e10f 0
|
||||
0 12422543 0xbd8d8f 0
|
||||
6 12594154 0xc02bea 0
|
||||
4 12510623 0xbee59f 0
|
||||
7 12459257 0xbe1cf9 0
|
||||
|
||||
/esfs/jtacquaviva/ioperf/file_write.00000008
|
||||
lmm_stripe_count: 8
|
||||
lmm_stripe_size: 1048576
|
||||
lmm_pattern: 1
|
||||
lmm_layout_gen: 0
|
||||
lmm_stripe_offset: 1
|
||||
obdidx objid objid group
|
||||
1 12640517 0xc0e105 0
|
||||
4 12510612 0xbee594 0
|
||||
5 12380883 0xbcead3 0
|
||||
7 12459247 0xbe1cef 0
|
||||
6 12594143 0xc02bdf 0
|
||||
2 12568877 0xbfc92d 0
|
||||
0 12422540 0xbd8d8c 0
|
||||
3 12488826 0xbe907a 0
|
||||
|
||||
/esfs/jtacquaviva/ioperf/file_write.00000010
|
||||
lmm_stripe_count: 8
|
||||
lmm_stripe_size: 1048576
|
||||
lmm_pattern: 1
|
||||
lmm_layout_gen: 0
|
||||
lmm_stripe_offset: 5
|
||||
obdidx objid objid group
|
||||
5 12380884 0xbcead4 0
|
||||
0 12422534 0xbd8d86 0
|
||||
6 12594147 0xc02be3 0
|
||||
1 12640525 0xc0e10d 0
|
||||
3 12488828 0xbe907c 0
|
||||
7 12459254 0xbe1cf6 0
|
||||
4 12510621 0xbee59d 0
|
||||
2 12568886 0xbfc936 0
|
||||
|
||||
/esfs/jtacquaviva/ioperf/file_write.00000005
|
||||
lmm_stripe_count: 8
|
||||
lmm_stripe_size: 1048576
|
||||
lmm_pattern: 1
|
||||
lmm_layout_gen: 0
|
||||
lmm_stripe_offset: 6
|
||||
obdidx objid objid group
|
||||
6 12594141 0xc02bdd 0
|
||||
4 12510609 0xbee591 0
|
||||
3 12488819 0xbe9073 0
|
||||
0 12422531 0xbd8d83 0
|
||||
5 12380881 0xbcead1 0
|
||||
7 12459244 0xbe1cec 0
|
||||
2 12568874 0xbfc92a 0
|
||||
1 12640518 0xc0e106 0
|
||||
|
||||
/esfs/jtacquaviva/ioperf/file_write.00000014
|
||||
lmm_stripe_count: 7
|
||||
lmm_stripe_size: 1048576
|
||||
lmm_pattern: 1
|
||||
lmm_layout_gen: 0
|
||||
lmm_stripe_offset: 1
|
||||
obdidx objid objid group
|
||||
1 12640521 0xc0e109 0
|
||||
5 12380886 0xbcead6 0
|
||||
2 12568881 0xbfc931 0
|
||||
6 12594151 0xc02be7 0
|
||||
3 12488832 0xbe9080 0
|
||||
0 12422544 0xbd8d90 0
|
||||
4 12510624 0xbee5a0 0
|
||||
|
||||
/esfs/jtacquaviva/ioperf/file_write.00000009
|
||||
lmm_stripe_count: 8
|
||||
lmm_stripe_size: 1048576
|
||||
lmm_pattern: 1
|
||||
lmm_layout_gen: 0
|
||||
lmm_stripe_offset: 0
|
||||
obdidx objid objid group
|
||||
0 12422536 0xbd8d88 0
|
||||
2 12568878 0xbfc92e 0
|
||||
4 12510615 0xbee597 0
|
||||
7 12459251 0xbe1cf3 0
|
||||
1 12640528 0xc0e110 0
|
||||
3 12488833 0xbe9081 0
|
||||
5 12380893 0xbceadd 0
|
||||
6 12594156 0xc02bec 0
|
||||
|
||||
/esfs/jtacquaviva/ioperf/file_write.00000015
|
||||
lmm_stripe_count: 7
|
||||
lmm_stripe_size: 1048576
|
||||
lmm_pattern: 1
|
||||
lmm_layout_gen: 0
|
||||
lmm_stripe_offset: 3
|
||||
obdidx objid objid group
|
||||
3 12488820 0xbe9074 0
|
||||
4 12510611 0xbee593 0
|
||||
5 12380882 0xbcead2 0
|
||||
7 12459246 0xbe1cee 0
|
||||
6 12594144 0xc02be0 0
|
||||
1 12640519 0xc0e107 0
|
||||
0 12422539 0xbd8d8b 0
|
||||
|
||||
/esfs/jtacquaviva/ioperf/file_write.00000001
|
||||
lmm_stripe_count: 8
|
||||
lmm_stripe_size: 1048576
|
||||
lmm_pattern: 1
|
||||
lmm_layout_gen: 0
|
||||
lmm_stripe_offset: 0
|
||||
obdidx objid objid group
|
||||
0 12422533 0xbd8d85 0
|
||||
3 12488823 0xbe9077 0
|
||||
1 12640522 0xc0e10a 0
|
||||
5 12380888 0xbcead8 0
|
||||
7 12459252 0xbe1cf4 0
|
||||
4 12510617 0xbee599 0
|
||||
6 12594153 0xc02be9 0
|
||||
2 12568885 0xbfc935 0
|
||||
|
||||
/esfs/jtacquaviva/ioperf/file_write.00000011
|
||||
lmm_stripe_count: 8
|
||||
lmm_stripe_size: 1048576
|
||||
lmm_pattern: 1
|
||||
lmm_layout_gen: 0
|
||||
lmm_stripe_offset: 7
|
||||
obdidx objid objid group
|
||||
7 12459245 0xbe1ced 0
|
||||
2 12568876 0xbfc92c 0
|
||||
1 12640520 0xc0e108 0
|
||||
4 12510614 0xbee596 0
|
||||
0 12422538 0xbd8d8a 0
|
||||
6 12594148 0xc02be4 0
|
||||
5 12380891 0xbceadb 0
|
||||
3 12488831 0xbe907f 0
|
||||
@@ -0,0 +1,345 @@
+ /opt/ddn/mvapich/bin/mpiexec -ppn 1 -np 16 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c01,isc17-c02,isc17-c03,isc17-c04,isc17-c05,isc17-c06,isc17-c07,isc17-c08,isc17-c09,isc17-c11,isc17-c12,isc17-c13,isc17-c14,isc17-c15,isc17-c18,isc17-c22 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 10485760 -b 141180272640 -D 120 -a POSIX -F -e -g -z -k -o /esfs/jtacquaviva/ioperf/file_write -w
|
||||
+ tee -a ./output/COUNT:1#NN:16#PPN:1#API:POSIX#T:10485760.txt
|
||||
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
|
||||
|
||||
Began: Tue Oct 23 08:04:11 2018
|
||||
Command line used: /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 10485760 -b 141180272640 -D 120 -a POSIX -F -e -g -z -k -o /esfs/jtacquaviva/ioperf/file_write -w
|
||||
Machine: Linux isc17-c01
|
||||
|
||||
Test 0 started: Tue Oct 23 08:04:11 2018
|
||||
Summary:
|
||||
api = POSIX
|
||||
test filename = /esfs/jtacquaviva/ioperf/file_write
|
||||
access = file-per-process
|
||||
ordering in a file = random offsets
|
||||
ordering inter file= no tasks offsets
|
||||
clients = 16 (1 per node)
|
||||
repetitions = 3
|
||||
xfersize = 10 MiB
|
||||
blocksize = 131.48 GiB
|
||||
aggregate filesize = 2103.75 GiB
|
||||
Using stonewalling = 120 second(s)
|
||||
|
||||
access bw(MiB/s) block(KiB) xfer(KiB) open(s) wr/rd(s) close(s) total(s) iter
|
||||
------ --------- ---------- --------- -------- -------- -------- -------- ----
|
||||
WARNING: Expected aggregate file size = 2258884362240.
|
||||
WARNING: Stat() of aggregate file size = 2258213273600.
|
||||
WARNING: Using actual aggregate bytes moved = 741028659200.
|
||||
write 5379 137871360 10240 0.001035 131.39 0.000464 131.39 0
|
||||
WARNING: Expected aggregate file size = 2258884362240.
|
||||
WARNING: Stat() of aggregate file size = 2258737561600.
|
||||
WARNING: Using actual aggregate bytes moved = 734506516480.
|
||||
write 5201 137871360 10240 0.001279 134.68 0.001972 134.69 1
|
||||
WARNING: Expected aggregate file size = 2258884362240.
|
||||
WARNING: Stat() of aggregate file size = 2258716590080.
|
||||
WARNING: Using actual aggregate bytes moved = 734821089280.
|
||||
write 5332 137871360 10240 0.001447 131.42 0.000908 131.42 2
|
||||
|
||||
Max Write: 5378.54 MiB/sec (5639.80 MB/sec)
|
||||
|
||||
Summary of all tests:
|
||||
Operation Max(MiB) Min(MiB) Mean(MiB) StdDev Mean(s) Test# #Tasks tPN reps fPP reord reordoff reordrand seed segcnt blksiz xsize aggsize API RefNum
|
||||
write 5378.54 5200.85 5303.89 75.27 132.50012 0 16 1 3 1 0 1 0 0 1 141180272640 10485760 741028659200 POSIX 0
|
||||
|
||||
Finished: Tue Oct 23 08:11:02 2018
|
||||
+ /opt/ddn/mvapich/bin/mpiexec -ppn 1 -np 16 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c01,isc17-c02,isc17-c03,isc17-c04,isc17-c05,isc17-c06,isc17-c07,isc17-c08,isc17-c09,isc17-c11,isc17-c12,isc17-c13,isc17-c14,isc17-c15,isc17-c18,isc17-c22 /esfs/jtacquaviva/git/ime-evaluation/drop_caches.sh
|
||||
+ /opt/ddn/mvapich/bin/mpiexec -ppn 1 -np 16 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c01,isc17-c02,isc17-c03,isc17-c04,isc17-c05,isc17-c06,isc17-c07,isc17-c08,isc17-c09,isc17-c11,isc17-c12,isc17-c13,isc17-c14,isc17-c15,isc17-c18,isc17-c22 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 10485760 -b 141180272640 -D 120 -a POSIX -F -e -g -z -k -o /esfs/jtacquaviva/indread16/file -r
|
||||
+ tee -a ./output/COUNT:1#NN:16#PPN:1#API:POSIX#T:10485760.txt
|
||||
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
|
||||
|
||||
Began: Tue Oct 23 08:11:27 2018
|
||||
Command line used: /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 10485760 -b 141180272640 -D 120 -a POSIX -F -e -g -z -k -o /esfs/jtacquaviva/indread16/file -r
|
||||
Machine: Linux isc17-c01
|
||||
|
||||
Test 0 started: Tue Oct 23 08:11:27 2018
|
||||
Summary:
|
||||
api = POSIX
|
||||
test filename = /esfs/jtacquaviva/indread16/file
|
||||
access = file-per-process
|
||||
ordering in a file = random offsets
|
||||
ordering inter file= no tasks offsets
|
||||
clients = 16 (1 per node)
|
||||
repetitions = 3
|
||||
xfersize = 10 MiB
|
||||
blocksize = 131.48 GiB
|
||||
aggregate filesize = 2103.75 GiB
|
||||
Using stonewalling = 120 second(s)
|
||||
|
||||
access bw(MiB/s) block(KiB) xfer(KiB) open(s) wr/rd(s) close(s) total(s) iter
|
||||
------ --------- ---------- --------- -------- -------- -------- -------- ----
|
||||
WARNING: Expected aggregate file size = 2258884362240.
|
||||
WARNING: Stat() of aggregate file size = 2258884362240.
|
||||
WARNING: Using actual aggregate bytes moved = 179358924800.
|
||||
read 1423.81 137871360 10240 0.000705 120.13 0.000564 120.14 0
|
||||
WARNING: Expected aggregate file size = 2258884362240.
|
||||
WARNING: Stat() of aggregate file size = 2258884362240.
|
||||
WARNING: Using actual aggregate bytes moved = 193619558400.
|
||||
read 1537.12 137871360 10240 0.000548 120.13 0.000686 120.13 1
|
||||
WARNING: Expected aggregate file size = 2258884362240.
|
||||
WARNING: Stat() of aggregate file size = 2258884362240.
|
||||
WARNING: Using actual aggregate bytes moved = 214549135360.
|
||||
read 1703.21 137871360 10240 0.000848 120.13 0.000894 120.13 2
|
||||
|
||||
Max Read: 1703.21 MiB/sec (1785.94 MB/sec)
|
||||
|
||||
Summary of all tests:
|
||||
Operation Max(MiB) Min(MiB) Mean(MiB) StdDev Mean(s) Test# #Tasks tPN reps fPP reord reordoff reordrand seed segcnt blksiz xsize aggsize API RefNum
|
||||
read 1703.21 1423.81 1554.71 114.74 120.13175 0 16 1 3 1 0 1 0 0 1 141180272640 10485760 179358924800 POSIX 0
|
||||
|
||||
Finished: Tue Oct 23 08:17:28 2018
|
||||
+ set +x
|
||||
/esfs/jtacquaviva/ioperf
|
||||
stripe_count: 32 stripe_size: 1048576 stripe_offset: -1
|
||||
/esfs/jtacquaviva/ioperf/file_write.00000006
|
||||
lmm_stripe_count: 7
|
||||
lmm_stripe_size: 1048576
|
||||
lmm_pattern: 1
|
||||
lmm_layout_gen: 0
|
||||
lmm_stripe_offset: 6
|
||||
obdidx objid objid group
|
||||
6 12593133 0xc027ed 0
|
||||
1 12639508 0xc0dd14 0
|
||||
4 12509606 0xbee1a6 0
|
||||
3 12487817 0xbe8c89 0
|
||||
0 12421532 0xbd899c 0
|
||||
2 12567877 0xbfc545 0
|
||||
7 12458240 0xbe1900 0
|
||||
|
||||
/esfs/jtacquaviva/ioperf/file_write.00000002
|
||||
lmm_stripe_count: 8
|
||||
lmm_stripe_size: 1048576
|
||||
lmm_pattern: 1
|
||||
lmm_layout_gen: 0
|
||||
lmm_stripe_offset: 6
|
||||
obdidx objid objid group
|
||||
6 12593132 0xc027ec 0
|
||||
2 12567873 0xbfc541 0
|
||||
7 12458234 0xbe18fa 0
|
||||
4 12509605 0xbee1a5 0
|
||||
5 12379883 0xbce6eb 0
|
||||
1 12639515 0xc0dd1b 0
|
||||
3 12487825 0xbe8c91 0
|
||||
0 12421538 0xbd89a2 0
|
||||
|
||||
/esfs/jtacquaviva/ioperf/file_write.00000000
|
||||
lmm_stripe_count: 8
|
||||
lmm_stripe_size: 1048576
|
||||
lmm_pattern: 1
|
||||
lmm_layout_gen: 0
|
||||
lmm_stripe_offset: 5
|
||||
obdidx objid objid group
|
||||
5 12379872 0xbce6e0 0
|
||||
4 12509601 0xbee1a1 0
|
||||
1 12639504 0xc0dd10 0
|
||||
6 12593130 0xc027ea 0
|
||||
7 12458230 0xbe18f6 0
|
||||
0 12421524 0xbd8994 0
|
||||
3 12487812 0xbe8c84 0
|
||||
2 12567870 0xbfc53e 0
|
||||
|
||||
/esfs/jtacquaviva/ioperf/file_write.00000004
|
||||
lmm_stripe_count: 8
|
||||
lmm_stripe_size: 1048576
|
||||
lmm_pattern: 1
|
||||
lmm_layout_gen: 0
|
||||
lmm_stripe_offset: 4
|
||||
obdidx objid objid group
|
||||
4 12509603 0xbee1a3 0
|
||||
5 12379875 0xbce6e3 0
|
||||
1 12639507 0xc0dd13 0
|
||||
7 12458236 0xbe18fc 0
|
||||
0 12421530 0xbd899a 0
|
||||
3 12487819 0xbe8c8b 0
|
||||
2 12567879 0xbfc547 0
|
||||
6 12593142 0xc027f6 0
|
||||
|
||||
/esfs/jtacquaviva/ioperf/file_write.00000007
|
||||
lmm_stripe_count: 8
|
||||
lmm_stripe_size: 1048576
|
||||
lmm_pattern: 1
|
||||
lmm_layout_gen: 0
|
||||
lmm_stripe_offset: 5
|
||||
obdidx objid objid group
|
||||
5 12379878 0xbce6e6 0
|
||||
2 12567882 0xbfc54a 0
|
||||
7 12458243 0xbe1903 0
|
||||
6 12593143 0xc027f7 0
|
||||
4 12509614 0xbee1ae 0
|
||||
0 12421537 0xbd89a1 0
|
||||
1 12639516 0xc0dd1c 0
|
||||
3 12487826 0xbe8c92 0
|
||||
|
||||
/esfs/jtacquaviva/ioperf/file_write.00000012
|
||||
lmm_stripe_count: 8
|
||||
lmm_stripe_size: 1048576
|
||||
lmm_pattern: 1
|
||||
lmm_layout_gen: 0
|
||||
lmm_stripe_offset: 7
|
||||
obdidx objid objid group
|
||||
7 12458239 0xbe18ff 0
|
||||
1 12639512 0xc0dd18 0
|
||||
3 12487821 0xbe8c8d 0
|
||||
5 12379885 0xbce6ed 0
|
||||
6 12593145 0xc027f9 0
|
||||
0 12421539 0xbd89a3 0
|
||||
2 12567885 0xbfc54d 0
|
||||
4 12509616 0xbee1b0 0
|
||||
|
||||
/esfs/jtacquaviva/ioperf/file_write.00000013
|
||||
lmm_stripe_count: 8
|
||||
lmm_stripe_size: 1048576
|
||||
lmm_pattern: 1
|
||||
lmm_layout_gen: 0
|
||||
lmm_stripe_offset: 1
|
||||
obdidx objid objid group
|
||||
1 12639513 0xc0dd19 0
|
||||
6 12593139 0xc027f3 0
|
||||
0 12421535 0xbd899f 0
|
||||
4 12509613 0xbee1ad 0
|
||||
3 12487823 0xbe8c8f 0
|
||||
2 12567883 0xbfc54b 0
|
||||
5 12379884 0xbce6ec 0
|
||||
7 12458244 0xbe1904 0
|
||||
|
||||
/esfs/jtacquaviva/ioperf/file_write.00000003
|
||||
lmm_stripe_count: 8
|
||||
lmm_stripe_size: 1048576
|
||||
lmm_pattern: 1
|
||||
lmm_layout_gen: 0
|
||||
lmm_stripe_offset: 6
|
||||
obdidx objid objid group
|
||||
6 12593134 0xc027ee 0
|
||||
2 12567876 0xbfc544 0
|
||||
7 12458238 0xbe18fe 0
|
||||
0 12421533 0xbd899d 0
|
||||
4 12509609 0xbee1a9 0
|
||||
3 12487818 0xbe8c8a 0
|
||||
1 12639514 0xc0dd1a 0
|
||||
5 12379882 0xbce6ea 0
|
||||
|
||||
/esfs/jtacquaviva/ioperf/file_write.00000008
|
||||
lmm_stripe_count: 8
|
||||
lmm_stripe_size: 1048576
|
||||
lmm_pattern: 1
|
||||
lmm_layout_gen: 0
|
||||
lmm_stripe_offset: 1
|
||||
obdidx objid objid group
|
||||
1 12639505 0xc0dd11 0
|
||||
2 12567871 0xbfc53f 0
|
||||
7 12458231 0xbe18f7 0
|
||||
3 12487815 0xbe8c87 0
|
||||
0 12421527 0xbd8997 0
|
||||
6 12593135 0xc027ef 0
|
||||
4 12509607 0xbee1a7 0
|
||||
5 12379877 0xbce6e5 0
|
||||
|
||||
/esfs/jtacquaviva/ioperf/file_write.00000010
|
||||
lmm_stripe_count: 8
|
||||
lmm_stripe_size: 1048576
|
||||
lmm_pattern: 1
|
||||
lmm_layout_gen: 0
|
||||
lmm_stripe_offset: 0
|
||||
obdidx objid objid group
|
||||
0 12421525 0xbd8995 0
|
||||
5 12379873 0xbce6e1 0
|
||||
1 12639506 0xc0dd12 0
|
||||
2 12567872 0xbfc540 0
|
||||
7 12458235 0xbe18fb 0
|
||||
4 12509608 0xbee1a8 0
|
||||
6 12593136 0xc027f0 0
|
||||
3 12487822 0xbe8c8e 0
|
||||
|
||||
/esfs/jtacquaviva/ioperf/file_write.00000005
|
||||
lmm_stripe_count: 7
|
||||
lmm_stripe_size: 1048576
|
||||
lmm_pattern: 1
|
||||
lmm_layout_gen: 0
|
||||
lmm_stripe_offset: 7
|
||||
obdidx objid objid group
|
||||
7 12458233 0xbe18f9 0
|
||||
0 12421528 0xbd8998 0
|
||||
5 12379880 0xbce6e8 0
|
||||
4 12509610 0xbee1aa 0
|
||||
2 12567880 0xbfc548 0
|
||||
3 12487824 0xbe8c90 0
|
||||
6 12593144 0xc027f8 0
|
||||
|
||||
/esfs/jtacquaviva/ioperf/file_write.00000014
|
||||
lmm_stripe_count: 8
|
||||
lmm_stripe_size: 1048576
|
||||
lmm_pattern: 1
|
||||
lmm_layout_gen: 0
|
||||
lmm_stripe_offset: 6
|
||||
obdidx objid objid group
|
||||
6 12593140 0xc027f4 0
|
||||
0 12421536 0xbd89a0 0
|
||||
2 12567881 0xbfc549 0
|
||||
4 12509615 0xbee1af 0
|
||||
7 12458245 0xbe1905 0
|
||||
1 12639517 0xc0dd1d 0
|
||||
3 12487827 0xbe8c93 0
|
||||
5 12379886 0xbce6ee 0
|
||||
|
||||
/esfs/jtacquaviva/ioperf/file_write.00000009
|
||||
lmm_stripe_count: 8
|
||||
lmm_stripe_size: 1048576
|
||||
lmm_pattern: 1
|
||||
lmm_layout_gen: 0
|
||||
lmm_stripe_offset: 3
|
||||
obdidx objid objid group
|
||||
3 12487813 0xbe8c85 0
|
||||
4 12509602 0xbee1a2 0
|
||||
6 12593131 0xc027eb 0
|
||||
7 12458232 0xbe18f8 0
|
||||
5 12379874 0xbce6e2 0
|
||||
0 12421526 0xbd8996 0
|
||||
1 12639509 0xc0dd15 0
|
||||
2 12567875 0xbfc543 0
|
||||
|
||||
/esfs/jtacquaviva/ioperf/file_write.00000015
|
||||
lmm_stripe_count: 8
|
||||
lmm_stripe_size: 1048576
|
||||
lmm_pattern: 1
|
||||
lmm_layout_gen: 0
|
||||
lmm_stripe_offset: 3
|
||||
obdidx objid objid group
|
||||
3 12487814 0xbe8c86 0
|
||||
4 12509604 0xbee1a4 0
|
||||
2 12567874 0xbfc542 0
|
||||
1 12639511 0xc0dd17 0
|
||||
6 12593138 0xc027f2 0
|
||||
0 12421534 0xbd899e 0
|
||||
5 12379879 0xbce6e7 0
|
||||
7 12458242 0xbe1902 0
|
||||
|
||||
/esfs/jtacquaviva/ioperf/file_write.00000001
|
||||
lmm_stripe_count: 8
|
||||
lmm_stripe_size: 1048576
|
||||
lmm_pattern: 1
|
||||
lmm_layout_gen: 0
|
||||
lmm_stripe_offset: 5
|
||||
obdidx objid objid group
|
||||
5 12379876 0xbce6e4 0
|
||||
0 12421529 0xbd8999 0
|
||||
1 12639510 0xc0dd16 0
|
||||
6 12593137 0xc027f1 0
|
||||
7 12458241 0xbe1901 0
|
||||
3 12487820 0xbe8c8c 0
|
||||
4 12509612 0xbee1ac 0
|
||||
2 12567884 0xbfc54c 0
|
||||
|
||||
/esfs/jtacquaviva/ioperf/file_write.00000011
|
||||
lmm_stripe_count: 7
|
||||
lmm_stripe_size: 1048576
|
||||
lmm_pattern: 1
|
||||
lmm_layout_gen: 0
|
||||
lmm_stripe_offset: 3
|
||||
obdidx objid objid group
|
||||
3 12487816 0xbe8c88 0
|
||||
7 12458237 0xbe18fd 0
|
||||
0 12421531 0xbd899b 0
|
||||
2 12567878 0xbfc546 0
|
||||
5 12379881 0xbce6e9 0
|
||||
6 12593141 0xc027f5 0
|
||||
4 12509611 0xbee1ab 0
|
||||
@@ -0,0 +1,243 @@
+ /opt/ddn/mvapich/bin/mpiexec -ppn 2 -np 32 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c01,isc17-c02,isc17-c03,isc17-c04,isc17-c05,isc17-c06,isc17-c07,isc17-c08,isc17-c09,isc17-c11,isc17-c12,isc17-c13,isc17-c14,isc17-c15,isc17-c18,isc17-c22 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 70590136320 -D 120 -a MPIIO -e -g -z -k -o /esfs/jtacquaviva/ioperf/file_write -w
|
||||
+ tee -a ./output/COUNT:1#NN:16#PPN:2#API:MPIIO#T:102400.txt
|
||||
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
|
||||
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
[cli_7]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 7
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
[cli_0]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 0

===================================================================================
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
= PID 34923 RUNNING AT isc17-c09
= EXIT CODE: 255
= CLEANING UP REMAINING PROCESSES
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
===================================================================================
[proxy:0:10@isc17-c12] HYD_pmcd_pmip_control_cmd_cb (pm/pmiserv/pmip_cb.c:912): assert (!closed) failed
[proxy:0:10@isc17-c12] HYDT_dmxu_poll_wait_for_event (tools/demux/demux_poll.c:76): callback returned error status
[proxy:0:10@isc17-c12] main (pm/pmiserv/pmip.c:256): demux engine error waiting for event
[proxy:0:5@isc17-c06] HYD_pmcd_pmip_control_cmd_cb (pm/pmiserv/pmip_cb.c:912): assert (!closed) failed
[proxy:0:5@isc17-c06] HYDT_dmxu_poll_wait_for_event (tools/demux/demux_poll.c:76): callback returned error status
[proxy:0:5@isc17-c06] main (pm/pmiserv/pmip.c:256): demux engine error waiting for event
[proxy:0:6@isc17-c07] HYD_pmcd_pmip_control_cmd_cb (pm/pmiserv/pmip_cb.c:912): assert (!closed) failed
[proxy:0:6@isc17-c07] HYDT_dmxu_poll_wait_for_event (tools/demux/demux_poll.c:76): callback returned error status
[proxy:0:6@isc17-c07] main (pm/pmiserv/pmip.c:256): demux engine error waiting for event
[mpiexec@isc17-c04] HYDT_bscu_wait_for_completion (tools/bootstrap/utils/bscu_wait.c:76): one of the processes terminated badly; aborting
[mpiexec@isc17-c04] HYDT_bsci_wait_for_completion (tools/bootstrap/src/bsci_wait.c:23): launcher returned error waiting for completion
[mpiexec@isc17-c04] HYD_pmci_wait_for_completion (pm/pmiserv/pmiserv_pmci.c:218): launcher returned error waiting for completion
[mpiexec@isc17-c04] main (ui/mpich/mpiexec.c:344): process manager error waiting for completion
+ /opt/ddn/mvapich/bin/mpiexec -ppn 2 -np 32 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c01,isc17-c02,isc17-c03,isc17-c04,isc17-c05,isc17-c06,isc17-c07,isc17-c08,isc17-c09,isc17-c11,isc17-c12,isc17-c13,isc17-c14,isc17-c15,isc17-c18,isc17-c22 /esfs/jtacquaviva/git/ime-evaluation/drop_caches.sh
+ tee -a ./output/COUNT:1#NN:16#PPN:2#API:MPIIO#T:102400.txt
+ /opt/ddn/mvapich/bin/mpiexec -ppn 2 -np 32 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c01,isc17-c02,isc17-c03,isc17-c04,isc17-c05,isc17-c06,isc17-c07,isc17-c08,isc17-c09,isc17-c11,isc17-c12,isc17-c13,isc17-c14,isc17-c15,isc17-c18,isc17-c22 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 70590136320 -D 120 -a MPIIO -e -g -z -k -o /esfs/jtacquaviva/file_read -r
IOR-3.0.1: MPI Coordinated Test of Parallel I/O

ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
[cli_5]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 5

===================================================================================
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
= PID 16409 RUNNING AT isc17-c15
= EXIT CODE: 255
= CLEANING UP REMAINING PROCESSES
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
===================================================================================
[proxy:0:5@isc17-c06] HYD_pmcd_pmip_control_cmd_cb (pm/pmiserv/pmip_cb.c:912): assert (!closed) failed
[proxy:0:5@isc17-c06] HYDT_dmxu_poll_wait_for_event (tools/demux/demux_poll.c:76): callback returned error status
[proxy:0:5@isc17-c06] main (pm/pmiserv/pmip.c:256): demux engine error waiting for event
[proxy:0:12@isc17-c14] HYD_pmcd_pmip_control_cmd_cb (pm/pmiserv/pmip_cb.c:912): assert (!closed) failed
[proxy:0:12@isc17-c14] HYDT_dmxu_poll_wait_for_event (tools/demux/demux_poll.c:76): callback returned error status
[proxy:0:12@isc17-c14] main (pm/pmiserv/pmip.c:256): demux engine error waiting for event
[mpiexec@isc17-c04] HYDT_bscu_wait_for_completion (tools/bootstrap/utils/bscu_wait.c:76): one of the processes terminated badly; aborting
[mpiexec@isc17-c04] HYDT_bsci_wait_for_completion (tools/bootstrap/src/bsci_wait.c:23): launcher returned error waiting for completion
[mpiexec@isc17-c04] HYD_pmci_wait_for_completion (pm/pmiserv/pmiserv_pmci.c:218): launcher returned error waiting for completion
[mpiexec@isc17-c04] main (ui/mpich/mpiexec.c:344): process manager error waiting for completion
+ set +x
/esfs/jtacquaviva/ioperf
stripe_count: 32 stripe_size: 1048576 stripe_offset: -1
@ -0,0 +1,110 @@
+ /opt/ddn/mvapich/bin/mpiexec -ppn 2 -np 32 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c01,isc17-c02,isc17-c03,isc17-c04,isc17-c05,isc17-c06,isc17-c07,isc17-c08,isc17-c09,isc17-c11,isc17-c12,isc17-c13,isc17-c14,isc17-c15,isc17-c18,isc17-c22 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 1048576 -b 70590136320 -D 120 -a MPIIO -e -g -z -k -o /esfs/jtacquaviva/ioperf/file_write -w
+ tee -a ./output/COUNT:1#NN:16#PPN:2#API:MPIIO#T:1048576.txt
IOR-3.0.1: MPI Coordinated Test of Parallel I/O

ior WARNING: fsync() only available in POSIX. Using value of 0.
Began: Tue Oct 23 10:00:33 2018
Command line used: /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 1048576 -b 70590136320 -D 120 -a MPIIO -e -g -z -k -o /esfs/jtacquaviva/ioperf/file_write -w
Machine: Linux isc17-c01

Test 0 started: Tue Oct 23 10:00:33 2018
Summary:
api = MPIIO (version=3, subversion=0)
test filename = /esfs/jtacquaviva/ioperf/file_write
access = single-shared-file
ordering in a file = random offsets
ordering inter file= no tasks offsets
clients = 32 (2 per node)
repetitions = 3
xfersize = 1 MiB
blocksize = 65.74 GiB
aggregate filesize = 2103.75 GiB
Using stonewalling = 120 second(s)

access bw(MiB/s) block(KiB) xfer(KiB) open(s) wr/rd(s) close(s) total(s) iter
------ --------- ---------- --------- -------- -------- -------- -------- ----
WARNING: Expected aggregate file size = 2258884362240.
WARNING: Stat() of aggregate file size = 2258878070784.
WARNING: Using actual aggregate bytes moved = 394322247680.
write 3128.37 68935680 1024.00 0.018345 120.19 0.001042 120.21 0
WARNING: Expected aggregate file size = 2258884362240.
WARNING: Stat() of aggregate file size = 2258882265088.
WARNING: Using actual aggregate bytes moved = 389842731008.
write 3090.10 68935680 1024.00 0.121912 120.19 0.001159 120.31 1
WARNING: Expected aggregate file size = 2258884362240.
WARNING: Stat() of aggregate file size = 2258881216512.
WARNING: Using actual aggregate bytes moved = 387821076480.
write 3074.77 68935680 1024.00 0.094908 120.19 0.001437 120.29 2

Max Write: 3128.37 MiB/sec (3280.34 MB/sec)

Summary of all tests:
Operation Max(MiB) Min(MiB) Mean(MiB) StdDev Mean(s) Test# #Tasks tPN reps fPP reord reordoff reordrand seed segcnt blksiz xsize aggsize API RefNum
write 3128.37 3074.77 3097.75 22.54 120.26981 0 32 2 3 0 0 1 0 0 1 70590136320 1048576 394322247680 MPIIO 0

Finished: Tue Oct 23 10:06:39 2018
+ /opt/ddn/mvapich/bin/mpiexec -ppn 2 -np 32 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c01,isc17-c02,isc17-c03,isc17-c04,isc17-c05,isc17-c06,isc17-c07,isc17-c08,isc17-c09,isc17-c11,isc17-c12,isc17-c13,isc17-c14,isc17-c15,isc17-c18,isc17-c22 /esfs/jtacquaviva/git/ime-evaluation/drop_caches.sh
+ tee -a ./output/COUNT:1#NN:16#PPN:2#API:MPIIO#T:1048576.txt
+ /opt/ddn/mvapich/bin/mpiexec -ppn 2 -np 32 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c01,isc17-c02,isc17-c03,isc17-c04,isc17-c05,isc17-c06,isc17-c07,isc17-c08,isc17-c09,isc17-c11,isc17-c12,isc17-c13,isc17-c14,isc17-c15,isc17-c18,isc17-c22 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 1048576 -b 70590136320 -D 120 -a MPIIO -e -g -z -k -o /esfs/jtacquaviva/file_read -r
IOR-3.0.1: MPI Coordinated Test of Parallel I/O

ior WARNING: fsync() only available in POSIX. Using value of 0.
Began: Tue Oct 23 10:07:01 2018
Command line used: /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 1048576 -b 70590136320 -D 120 -a MPIIO -e -g -z -k -o /esfs/jtacquaviva/file_read -r
Machine: Linux isc17-c01

Test 0 started: Tue Oct 23 10:07:01 2018
Summary:
api = MPIIO (version=3, subversion=0)
test filename = /esfs/jtacquaviva/file_read
access = single-shared-file
ordering in a file = random offsets
ordering inter file= no tasks offsets
clients = 32 (2 per node)
repetitions = 3
xfersize = 1 MiB
blocksize = 65.74 GiB
aggregate filesize = 2103.75 GiB
Using stonewalling = 120 second(s)

access bw(MiB/s) block(KiB) xfer(KiB) open(s) wr/rd(s) close(s) total(s) iter
------ --------- ---------- --------- -------- -------- -------- -------- ----
WARNING: Expected aggregate file size = 2258884362240.
WARNING: Stat() of aggregate file size = 4517768724480.
WARNING: Using actual aggregate bytes moved = 139143938048.
read 1103.78 68935680 1024.00 0.057155 120.16 0.014544 120.22 0
WARNING: Expected aggregate file size = 2258884362240.
WARNING: Stat() of aggregate file size = 4517768724480.
WARNING: Using actual aggregate bytes moved = 141212778496.
read 1120.65 68935680 1024.00 0.009195 120.17 0.008226 120.17 1
WARNING: Expected aggregate file size = 2258884362240.
WARNING: Stat() of aggregate file size = 4517768724480.
WARNING: Using actual aggregate bytes moved = 141608091648.
read 1123.94 68935680 1024.00 0.009282 120.15 0.008219 120.16 2

Max Read: 1123.94 MiB/sec (1178.53 MB/sec)

Summary of all tests:
Operation Max(MiB) Min(MiB) Mean(MiB) StdDev Mean(s) Test# #Tasks tPN reps fPP reord reordoff reordrand seed segcnt blksiz xsize aggsize API RefNum
read 1123.94 1103.78 1116.12 8.83 120.18323 0 32 2 3 0 0 1 0 0 1 70590136320 1048576 139143938048 MPIIO 0

Finished: Tue Oct 23 10:13:01 2018
+ set +x
/esfs/jtacquaviva/ioperf
stripe_count: 32 stripe_size: 1048576 stripe_offset: -1
/esfs/jtacquaviva/ioperf/file_write
lmm_stripe_count: 8
lmm_stripe_size: 1048576
lmm_pattern: 1
lmm_layout_gen: 0
lmm_stripe_offset: 7
obdidx objid objid group
7 12459209 0xbe1cc9 0
1 12640484 0xc0e0e4 0
3 12488787 0xbe9053 0
5 12380851 0xbceab3 0
6 12594110 0xc02bbe 0
0 12422502 0xbd8d66 0
2 12568843 0xbfc90b 0
4 12510577 0xbee571 0
@ -0,0 +1,110 @@
+ /opt/ddn/mvapich/bin/mpiexec -ppn 2 -np 32 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c01,isc17-c02,isc17-c03,isc17-c04,isc17-c05,isc17-c06,isc17-c07,isc17-c08,isc17-c09,isc17-c11,isc17-c12,isc17-c13,isc17-c14,isc17-c15,isc17-c18,isc17-c22 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 10485760 -b 70590136320 -D 120 -a MPIIO -e -g -z -k -o /esfs/jtacquaviva/ioperf/file_write -w
+ tee -a ./output/COUNT:1#NN:16#PPN:2#API:MPIIO#T:10485760.txt
IOR-3.0.1: MPI Coordinated Test of Parallel I/O

ior WARNING: fsync() only available in POSIX. Using value of 0.
Began: Tue Oct 23 07:51:42 2018
Command line used: /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 10485760 -b 70590136320 -D 120 -a MPIIO -e -g -z -k -o /esfs/jtacquaviva/ioperf/file_write -w
Machine: Linux isc17-c01

Test 0 started: Tue Oct 23 07:51:42 2018
Summary:
api = MPIIO (version=3, subversion=0)
test filename = /esfs/jtacquaviva/ioperf/file_write
access = single-shared-file
ordering in a file = random offsets
ordering inter file= no tasks offsets
clients = 32 (2 per node)
repetitions = 3
xfersize = 10 MiB
blocksize = 65.74 GiB
aggregate filesize = 2103.75 GiB
Using stonewalling = 120 second(s)

access bw(MiB/s) block(KiB) xfer(KiB) open(s) wr/rd(s) close(s) total(s) iter
------ --------- ---------- --------- -------- -------- -------- -------- ----
WARNING: Expected aggregate file size = 2258884362240.
WARNING: Stat() of aggregate file size = 2258852904960.
WARNING: Using actual aggregate bytes moved = 297806069760.
write 2357.33 68935680 10240 0.010628 120.47 0.001143 120.48 0
WARNING: Expected aggregate file size = 2258884362240.
WARNING: Stat() of aggregate file size = 2258873876480.
WARNING: Using actual aggregate bytes moved = 299567677440.
write 2361.22 68935680 10240 0.120874 120.87 0.001215 120.99 1
WARNING: Expected aggregate file size = 2258884362240.
WARNING: Stat() of aggregate file size = 2258873876480.
WARNING: Using actual aggregate bytes moved = 298686873600.
write 2364.97 68935680 10240 0.101102 120.34 0.001640 120.45 2

Max Write: 2364.97 MiB/sec (2479.85 MB/sec)

Summary of all tests:
Operation Max(MiB) Min(MiB) Mean(MiB) StdDev Mean(s) Test# #Tasks tPN reps fPP reord reordoff reordrand seed segcnt blksiz xsize aggsize API RefNum
write 2364.97 2357.33 2361.17 3.12 120.63914 0 32 2 3 0 0 1 0 0 1 70590136320 10485760 297806069760 MPIIO 0

Finished: Tue Oct 23 07:57:48 2018
+ /opt/ddn/mvapich/bin/mpiexec -ppn 2 -np 32 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c01,isc17-c02,isc17-c03,isc17-c04,isc17-c05,isc17-c06,isc17-c07,isc17-c08,isc17-c09,isc17-c11,isc17-c12,isc17-c13,isc17-c14,isc17-c15,isc17-c18,isc17-c22 /esfs/jtacquaviva/git/ime-evaluation/drop_caches.sh
+ tee -a ./output/COUNT:1#NN:16#PPN:2#API:MPIIO#T:10485760.txt
+ /opt/ddn/mvapich/bin/mpiexec -ppn 2 -np 32 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c01,isc17-c02,isc17-c03,isc17-c04,isc17-c05,isc17-c06,isc17-c07,isc17-c08,isc17-c09,isc17-c11,isc17-c12,isc17-c13,isc17-c14,isc17-c15,isc17-c18,isc17-c22 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 10485760 -b 70590136320 -D 120 -a MPIIO -e -g -z -k -o /esfs/jtacquaviva/file_read -r
IOR-3.0.1: MPI Coordinated Test of Parallel I/O

ior WARNING: fsync() only available in POSIX. Using value of 0.
Began: Tue Oct 23 07:58:09 2018
Command line used: /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 10485760 -b 70590136320 -D 120 -a MPIIO -e -g -z -k -o /esfs/jtacquaviva/file_read -r
Machine: Linux isc17-c01

Test 0 started: Tue Oct 23 07:58:09 2018
Summary:
api = MPIIO (version=3, subversion=0)
test filename = /esfs/jtacquaviva/file_read
access = single-shared-file
ordering in a file = random offsets
ordering inter file= no tasks offsets
clients = 32 (2 per node)
repetitions = 3
xfersize = 10 MiB
blocksize = 65.74 GiB
aggregate filesize = 2103.75 GiB
Using stonewalling = 120 second(s)

access bw(MiB/s) block(KiB) xfer(KiB) open(s) wr/rd(s) close(s) total(s) iter
------ --------- ---------- --------- -------- -------- -------- -------- ----
WARNING: Expected aggregate file size = 2258884362240.
WARNING: Stat() of aggregate file size = 4517768724480.
WARNING: Using actual aggregate bytes moved = 339256279040.
read 2692.31 68935680 10240 0.072617 120.09 0.008699 120.17 0
WARNING: Expected aggregate file size = 2258884362240.
WARNING: Stat() of aggregate file size = 4517768724480.
WARNING: Using actual aggregate bytes moved = 340451655680.
read 2702.94 68935680 10240 0.002030 120.12 0.001370 120.12 1
WARNING: Expected aggregate file size = 2258884362240.
WARNING: Stat() of aggregate file size = 4517768724480.
WARNING: Using actual aggregate bytes moved = 343523983360.
read 2727.72 68935680 10240 0.002360 120.10 0.001708 120.10 2

Max Read: 2727.72 MiB/sec (2860.22 MB/sec)

Summary of all tests:
Operation Max(MiB) Min(MiB) Mean(MiB) StdDev Mean(s) Test# #Tasks tPN reps fPP reord reordoff reordrand seed segcnt blksiz xsize aggsize API RefNum
read 2727.72 2692.31 2707.66 14.84 120.13227 0 32 2 3 0 0 1 0 0 1 70590136320 10485760 339256279040 MPIIO 0

Finished: Tue Oct 23 08:04:10 2018
+ set +x
/esfs/jtacquaviva/ioperf
stripe_count: 32 stripe_size: 1048576 stripe_offset: -1
/esfs/jtacquaviva/ioperf/file_write
lmm_stripe_count: 8
lmm_stripe_size: 1048576
lmm_pattern: 1
lmm_layout_gen: 0
lmm_stripe_offset: 5
obdidx objid objid group
5 12379839 0xbce6bf 0
6 12593099 0xc027cb 0
0 12421491 0xbd8973 0
2 12567840 0xbfc520 0
4 12509568 0xbee180 0
7 12458199 0xbe18d7 0
1 12639471 0xc0dcef 0
3 12487781 0xbe8c65 0
@ -0,0 +1,234 @@
+ /opt/ddn/mvapich/bin/mpiexec -ppn 2 -np 32 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c01,isc17-c02,isc17-c03,isc17-c04,isc17-c05,isc17-c06,isc17-c07,isc17-c08,isc17-c09,isc17-c11,isc17-c12,isc17-c13,isc17-c14,isc17-c15,isc17-c18,isc17-c22 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 70590136320 -D 120 -a POSIX -F -e -g -z -k -o /esfs/jtacquaviva/ioperf/file_write -w
+ tee -a ./output/COUNT:1#NN:16#PPN:2#API:POSIX#T:102400.txt
IOR-3.0.1: MPI Coordinated Test of Parallel I/O

ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
[cli_7]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 7

===================================================================================
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
= PID 34847 RUNNING AT isc17-c09
= EXIT CODE: 255
= CLEANING UP REMAINING PROCESSES
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
===================================================================================
[proxy:0:12@isc17-c14] HYD_pmcd_pmip_control_cmd_cb (pm/pmiserv/pmip_cb.c:912): assert (!closed) failed
[proxy:0:12@isc17-c14] HYDT_dmxu_poll_wait_for_event (tools/demux/demux_poll.c:76): callback returned error status
[proxy:0:12@isc17-c14] main (pm/pmiserv/pmip.c:256): demux engine error waiting for event
[mpiexec@isc17-c04] HYDT_bscu_wait_for_completion (tools/bootstrap/utils/bscu_wait.c:76): one of the processes terminated badly; aborting
[mpiexec@isc17-c04] HYDT_bsci_wait_for_completion (tools/bootstrap/src/bsci_wait.c:23): launcher returned error waiting for completion
[mpiexec@isc17-c04] HYD_pmci_wait_for_completion (pm/pmiserv/pmiserv_pmci.c:218): launcher returned error waiting for completion
[mpiexec@isc17-c04] main (ui/mpich/mpiexec.c:344): process manager error waiting for completion
+ /opt/ddn/mvapich/bin/mpiexec -ppn 2 -np 32 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c01,isc17-c02,isc17-c03,isc17-c04,isc17-c05,isc17-c06,isc17-c07,isc17-c08,isc17-c09,isc17-c11,isc17-c12,isc17-c13,isc17-c14,isc17-c15,isc17-c18,isc17-c22 /esfs/jtacquaviva/git/ime-evaluation/drop_caches.sh
+ tee -a ./output/COUNT:1#NN:16#PPN:2#API:POSIX#T:102400.txt
+ /opt/ddn/mvapich/bin/mpiexec -ppn 2 -np 32 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c01,isc17-c02,isc17-c03,isc17-c04,isc17-c05,isc17-c06,isc17-c07,isc17-c08,isc17-c09,isc17-c11,isc17-c12,isc17-c13,isc17-c14,isc17-c15,isc17-c18,isc17-c22 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 70590136320 -D 120 -a POSIX -F -e -g -z -k -o /esfs/jtacquaviva/indread16/file -r
IOR-3.0.1: MPI Coordinated Test of Parallel I/O

ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
[cli_20]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 20

===================================================================================
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
= PID 34897 RUNNING AT isc17-c09
= EXIT CODE: 255
= CLEANING UP REMAINING PROCESSES
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
===================================================================================
[proxy:0:6@isc17-c07] HYD_pmcd_pmip_control_cmd_cb (pm/pmiserv/pmip_cb.c:912): assert (!closed) failed
[proxy:0:6@isc17-c07] HYDT_dmxu_poll_wait_for_event (tools/demux/demux_poll.c:76): callback returned error status
[proxy:0:6@isc17-c07] main (pm/pmiserv/pmip.c:256): demux engine error waiting for event
[mpiexec@isc17-c04] HYDT_bscu_wait_for_completion (tools/bootstrap/utils/bscu_wait.c:76): one of the processes terminated badly; aborting
[mpiexec@isc17-c04] HYDT_bsci_wait_for_completion (tools/bootstrap/src/bsci_wait.c:23): launcher returned error waiting for completion
[mpiexec@isc17-c04] HYD_pmci_wait_for_completion (pm/pmiserv/pmiserv_pmci.c:218): launcher returned error waiting for completion
[mpiexec@isc17-c04] main (ui/mpich/mpiexec.c:344): process manager error waiting for completion
+ set +x
/esfs/jtacquaviva/ioperf
stripe_count: 32 stripe_size: 1048576 stripe_offset: -1
@ -0,0 +1,598 @@
+ /opt/ddn/mvapich/bin/mpiexec -ppn 2 -np 32 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c01,isc17-c02,isc17-c03,isc17-c04,isc17-c05,isc17-c06,isc17-c07,isc17-c08,isc17-c09,isc17-c11,isc17-c12,isc17-c13,isc17-c14,isc17-c15,isc17-c18,isc17-c22 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 1048576 -b 70590136320 -D 120 -a POSIX -F -e -g -z -k -o /esfs/jtacquaviva/ioperf/file_write -w
+ tee -a ./output/COUNT:1#NN:16#PPN:2#API:POSIX#T:1048576.txt
IOR-3.0.1: MPI Coordinated Test of Parallel I/O

Began: Tue Oct 23 09:47:19 2018
Command line used: /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 1048576 -b 70590136320 -D 120 -a POSIX -F -e -g -z -k -o /esfs/jtacquaviva/ioperf/file_write -w
Machine: Linux isc17-c01

Test 0 started: Tue Oct 23 09:47:19 2018
Summary:
api = POSIX
test filename = /esfs/jtacquaviva/ioperf/file_write
access = file-per-process
ordering in a file = random offsets
ordering inter file= no tasks offsets
clients = 32 (2 per node)
repetitions = 3
xfersize = 1 MiB
blocksize = 65.74 GiB
aggregate filesize = 2103.75 GiB
Using stonewalling = 120 second(s)

access bw(MiB/s) block(KiB) xfer(KiB) open(s) wr/rd(s) close(s) total(s) iter
------ --------- ---------- --------- -------- -------- -------- -------- ----
WARNING: Expected aggregate file size = 2258884362240.
WARNING: Stat() of aggregate file size = 2258788941824.
WARNING: Using actual aggregate bytes moved = 658624217088.
write 4669 68935680 1024.00 0.001081 134.51 0.000669 134.52 0
WARNING: Expected aggregate file size = 2258884362240.
WARNING: Stat() of aggregate file size = 2258653675520.
WARNING: Using actual aggregate bytes moved = 668196667392.
write 4746 68935680 1024.00 0.004385 134.26 0.001022 134.26 1
WARNING: Expected aggregate file size = 2258884362240.
WARNING: Stat() of aggregate file size = 2258737561600.
WARNING: Using actual aggregate bytes moved = 653849001984.
write 4637 68935680 1024.00 0.002475 134.48 0.001501 134.48 2

Max Write: 4746.30 MiB/sec (4976.86 MB/sec)

Summary of all tests:
Operation Max(MiB) Min(MiB) Mean(MiB) StdDev Mean(s) Test# #Tasks tPN reps fPP reord reordoff reordrand seed segcnt blksiz xsize aggsize API RefNum
write 4746.30 4636.84 4684.20 45.89 134.41843 0 32 2 3 1 0 1 0 0 1 70590136320 1048576 658624217088 POSIX 0

Finished: Tue Oct 23 09:54:11 2018
+ /opt/ddn/mvapich/bin/mpiexec -ppn 2 -np 32 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c01,isc17-c02,isc17-c03,isc17-c04,isc17-c05,isc17-c06,isc17-c07,isc17-c08,isc17-c09,isc17-c11,isc17-c12,isc17-c13,isc17-c14,isc17-c15,isc17-c18,isc17-c22 /esfs/jtacquaviva/git/ime-evaluation/drop_caches.sh
+ tee -a ./output/COUNT:1#NN:16#PPN:2#API:POSIX#T:1048576.txt
+ /opt/ddn/mvapich/bin/mpiexec -ppn 2 -np 32 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c01,isc17-c02,isc17-c03,isc17-c04,isc17-c05,isc17-c06,isc17-c07,isc17-c08,isc17-c09,isc17-c11,isc17-c12,isc17-c13,isc17-c14,isc17-c15,isc17-c18,isc17-c22 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 1048576 -b 70590136320 -D 120 -a POSIX -F -e -g -z -k -o /esfs/jtacquaviva/indread16/file -r
IOR-3.0.1: MPI Coordinated Test of Parallel I/O

Began: Tue Oct 23 09:54:31 2018
Command line used: /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 1048576 -b 70590136320 -D 120 -a POSIX -F -e -g -z -k -o /esfs/jtacquaviva/indread16/file -r
Machine: Linux isc17-c01

Test 0 started: Tue Oct 23 09:54:31 2018
Summary:
api = POSIX
test filename = /esfs/jtacquaviva/indread16/file
access = file-per-process
ordering in a file = random offsets
ordering inter file= no tasks offsets
clients = 32 (2 per node)
repetitions = 3
xfersize = 1 MiB
blocksize = 65.74 GiB
aggregate filesize = 2103.75 GiB
Using stonewalling = 120 second(s)

access bw(MiB/s) block(KiB) xfer(KiB) open(s) wr/rd(s) close(s) total(s) iter
------ --------- ---------- --------- -------- -------- -------- -------- ----
WARNING: Expected aggregate file size = 2258884362240.
WARNING: Stat() of aggregate file size = 4517768724480.
WARNING: Using actual aggregate bytes moved = 289747763200.
read 2301.58 68935680 1024.00 0.004985 120.06 0.004599 120.06 0
WARNING: Expected aggregate file size = 2258884362240.
WARNING: Stat() of aggregate file size = 4517768724480.
WARNING: Using actual aggregate bytes moved = 329581068288.
read 2618.67 68935680 1024.00 0.004627 120.03 0.004544 120.03 1
WARNING: Expected aggregate file size = 2258884362240.
WARNING: Stat() of aggregate file size = 4517768724480.
WARNING: Using actual aggregate bytes moved = 353401569280.
read 2808.00 68935680 1024.00 0.004710 120.02 0.004864 120.03 2

Max Read: 2808.00 MiB/sec (2944.40 MB/sec)

Summary of all tests:
Operation Max(MiB) Min(MiB) Mean(MiB) StdDev Mean(s) Test# #Tasks tPN reps fPP reord reordoff reordrand seed segcnt blksiz xsize aggsize API RefNum
read 2808.00 2301.58 2576.08 208.93 120.03717 0 32 2 3 1 0 1 0 0 1 70590136320 1048576 289747763200 POSIX 0

Finished: Tue Oct 23 10:00:31 2018
+ set +x
/esfs/jtacquaviva/ioperf
stripe_count: 32 stripe_size: 1048576 stripe_offset: -1
/esfs/jtacquaviva/ioperf/file_write.00000006
lmm_stripe_count: 8
lmm_stripe_size: 1048576
lmm_pattern: 1
lmm_layout_gen: 0
lmm_stripe_offset: 7
obdidx objid objid group
7 12459177 0xbe1ca9 0
1 12640453 0xc0e0c5 0
5 12380824 0xbcea98 0
6 12594081 0xc02ba1 0
3 12488763 0xbe903b 0
4 12510552 0xbee558 0
0 12422478 0xbd8d4e 0
2 12568820 0xbfc8f4 0

/esfs/jtacquaviva/ioperf/file_write.00000023
lmm_stripe_count: 8
lmm_stripe_size: 1048576
lmm_pattern: 1
lmm_layout_gen: 0
lmm_stripe_offset: 2
obdidx objid objid group
2 12568812 0xbfc8ec 0
3 12488759 0xbe9037 0
7 12459183 0xbe1caf 0
0 12422476 0xbd8d4c 0
5 12380829 0xbcea9d 0
1 12640462 0xc0e0ce 0
6 12594087 0xc02ba7 0
4 12510556 0xbee55c 0

/esfs/jtacquaviva/ioperf/file_write.00000002
lmm_stripe_count: 8
lmm_stripe_size: 1048576
lmm_pattern: 1
lmm_layout_gen: 0
lmm_stripe_offset: 6
obdidx objid objid group
6 12594094 0xc02bae 0
2 12568831 0xbfc8ff 0
7 12459198 0xbe1cbe 0
3 12488778 0xbe904a 0
0 12422492 0xbd8d5c 0
1 12640477 0xc0e0dd 0
4 12510567 0xbee567 0
5 12380846 0xbceaae 0

/esfs/jtacquaviva/ioperf/file_write.00000026
lmm_stripe_count: 8
lmm_stripe_size: 1048576
lmm_pattern: 1
lmm_layout_gen: 0
lmm_stripe_offset: 0
obdidx objid objid group
0 12422468 0xbd8d44 0
3 12488755 0xbe9033 0
6 12594076 0xc02b9c 0
5 12380819 0xbcea93 0
1 12640451 0xc0e0c3 0
2 12568811 0xbfc8eb 0
7 12459178 0xbe1caa 0
4 12510551 0xbee557 0

/esfs/jtacquaviva/ioperf/file_write.00000000
lmm_stripe_count: 8
lmm_stripe_size: 1048576
lmm_pattern: 1
lmm_layout_gen: 0
lmm_stripe_offset: 6
obdidx objid objid group
6 12594105 0xc02bb9 0
0 12422498 0xbd8d62 0
2 12568840 0xbfc908 0
4 12510574 0xbee56e 0
7 12459205 0xbe1cc5 0
1 12640481 0xc0e0e1 0
3 12488784 0xbe9050 0
5 12380848 0xbceab0 0

/esfs/jtacquaviva/ioperf/file_write.00000020
lmm_stripe_count: 8
lmm_stripe_size: 1048576
lmm_pattern: 1
lmm_layout_gen: 0
lmm_stripe_offset: 2
obdidx objid objid group
2 12568829 0xbfc8fd 0
7 12459197 0xbe1cbd 0
3 12488774 0xbe9046 0
0 12422493 0xbd8d5d 0
1 12640478 0xc0e0de 0
6 12594103 0xc02bb7 0
4 12510571 0xbee56b 0
5 12380847 0xbceaaf 0

/esfs/jtacquaviva/ioperf/file_write.00000030
lmm_stripe_count: 8
lmm_stripe_size: 1048576
lmm_pattern: 1
lmm_layout_gen: 0
lmm_stripe_offset: 3
obdidx objid objid group
3 12488779 0xbe904b 0
5 12380843 0xbceaab 0
6 12594100 0xc02bb4 0
0 12422494 0xbd8d5e 0
2 12568836 0xbfc904 0
4 12510569 0xbee569 0
7 12459202 0xbe1cc2 0
1 12640479 0xc0e0df 0

/esfs/jtacquaviva/ioperf/file_write.00000004
lmm_stripe_count: 8
lmm_stripe_size: 1048576
lmm_pattern: 1
lmm_layout_gen: 0
lmm_stripe_offset: 3
obdidx objid objid group
3 12488770 0xbe9042 0
0 12422485 0xbd8d55 0
4 12510561 0xbee561 0
2 12568826 0xbfc8fa 0
7 12459194 0xbe1cba 0
6 12594092 0xc02bac 0
1 12640468 0xc0e0d4 0
5 12380837 0xbceaa5 0

/esfs/jtacquaviva/ioperf/file_write.00000007
lmm_stripe_count: 8
lmm_stripe_size: 1048576
lmm_pattern: 1
lmm_layout_gen: 0
lmm_stripe_offset: 5
obdidx objid objid group
5 12380836 0xbceaa4 0
7 12459193 0xbe1cb9 0
1 12640467 0xc0e0d3 0
2 12568827 0xbfc8fb 0
0 12422486 0xbd8d56 0
4 12510564 0xbee564 0
3 12488772 0xbe9044 0
6 12594097 0xc02bb1 0

/esfs/jtacquaviva/ioperf/file_write.00000018
lmm_stripe_count: 8
lmm_stripe_size: 1048576
lmm_pattern: 1
lmm_layout_gen: 0
lmm_stripe_offset: 6
obdidx objid objid group
6 12594077 0xc02b9d 0
5 12380821 0xbcea95 0
4 12510547 0xbee553 0
0 12422471 0xbd8d47 0
2 12568814 0xbfc8ee 0
7 12459182 0xbe1cae 0
1 12640454 0xc0e0c6 0
3 12488762 0xbe903a 0

/esfs/jtacquaviva/ioperf/file_write.00000031
lmm_stripe_count: 8
lmm_stripe_size: 1048576
lmm_pattern: 1
lmm_layout_gen: 0
lmm_stripe_offset: 6
obdidx objid objid group
6 12594078 0xc02b9e 0
7 12459180 0xbe1cac 0
5 12380826 0xbcea9a 0
1 12640461 0xc0e0cd 0
4 12510554 0xbee55a 0
3 12488766 0xbe903e 0
2 12568821 0xbfc8f5 0
0 12422480 0xbd8d50 0

/esfs/jtacquaviva/ioperf/file_write.00000012
lmm_stripe_count: 7
lmm_stripe_size: 1048576
lmm_pattern: 1
lmm_layout_gen: 0
lmm_stripe_offset: 1
obdidx objid objid group
1 12640470 0xc0e0d6 0
2 12568830 0xbfc8fe 0
4 12510565 0xbee565 0
3 12488777 0xbe9049 0
7 12459206 0xbe1cc6 0
6 12594106 0xc02bba 0
0 12422499 0xbd8d63 0

/esfs/jtacquaviva/ioperf/file_write.00000013
lmm_stripe_count: 7
lmm_stripe_size: 1048576
lmm_pattern: 1
lmm_layout_gen: 0
lmm_stripe_offset: 2
obdidx objid objid group
2 12568813 0xbfc8ed 0
7 12459181 0xbe1cad 0
6 12594083 0xc02ba3 0
1 12640459 0xc0e0cb 0
0 12422477 0xbd8d4d 0
4 12510553 0xbee559 0
5 12380830 0xbcea9e 0

/esfs/jtacquaviva/ioperf/file_write.00000025
lmm_stripe_count: 7
lmm_stripe_size: 1048576
lmm_pattern: 1
lmm_layout_gen: 0
lmm_stripe_offset: 0
obdidx objid objid group
0 12422488 0xbd8d58 0
1 12640471 0xc0e0d7 0
2 12568832 0xbfc900 0
5 12380845 0xbceaad 0
4 12510570 0xbee56a 0
7 12459201 0xbe1cc1 0
6 12594104 0xc02bb8 0

/esfs/jtacquaviva/ioperf/file_write.00000022
lmm_stripe_count: 8
lmm_stripe_size: 1048576
lmm_pattern: 1
lmm_layout_gen: 0
lmm_stripe_offset: 1
obdidx objid objid group
1 12640472 0xc0e0d8 0
3 12488775 0xbe9047 0
5 12380842 0xbceaaa 0
6 12594102 0xc02bb6 0
0 12422495 0xbd8d5f 0
2 12568837 0xbfc905 0
4 12510572 0xbee56c 0
7 12459203 0xbe1cc3 0

/esfs/jtacquaviva/ioperf/file_write.00000003
lmm_stripe_count: 8
lmm_stripe_size: 1048576
lmm_pattern: 1
lmm_layout_gen: 0
lmm_stripe_offset: 6
obdidx objid objid group
6 12594075 0xc02b9b 0
4 12510544 0xbee550 0
1 12640450 0xc0e0c2 0
0 12422469 0xbd8d45 0
7 12459176 0xbe1ca8 0
2 12568809 0xbfc8e9 0
3 12488756 0xbe9034 0
5 12380822 0xbcea96 0

/esfs/jtacquaviva/ioperf/file_write.00000008
lmm_stripe_count: 7
lmm_stripe_size: 1048576
lmm_pattern: 1
lmm_layout_gen: 0
lmm_stripe_offset: 6
obdidx objid objid group
6 12594093 0xc02bad 0
1 12640469 0xc0e0d5 0
4 12510563 0xbee563 0
3 12488776 0xbe9048 0
7 12459200 0xbe1cc0 0
2 12568833 0xbfc901 0
0 12422496 0xbd8d60 0

/esfs/jtacquaviva/ioperf/file_write.00000028
lmm_stripe_count: 8
lmm_stripe_size: 1048576
lmm_pattern: 1
lmm_layout_gen: 0
lmm_stripe_offset: 7
obdidx objid objid group
7 12459195 0xbe1cbb 0
4 12510562 0xbee562 0
2 12568828 0xbfc8fc 0
5 12380840 0xbceaa8 0
0 12422489 0xbd8d59 0
3 12488773 0xbe9045 0
1 12640474 0xc0e0da 0
6 12594096 0xc02bb0 0

/esfs/jtacquaviva/ioperf/file_write.00000010
lmm_stripe_count: 8
lmm_stripe_size: 1048576
lmm_pattern: 1
lmm_layout_gen: 0
lmm_stripe_offset: 5
obdidx objid objid group
5 12380838 0xbceaa6 0
6 12594095 0xc02baf 0
1 12640473 0xc0e0d9 0
0 12422491 0xbd8d5b 0
4 12510566 0xbee566 0
7 12459199 0xbe1cbf 0
3 12488780 0xbe904c 0
2 12568835 0xbfc903 0

/esfs/jtacquaviva/ioperf/file_write.00000017
lmm_stripe_count: 8
lmm_stripe_size: 1048576
lmm_pattern: 1
lmm_layout_gen: 0
lmm_stripe_offset: 3
obdidx objid objid group
3 12488758 0xbe9036 0
4 12510550 0xbee556 0
5 12380825 0xbcea99 0
0 12422475 0xbd8d4b 0
1 12640456 0xc0e0c8 0
2 12568818 0xbfc8f2 0
6 12594085 0xc02ba5 0
7 12459185 0xbe1cb1 0

/esfs/jtacquaviva/ioperf/file_write.00000029
lmm_stripe_count: 8
lmm_stripe_size: 1048576
lmm_pattern: 1
lmm_layout_gen: 0
lmm_stripe_offset: 5
obdidx objid objid group
5 12380823 0xbcea97 0
3 12488760 0xbe9038 0
6 12594080 0xc02ba0 0
2 12568816 0xbfc8f0 0
7 12459184 0xbe1cb0 0
0 12422479 0xbd8d4f 0
4 12510555 0xbee55b 0
1 12640463 0xc0e0cf 0

/esfs/jtacquaviva/ioperf/file_write.00000016
lmm_stripe_count: 7
lmm_stripe_size: 1048576
lmm_pattern: 1
lmm_layout_gen: 0
lmm_stripe_offset: 5
obdidx objid objid group
5 12380839 0xbceaa7 0
0 12422490 0xbd8d5a 0
1 12640476 0xc0e0dc 0
6 12594099 0xc02bb3 0
4 12510568 0xbee568 0
2 12568838 0xbfc906 0
3 12488782 0xbe904e 0

/esfs/jtacquaviva/ioperf/file_write.00000005
lmm_stripe_count: 8
lmm_stripe_size: 1048576
lmm_pattern: 1
lmm_layout_gen: 0
lmm_stripe_offset: 4
obdidx objid objid group
4 12510548 0xbee554 0
0 12422472 0xbd8d48 0
1 12640457 0xc0e0c9 0
2 12568819 0xbfc8f3 0
6 12594086 0xc02ba6 0
7 12459188 0xbe1cb4 0
3 12488767 0xbe903f 0
5 12380832 0xbceaa0 0

/esfs/jtacquaviva/ioperf/file_write.00000014
lmm_stripe_count: 8
lmm_stripe_size: 1048576
lmm_pattern: 1
lmm_layout_gen: 0
lmm_stripe_offset: 5
obdidx objid objid group
5 12380834 0xbceaa2 0
6 12594090 0xc02baa 0
0 12422483 0xbd8d53 0
2 12568824 0xbfc8f8 0
4 12510559 0xbee55f 0
7 12459191 0xbe1cb7 0
1 12640465 0xc0e0d1 0
3 12488769 0xbe9041 0

/esfs/jtacquaviva/ioperf/file_write.00000021
lmm_stripe_count: 8
lmm_stripe_size: 1048576
lmm_pattern: 1
lmm_layout_gen: 0
lmm_stripe_offset: 3
obdidx objid objid group
3 12488757 0xbe9035 0
4 12510549 0xbee555 0
0 12422473 0xbd8d49 0
1 12640455 0xc0e0c7 0
6 12594082 0xc02ba2 0
5 12380828 0xbcea9c 0
2 12568817 0xbfc8f1 0
7 12459187 0xbe1cb3 0

/esfs/jtacquaviva/ioperf/file_write.00000019
lmm_stripe_count: 8
lmm_stripe_size: 1048576
lmm_pattern: 1
lmm_layout_gen: 0
lmm_stripe_offset: 5
obdidx objid objid group
5 12380844 0xbceaac 0
6 12594101 0xc02bb5 0
0 12422497 0xbd8d61 0
2 12568839 0xbfc907 0
4 12510573 0xbee56d 0
7 12459204 0xbe1cc4 0
1 12640480 0xc0e0e0 0
3 12488783 0xbe904f 0

/esfs/jtacquaviva/ioperf/file_write.00000009
lmm_stripe_count: 8
lmm_stripe_size: 1048576
lmm_pattern: 1
lmm_layout_gen: 0
lmm_stripe_offset: 3
obdidx objid objid group
3 12488768 0xbe9040 0
5 12380833 0xbceaa1 0
6 12594089 0xc02ba9 0
0 12422482 0xbd8d52 0
2 12568823 0xbfc8f7 0
4 12510558 0xbee55e 0
7 12459190 0xbe1cb6 0
1 12640464 0xc0e0d0 0

/esfs/jtacquaviva/ioperf/file_write.00000015
lmm_stripe_count: 8
lmm_stripe_size: 1048576
lmm_pattern: 1
lmm_layout_gen: 0
lmm_stripe_offset: 1
|
||||
obdidx objid objid group
|
||||
1 12640460 0xc0e0cc 0
|
||||
3 12488765 0xbe903d 0
|
||||
5 12380831 0xbcea9f 0
|
||||
6 12594088 0xc02ba8 0
|
||||
0 12422481 0xbd8d51 0
|
||||
2 12568822 0xbfc8f6 0
|
||||
4 12510557 0xbee55d 0
|
||||
7 12459189 0xbe1cb5 0
|
||||
|
||||
/esfs/jtacquaviva/ioperf/file_write.00000027
|
||||
lmm_stripe_count: 7
|
||||
lmm_stripe_size: 1048576
|
||||
lmm_pattern: 1
|
||||
lmm_layout_gen: 0
|
||||
lmm_stripe_offset: 7
|
||||
obdidx objid objid group
|
||||
7 12459196 0xbe1cbc 0
|
||||
0 12422487 0xbd8d57 0
|
||||
5 12380841 0xbceaa9 0
|
||||
1 12640475 0xc0e0db 0
|
||||
6 12594098 0xc02bb2 0
|
||||
3 12488781 0xbe904d 0
|
||||
2 12568834 0xbfc902 0
|
||||
|
||||
/esfs/jtacquaviva/ioperf/file_write.00000024
|
||||
lmm_stripe_count: 8
|
||||
lmm_stripe_size: 1048576
|
||||
lmm_pattern: 1
|
||||
lmm_layout_gen: 0
|
||||
lmm_stripe_offset: 5
|
||||
obdidx objid objid group
|
||||
5 12380820 0xbcea94 0
|
||||
4 12510546 0xbee552 0
|
||||
1 12640452 0xc0e0c4 0
|
||||
7 12459179 0xbe1cab 0
|
||||
2 12568815 0xbfc8ef 0
|
||||
0 12422474 0xbd8d4a 0
|
||||
3 12488764 0xbe903c 0
|
||||
6 12594084 0xc02ba4 0
|
||||
|
||||
/esfs/jtacquaviva/ioperf/file_write.00000001
|
||||
lmm_stripe_count: 8
|
||||
lmm_stripe_size: 1048576
|
||||
lmm_pattern: 1
|
||||
lmm_layout_gen: 0
|
||||
lmm_stripe_offset: 5
|
||||
obdidx objid objid group
|
||||
5 12380835 0xbceaa3 0
|
||||
2 12568825 0xbfc8f9 0
|
||||
6 12594091 0xc02bab 0
|
||||
3 12488771 0xbe9043 0
|
||||
0 12422484 0xbd8d54 0
|
||||
4 12510560 0xbee560 0
|
||||
7 12459192 0xbe1cb8 0
|
||||
1 12640466 0xc0e0d2 0
|
||||
|
||||
/esfs/jtacquaviva/ioperf/file_write.00000011
|
||||
lmm_stripe_count: 8
|
||||
lmm_stripe_size: 1048576
|
||||
lmm_pattern: 1
|
||||
lmm_layout_gen: 0
|
||||
lmm_stripe_offset: 4
|
||||
obdidx objid objid group
|
||||
4 12510545 0xbee551 0
|
||||
2 12568810 0xbfc8ea 0
|
||||
0 12422470 0xbd8d46 0
|
||||
6 12594079 0xc02b9f 0
|
||||
3 12488761 0xbe9039 0
|
||||
5 12380827 0xbcea9b 0
|
||||
1 12640458 0xc0e0ca 0
|
||||
7 12459186 0xbe1cb2 0
@@ -0,0 +1,591 @@
+ /opt/ddn/mvapich/bin/mpiexec -ppn 2 -np 32 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c01,isc17-c02,isc17-c03,isc17-c04,isc17-c05,isc17-c06,isc17-c07,isc17-c08,isc17-c09,isc17-c11,isc17-c12,isc17-c13,isc17-c14,isc17-c15,isc17-c18,isc17-c22 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 10485760 -b 70590136320 -D 120 -a POSIX -F -e -g -z -k -o /esfs/jtacquaviva/ioperf/file_write -w
+ tee -a ./output/COUNT:1#NN:16#PPN:2#API:POSIX#T:10485760.txt
IOR-3.0.1: MPI Coordinated Test of Parallel I/O

Began: Tue Oct 23 07:38:30 2018
Command line used: /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 10485760 -b 70590136320 -D 120 -a POSIX -F -e -g -z -k -o /esfs/jtacquaviva/ioperf/file_write -w
Machine: Linux isc17-c01

Test 0 started: Tue Oct 23 07:38:30 2018
Summary:
api = POSIX
test filename = /esfs/jtacquaviva/ioperf/file_write
access = file-per-process
ordering in a file = random offsets
ordering inter file= no tasks offsets
clients = 32 (2 per node)
repetitions = 3
xfersize = 10 MiB
blocksize = 65.74 GiB
aggregate filesize = 2103.75 GiB
Using stonewalling = 120 second(s)

access bw(MiB/s) block(KiB) xfer(KiB) open(s) wr/rd(s) close(s) total(s) iter
------ --------- ---------- --------- -------- -------- -------- -------- ----
WARNING: Expected aggregate file size = 2258884362240.
WARNING: Stat() of aggregate file size = 2258758533120.
WARNING: Using actual aggregate bytes moved = 748106547200.
write 5333 68935680 10240 0.001060 133.78 0.000872 133.78 0
WARNING: Expected aggregate file size = 2258884362240.
WARNING: Stat() of aggregate file size = 2258548817920.
WARNING: Using actual aggregate bytes moved = 742832209920.
write 5316 68935680 10240 0.002029 133.27 0.001122 133.27 1
WARNING: Expected aggregate file size = 2258884362240.
WARNING: Stat() of aggregate file size = 2258234245120.
WARNING: Using actual aggregate bytes moved = 744604303360.
write 5336 68935680 10240 0.008552 133.07 0.001609 133.08 2

Max Write: 5336.10 MiB/sec (5595.31 MB/sec)

Summary of all tests:
Operation Max(MiB) Min(MiB) Mean(MiB) StdDev Mean(s) Test# #Tasks tPN reps fPP reord reordoff reordrand seed segcnt blksiz xsize aggsize API RefNum
write 5336.10 5315.52 5328.16 9.03 133.37805 0 32 2 3 1 0 1 0 0 1 70590136320 10485760 748106547200 POSIX 0

Finished: Tue Oct 23 07:45:20 2018
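Note: the three write iterations stop on the 120 s stonewall (-D 120) rather than after writing the full -b block per task, which is why IOR warns about the expected vs. stat'd aggregate size and derives bandwidth from the actual bytes moved. A minimal cross-check of iteration 0, assuming a shell with awk available:
  awk 'BEGIN { printf "%.1f MiB/s\n", 748106547200 / 133.78 / 1048576 }'   # ~5333.0 MiB/s, matching the "write 5333" row above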
+ /opt/ddn/mvapich/bin/mpiexec -ppn 2 -np 32 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c01,isc17-c02,isc17-c03,isc17-c04,isc17-c05,isc17-c06,isc17-c07,isc17-c08,isc17-c09,isc17-c11,isc17-c12,isc17-c13,isc17-c14,isc17-c15,isc17-c18,isc17-c22 /esfs/jtacquaviva/git/ime-evaluation/drop_caches.sh
+ /opt/ddn/mvapich/bin/mpiexec -ppn 2 -np 32 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c01,isc17-c02,isc17-c03,isc17-c04,isc17-c05,isc17-c06,isc17-c07,isc17-c08,isc17-c09,isc17-c11,isc17-c12,isc17-c13,isc17-c14,isc17-c15,isc17-c18,isc17-c22 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 10485760 -b 70590136320 -D 120 -a POSIX -F -e -g -z -k -o /esfs/jtacquaviva/indread16/file -r
+ tee -a ./output/COUNT:1#NN:16#PPN:2#API:POSIX#T:10485760.txt
IOR-3.0.1: MPI Coordinated Test of Parallel I/O

Began: Tue Oct 23 07:45:40 2018
Command line used: /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 10485760 -b 70590136320 -D 120 -a POSIX -F -e -g -z -k -o /esfs/jtacquaviva/indread16/file -r
Machine: Linux isc17-c01

Test 0 started: Tue Oct 23 07:45:40 2018
Summary:
api = POSIX
test filename = /esfs/jtacquaviva/indread16/file
access = file-per-process
ordering in a file = random offsets
ordering inter file= no tasks offsets
clients = 32 (2 per node)
repetitions = 3
xfersize = 10 MiB
blocksize = 65.74 GiB
aggregate filesize = 2103.75 GiB
Using stonewalling = 120 second(s)

access bw(MiB/s) block(KiB) xfer(KiB) open(s) wr/rd(s) close(s) total(s) iter
------ --------- ---------- --------- -------- -------- -------- -------- ----
WARNING: Expected aggregate file size = 2258884362240.
WARNING: Stat() of aggregate file size = 4517768724480.
WARNING: Using actual aggregate bytes moved = 334443315200.
read 2654.84 68935680 10240 0.004774 120.14 0.004286 120.14 0
WARNING: Expected aggregate file size = 2258884362240.
WARNING: Stat() of aggregate file size = 4517768724480.
WARNING: Using actual aggregate bytes moved = 381555834880.
read 3028.61 68935680 10240 0.004292 120.15 0.004603 120.15 1
WARNING: Expected aggregate file size = 2258884362240.
WARNING: Stat() of aggregate file size = 4517768724480.
WARNING: Using actual aggregate bytes moved = 413537402880.
read 3282.33 68935680 10240 0.004279 120.15 0.004211 120.15 2

Max Read: 3282.33 MiB/sec (3441.78 MB/sec)

Summary of all tests:
Operation Max(MiB) Min(MiB) Mean(MiB) StdDev Mean(s) Test# #Tasks tPN reps fPP reord reordoff reordrand seed segcnt blksiz xsize aggsize API RefNum
read 3282.33 2654.84 2988.59 257.73 120.14637 0 32 2 3 1 0 1 0 0 1 70590136320 10485760 334443315200 POSIX 0

Finished: Tue Oct 23 07:51:41 2018
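Note: the read phase is also stonewalled at 120 s, so the reported rates again follow from the actual bytes moved per iteration; the stat'd aggregate (4517768724480) is exactly twice the expected 2258884362240, which suggests the pre-existing files under /esfs/jtacquaviva/indread16 are twice the size of one -b block per task. A rough check, assuming bash arithmetic and awk:
  echo $(( 4517768724480 / 2258884362240 ))                                # -> 2
  awk 'BEGIN { printf "%.1f MiB/s\n", 334443315200 / 120.14 / 1048576 }'   # ~2654.8 MiB/s for iteration 0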
+ set +x
/esfs/jtacquaviva/ioperf
stripe_count: 32 stripe_size: 1048576 stripe_offset: -1
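Note: the directory default above requests stripe_count 32, but the per-file layouts below report lmm_stripe_count values of 6 to 8 and obdidx values only in the range 0-7, which suggests the file system exposes just eight OSTs and caps the requested stripe width. A quick way to tally the widths from a saved copy of this dump (getstripe_dump.txt is a hypothetical file name):
  grep 'lmm_stripe_count' getstripe_dump.txt | awk '{ print $2 }' | sort | uniq -c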
|
||||
/esfs/jtacquaviva/ioperf/file_write.00000006
|
||||
lmm_stripe_count: 8
|
||||
lmm_stripe_size: 1048576
|
||||
lmm_pattern: 1
|
||||
lmm_layout_gen: 0
|
||||
lmm_stripe_offset: 0
|
||||
obdidx objid objid group
|
||||
0 12421486 0xbd896e 0
|
||||
2 12567835 0xbfc51b 0
|
||||
4 12509562 0xbee17a 0
|
||||
7 12458194 0xbe18d2 0
|
||||
1 12639466 0xc0dcea 0
|
||||
3 12487776 0xbe8c60 0
|
||||
5 12379834 0xbce6ba 0
|
||||
6 12593094 0xc027c6 0
|
||||
|
||||
/esfs/jtacquaviva/ioperf/file_write.00000023
|
||||
lmm_stripe_count: 7
|
||||
lmm_stripe_size: 1048576
|
||||
lmm_pattern: 1
|
||||
lmm_layout_gen: 0
|
||||
lmm_stripe_offset: 6
|
||||
obdidx objid objid group
|
||||
6 12593070 0xc027ae 0
|
||||
0 12421464 0xbd8958 0
|
||||
2 12567813 0xbfc505 0
|
||||
4 12509542 0xbee166 0
|
||||
7 12458172 0xbe18bc 0
|
||||
3 12487754 0xbe8c4a 0
|
||||
1 12639446 0xc0dcd6 0
|
||||
|
||||
/esfs/jtacquaviva/ioperf/file_write.00000002
|
||||
lmm_stripe_count: 7
|
||||
lmm_stripe_size: 1048576
|
||||
lmm_pattern: 1
|
||||
lmm_layout_gen: 0
|
||||
lmm_stripe_offset: 4
|
||||
obdidx objid objid group
|
||||
4 12509551 0xbee16f 0
|
||||
5 12379823 0xbce6af 0
|
||||
3 12487766 0xbe8c56 0
|
||||
2 12567825 0xbfc511 0
|
||||
7 12458186 0xbe18ca 0
|
||||
0 12421483 0xbd896b 0
|
||||
6 12593090 0xc027c2 0
|
||||
|
||||
/esfs/jtacquaviva/ioperf/file_write.00000026
|
||||
lmm_stripe_count: 8
|
||||
lmm_stripe_size: 1048576
|
||||
lmm_pattern: 1
|
||||
lmm_layout_gen: 0
|
||||
lmm_stripe_offset: 6
|
||||
obdidx objid objid group
|
||||
6 12593067 0xc027ab 0
|
||||
0 12421460 0xbd8954 0
|
||||
2 12567810 0xbfc502 0
|
||||
4 12509535 0xbee15f 0
|
||||
7 12458168 0xbe18b8 0
|
||||
5 12379808 0xbce6a0 0
|
||||
1 12639440 0xc0dcd0 0
|
||||
3 12487748 0xbe8c44 0
|
||||
|
||||
/esfs/jtacquaviva/ioperf/file_write.00000000
|
||||
lmm_stripe_count: 7
|
||||
lmm_stripe_size: 1048576
|
||||
lmm_pattern: 1
|
||||
lmm_layout_gen: 0
|
||||
lmm_stripe_offset: 4
|
||||
obdidx objid objid group
|
||||
4 12509552 0xbee170 0
|
||||
7 12458183 0xbe18c7 0
|
||||
3 12487765 0xbe8c55 0
|
||||
6 12593085 0xc027bd 0
|
||||
2 12567826 0xbfc512 0
|
||||
5 12379825 0xbce6b1 0
|
||||
1 12639457 0xc0dce1 0
|
||||
|
||||
/esfs/jtacquaviva/ioperf/file_write.00000020
|
||||
lmm_stripe_count: 8
|
||||
lmm_stripe_size: 1048576
|
||||
lmm_pattern: 1
|
||||
lmm_layout_gen: 0
|
||||
lmm_stripe_offset: 7
|
||||
obdidx objid objid group
|
||||
7 12458177 0xbe18c1 0
|
||||
3 12487759 0xbe8c4f 0
|
||||
1 12639450 0xc0dcda 0
|
||||
2 12567819 0xbfc50b 0
|
||||
5 12379818 0xbce6aa 0
|
||||
0 12421471 0xbd895f 0
|
||||
4 12509546 0xbee16a 0
|
||||
6 12593079 0xc027b7 0
|
||||
|
||||
/esfs/jtacquaviva/ioperf/file_write.00000030
|
||||
lmm_stripe_count: 7
|
||||
lmm_stripe_size: 1048576
|
||||
lmm_pattern: 1
|
||||
lmm_layout_gen: 0
|
||||
lmm_stripe_offset: 1
|
||||
obdidx objid objid group
|
||||
1 12639451 0xc0dcdb 0
|
||||
6 12593081 0xc027b9 0
|
||||
0 12421475 0xbd8963 0
|
||||
5 12379821 0xbce6ad 0
|
||||
7 12458185 0xbe18c9 0
|
||||
3 12487767 0xbe8c57 0
|
||||
2 12567828 0xbfc514 0
|
||||
|
||||
/esfs/jtacquaviva/ioperf/file_write.00000004
|
||||
lmm_stripe_count: 8
|
||||
lmm_stripe_size: 1048576
|
||||
lmm_pattern: 1
|
||||
lmm_layout_gen: 0
|
||||
lmm_stripe_offset: 3
|
||||
obdidx objid objid group
|
||||
3 12487761 0xbe8c51 0
|
||||
5 12379820 0xbce6ac 0
|
||||
2 12567823 0xbfc50f 0
|
||||
4 12509555 0xbee173 0
|
||||
0 12421479 0xbd8967 0
|
||||
6 12593089 0xc027c1 0
|
||||
1 12639460 0xc0dce4 0
|
||||
7 12458190 0xbe18ce 0
|
||||
|
||||
/esfs/jtacquaviva/ioperf/file_write.00000007
|
||||
lmm_stripe_count: 8
|
||||
lmm_stripe_size: 1048576
|
||||
lmm_pattern: 1
|
||||
lmm_layout_gen: 0
|
||||
lmm_stripe_offset: 4
|
||||
obdidx objid objid group
|
||||
4 12509549 0xbee16d 0
|
||||
7 12458182 0xbe18c6 0
|
||||
5 12379822 0xbce6ae 0
|
||||
0 12421478 0xbd8966 0
|
||||
1 12639458 0xc0dce2 0
|
||||
2 12567832 0xbfc518 0
|
||||
3 12487772 0xbe8c5c 0
|
||||
6 12593091 0xc027c3 0
|
||||
|
||||
/esfs/jtacquaviva/ioperf/file_write.00000018
|
||||
lmm_stripe_count: 8
|
||||
lmm_stripe_size: 1048576
|
||||
lmm_pattern: 1
|
||||
lmm_layout_gen: 0
|
||||
lmm_stripe_offset: 4
|
||||
obdidx objid objid group
|
||||
4 12509564 0xbee17c 0
|
||||
7 12458196 0xbe18d4 0
|
||||
1 12639468 0xc0dcec 0
|
||||
3 12487778 0xbe8c62 0
|
||||
5 12379836 0xbce6bc 0
|
||||
6 12593096 0xc027c8 0
|
||||
0 12421488 0xbd8970 0
|
||||
2 12567837 0xbfc51d 0
|
||||
|
||||
/esfs/jtacquaviva/ioperf/file_write.00000031
|
||||
lmm_stripe_count: 8
|
||||
lmm_stripe_size: 1048576
|
||||
lmm_pattern: 1
|
||||
lmm_layout_gen: 0
|
||||
lmm_stripe_offset: 6
|
||||
obdidx objid objid group
|
||||
6 12593093 0xc027c5 0
|
||||
0 12421485 0xbd896d 0
|
||||
2 12567834 0xbfc51a 0
|
||||
4 12509561 0xbee179 0
|
||||
7 12458193 0xbe18d1 0
|
||||
1 12639465 0xc0dce9 0
|
||||
3 12487775 0xbe8c5f 0
|
||||
5 12379833 0xbce6b9 0
|
||||
|
||||
/esfs/jtacquaviva/ioperf/file_write.00000012
|
||||
lmm_stripe_count: 8
|
||||
lmm_stripe_size: 1048576
|
||||
lmm_pattern: 1
|
||||
lmm_layout_gen: 0
|
||||
lmm_stripe_offset: 5
|
||||
obdidx objid objid group
|
||||
5 12379817 0xbce6a9 0
|
||||
2 12567820 0xbfc50c 0
|
||||
6 12593078 0xc027b6 0
|
||||
0 12421472 0xbd8960 0
|
||||
4 12509548 0xbee16c 0
|
||||
3 12487762 0xbe8c52 0
|
||||
7 12458181 0xbe18c5 0
|
||||
1 12639454 0xc0dcde 0
|
||||
|
||||
/esfs/jtacquaviva/ioperf/file_write.00000013
|
||||
lmm_stripe_count: 8
|
||||
lmm_stripe_size: 1048576
|
||||
lmm_pattern: 1
|
||||
lmm_layout_gen: 0
|
||||
lmm_stripe_offset: 5
|
||||
obdidx objid objid group
|
||||
5 12379832 0xbce6b8 0
|
||||
6 12593092 0xc027c4 0
|
||||
0 12421484 0xbd896c 0
|
||||
2 12567833 0xbfc519 0
|
||||
4 12509560 0xbee178 0
|
||||
7 12458192 0xbe18d0 0
|
||||
1 12639464 0xc0dce8 0
|
||||
3 12487774 0xbe8c5e 0
|
||||
|
||||
/esfs/jtacquaviva/ioperf/file_write.00000025
|
||||
lmm_stripe_count: 7
|
||||
lmm_stripe_size: 1048576
|
||||
lmm_pattern: 1
|
||||
lmm_layout_gen: 0
|
||||
lmm_stripe_offset: 3
|
||||
obdidx objid objid group
|
||||
3 12487751 0xbe8c47 0
|
||||
4 12509538 0xbee162 0
|
||||
1 12639443 0xc0dcd3 0
|
||||
2 12567812 0xbfc504 0
|
||||
6 12593073 0xc027b1 0
|
||||
7 12458173 0xbe18bd 0
|
||||
0 12421467 0xbd895b 0
|
||||
|
||||
/esfs/jtacquaviva/ioperf/file_write.00000022
|
||||
lmm_stripe_count: 7
|
||||
lmm_stripe_size: 1048576
|
||||
lmm_pattern: 1
|
||||
lmm_layout_gen: 0
|
||||
lmm_stripe_offset: 7
|
||||
obdidx objid objid group
|
||||
7 12458179 0xbe18c3 0
|
||||
0 12421474 0xbd8962 0
|
||||
6 12593082 0xc027ba 0
|
||||
1 12639455 0xc0dcdf 0
|
||||
4 12509553 0xbee171 0
|
||||
5 12379824 0xbce6b0 0
|
||||
3 12487770 0xbe8c5a 0
|
||||
|
||||
/esfs/jtacquaviva/ioperf/file_write.00000003
|
||||
lmm_stripe_count: 8
|
||||
lmm_stripe_size: 1048576
|
||||
lmm_pattern: 1
|
||||
lmm_layout_gen: 0
|
||||
lmm_stripe_offset: 2
|
||||
obdidx objid objid group
|
||||
2 12567836 0xbfc51c 0
|
||||
4 12509563 0xbee17b 0
|
||||
7 12458195 0xbe18d3 0
|
||||
1 12639467 0xc0dceb 0
|
||||
3 12487777 0xbe8c61 0
|
||||
5 12379835 0xbce6bb 0
|
||||
6 12593095 0xc027c7 0
|
||||
0 12421487 0xbd896f 0
|
||||
|
||||
/esfs/jtacquaviva/ioperf/file_write.00000008
|
||||
lmm_stripe_count: 8
|
||||
lmm_stripe_size: 1048576
|
||||
lmm_pattern: 1
|
||||
lmm_layout_gen: 0
|
||||
lmm_stripe_offset: 7
|
||||
obdidx objid objid group
|
||||
7 12458175 0xbe18bf 0
|
||||
1 12639448 0xc0dcd8 0
|
||||
3 12487757 0xbe8c4d 0
|
||||
5 12379815 0xbce6a7 0
|
||||
6 12593076 0xc027b4 0
|
||||
0 12421469 0xbd895d 0
|
||||
2 12567817 0xbfc509 0
|
||||
4 12509544 0xbee168 0
|
||||
|
||||
/esfs/jtacquaviva/ioperf/file_write.00000028
|
||||
lmm_stripe_count: 8
|
||||
lmm_stripe_size: 1048576
|
||||
lmm_pattern: 1
|
||||
lmm_layout_gen: 0
|
||||
lmm_stripe_offset: 3
|
||||
obdidx objid objid group
|
||||
3 12487769 0xbe8c59 0
|
||||
5 12379828 0xbce6b4 0
|
||||
6 12593088 0xc027c0 0
|
||||
0 12421481 0xbd8969 0
|
||||
2 12567831 0xbfc517 0
|
||||
4 12509559 0xbee177 0
|
||||
7 12458191 0xbe18cf 0
|
||||
1 12639463 0xc0dce7 0
|
||||
|
||||
/esfs/jtacquaviva/ioperf/file_write.00000010
|
||||
lmm_stripe_count: 8
|
||||
lmm_stripe_size: 1048576
|
||||
lmm_pattern: 1
|
||||
lmm_layout_gen: 0
|
||||
lmm_stripe_offset: 4
|
||||
obdidx objid objid group
|
||||
4 12509550 0xbee16e 0
|
||||
0 12421476 0xbd8964 0
|
||||
6 12593086 0xc027be 0
|
||||
3 12487768 0xbe8c58 0
|
||||
7 12458187 0xbe18cb 0
|
||||
2 12567827 0xbfc513 0
|
||||
5 12379829 0xbce6b5 0
|
||||
1 12639461 0xc0dce5 0
|
||||
|
||||
/esfs/jtacquaviva/ioperf/file_write.00000017
|
||||
lmm_stripe_count: 7
|
||||
lmm_stripe_size: 1048576
|
||||
lmm_pattern: 1
|
||||
lmm_layout_gen: 0
|
||||
lmm_stripe_offset: 7
|
||||
obdidx objid objid group
|
||||
7 12458180 0xbe18c4 0
|
||||
6 12593083 0xc027bb 0
|
||||
2 12567824 0xbfc510 0
|
||||
4 12509554 0xbee172 0
|
||||
1 12639456 0xc0dce0 0
|
||||
5 12379830 0xbce6b6 0
|
||||
3 12487771 0xbe8c5b 0
|
||||
|
||||
/esfs/jtacquaviva/ioperf/file_write.00000029
|
||||
lmm_stripe_count: 8
|
||||
lmm_stripe_size: 1048576
|
||||
lmm_pattern: 1
|
||||
lmm_layout_gen: 0
|
||||
lmm_stripe_offset: 3
|
||||
obdidx objid objid group
|
||||
3 12487749 0xbe8c45 0
|
||||
5 12379809 0xbce6a1 0
|
||||
2 12567811 0xbfc503 0
|
||||
7 12458169 0xbe18b9 0
|
||||
4 12509536 0xbee160 0
|
||||
1 12639441 0xc0dcd1 0
|
||||
0 12421462 0xbd8956 0
|
||||
6 12593068 0xc027ac 0
|
||||
|
||||
/esfs/jtacquaviva/ioperf/file_write.00000016
|
||||
lmm_stripe_count: 8
|
||||
lmm_stripe_size: 1048576
|
||||
lmm_pattern: 1
|
||||
lmm_layout_gen: 0
|
||||
lmm_stripe_offset: 5
|
||||
obdidx objid objid group
|
||||
5 12379807 0xbce69f 0
|
||||
6 12593066 0xc027aa 0
|
||||
0 12421459 0xbd8953 0
|
||||
2 12567809 0xbfc501 0
|
||||
4 12509534 0xbee15e 0
|
||||
7 12458167 0xbe18b7 0
|
||||
1 12639439 0xc0dccf 0
|
||||
3 12487747 0xbe8c43 0
|
||||
|
||||
/esfs/jtacquaviva/ioperf/file_write.00000005
|
||||
lmm_stripe_count: 8
|
||||
lmm_stripe_size: 1048576
|
||||
lmm_pattern: 1
|
||||
lmm_layout_gen: 0
|
||||
lmm_stripe_offset: 5
|
||||
obdidx objid objid group
|
||||
5 12379810 0xbce6a2 0
|
||||
3 12487752 0xbe8c48 0
|
||||
1 12639445 0xc0dcd5 0
|
||||
6 12593072 0xc027b0 0
|
||||
7 12458171 0xbe18bb 0
|
||||
0 12421466 0xbd895a 0
|
||||
2 12567814 0xbfc506 0
|
||||
4 12509543 0xbee167 0
|
||||
|
||||
/esfs/jtacquaviva/ioperf/file_write.00000014
|
||||
lmm_stripe_count: 8
|
||||
lmm_stripe_size: 1048576
|
||||
lmm_pattern: 1
|
||||
lmm_layout_gen: 0
|
||||
lmm_stripe_offset: 7
|
||||
obdidx objid objid group
|
||||
7 12458178 0xbe18c2 0
|
||||
3 12487760 0xbe8c50 0
|
||||
2 12567821 0xbfc50d 0
|
||||
5 12379819 0xbce6ab 0
|
||||
4 12509547 0xbee16b 0
|
||||
6 12593080 0xc027b8 0
|
||||
0 12421473 0xbd8961 0
|
||||
1 12639452 0xc0dcdc 0
|
||||
|
||||
/esfs/jtacquaviva/ioperf/file_write.00000021
|
||||
lmm_stripe_count: 7
|
||||
lmm_stripe_size: 1048576
|
||||
lmm_pattern: 1
|
||||
lmm_layout_gen: 0
|
||||
lmm_stripe_offset: 1
|
||||
obdidx objid objid group
|
||||
1 12639444 0xc0dcd4 0
|
||||
7 12458170 0xbe18ba 0
|
||||
4 12509541 0xbee165 0
|
||||
5 12379812 0xbce6a4 0
|
||||
3 12487753 0xbe8c49 0
|
||||
6 12593074 0xc027b2 0
|
||||
0 12421468 0xbd895c 0
|
||||
|
||||
/esfs/jtacquaviva/ioperf/file_write.00000019
|
||||
lmm_stripe_count: 8
|
||||
lmm_stripe_size: 1048576
|
||||
lmm_pattern: 1
|
||||
lmm_layout_gen: 0
|
||||
lmm_stripe_offset: 5
|
||||
obdidx objid objid group
|
||||
5 12379826 0xbce6b2 0
|
||||
6 12593087 0xc027bf 0
|
||||
0 12421480 0xbd8968 0
|
||||
2 12567830 0xbfc516 0
|
||||
4 12509558 0xbee176 0
|
||||
7 12458189 0xbe18cd 0
|
||||
1 12639462 0xc0dce6 0
|
||||
3 12487773 0xbe8c5d 0
|
||||
|
||||
/esfs/jtacquaviva/ioperf/file_write.00000009
|
||||
lmm_stripe_count: 7
|
||||
lmm_stripe_size: 1048576
|
||||
lmm_pattern: 1
|
||||
lmm_layout_gen: 0
|
||||
lmm_stripe_offset: 3
|
||||
obdidx objid objid group
|
||||
3 12487764 0xbe8c54 0
|
||||
2 12567822 0xbfc50e 0
|
||||
5 12379831 0xbce6b7 0
|
||||
4 12509557 0xbee175 0
|
||||
7 12458188 0xbe18cc 0
|
||||
0 12421482 0xbd896a 0
|
||||
1 12639459 0xc0dce3 0
|
||||
|
||||
/esfs/jtacquaviva/ioperf/file_write.00000015
|
||||
lmm_stripe_count: 6
|
||||
lmm_stripe_size: 1048576
|
||||
lmm_pattern: 1
|
||||
lmm_layout_gen: 0
|
||||
lmm_stripe_offset: 0
|
||||
obdidx objid objid group
|
||||
0 12421463 0xbd8957 0
|
||||
4 12509540 0xbee164 0
|
||||
5 12379813 0xbce6a5 0
|
||||
3 12487755 0xbe8c4b 0
|
||||
2 12567815 0xbfc507 0
|
||||
6 12593075 0xc027b3 0
|
||||
|
||||
/esfs/jtacquaviva/ioperf/file_write.00000027
|
||||
lmm_stripe_count: 8
|
||||
lmm_stripe_size: 1048576
|
||||
lmm_pattern: 1
|
||||
lmm_layout_gen: 0
|
||||
lmm_stripe_offset: 2
|
||||
obdidx objid objid group
|
||||
2 12567818 0xbfc50a 0
|
||||
7 12458176 0xbe18c0 0
|
||||
0 12421470 0xbd895e 0
|
||||
1 12639449 0xc0dcd9 0
|
||||
6 12593077 0xc027b5 0
|
||||
3 12487758 0xbe8c4e 0
|
||||
5 12379816 0xbce6a8 0
|
||||
4 12509545 0xbee169 0
|
||||
|
||||
/esfs/jtacquaviva/ioperf/file_write.00000024
|
||||
lmm_stripe_count: 8
|
||||
lmm_stripe_size: 1048576
|
||||
lmm_pattern: 1
|
||||
lmm_layout_gen: 0
|
||||
lmm_stripe_offset: 3
|
||||
obdidx objid objid group
|
||||
3 12487763 0xbe8c53 0
|
||||
1 12639453 0xc0dcdd 0
|
||||
6 12593084 0xc027bc 0
|
||||
7 12458184 0xbe18c8 0
|
||||
0 12421477 0xbd8965 0
|
||||
5 12379827 0xbce6b3 0
|
||||
4 12509556 0xbee174 0
|
||||
2 12567829 0xbfc515 0
|
||||
|
||||
/esfs/jtacquaviva/ioperf/file_write.00000001
|
||||
lmm_stripe_count: 6
|
||||
lmm_stripe_size: 1048576
|
||||
lmm_pattern: 1
|
||||
lmm_layout_gen: 0
|
||||
lmm_stripe_offset: 0
|
||||
obdidx objid objid group
|
||||
0 12421461 0xbd8955 0
|
||||
3 12487750 0xbe8c46 0
|
||||
6 12593069 0xc027ad 0
|
||||
4 12509537 0xbee161 0
|
||||
1 12639442 0xc0dcd2 0
|
||||
5 12379811 0xbce6a3 0
|
||||
|
||||
/esfs/jtacquaviva/ioperf/file_write.00000011
|
||||
lmm_stripe_count: 8
|
||||
lmm_stripe_size: 1048576
|
||||
lmm_pattern: 1
|
||||
lmm_layout_gen: 0
|
||||
lmm_stripe_offset: 4
|
||||
obdidx objid objid group
|
||||
4 12509539 0xbee163 0
|
||||
6 12593071 0xc027af 0
|
||||
0 12421465 0xbd8959 0
|
||||
5 12379814 0xbce6a6 0
|
||||
7 12458174 0xbe18be 0
|
||||
1 12639447 0xc0dcd7 0
|
||||
3 12487756 0xbe8c4c 0
|
||||
2 12567816 0xbfc508 0
@@ -0,0 +1,445 @@
+ /opt/ddn/mvapich/bin/mpiexec -ppn 4 -np 64 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c01,isc17-c02,isc17-c03,isc17-c04,isc17-c05,isc17-c06,isc17-c07,isc17-c08,isc17-c09,isc17-c11,isc17-c12,isc17-c13,isc17-c14,isc17-c15,isc17-c18,isc17-c22 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 35295068160 -D 120 -a MPIIO -e -g -z -k -o /esfs/jtacquaviva/ioperf/file_write -w
|
||||
+ tee -a ./output/COUNT:1#NN:16#PPN:4#API:MPIIO#T:102400.txt
|
||||
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
|
||||
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
[cli_13]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 13
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
[cli_15]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 15
|
||||
[cli_1]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 1
|
||||
[cli_14]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 14
|
||||
[cli_3]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 3
|
||||
[cli_11]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 11
|
||||
[cli_25]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 25
|
||||
[cli_4]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 4
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
[cli_33]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 33
|
||||
[cli_21]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 21
|
||||
[cli_37]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 37
|
||||
[cli_61]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 61
|
||||
[cli_53]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 53
|
||||
[cli_12]: [cli_0]: [cli_9]: [cli_27]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 27
|
||||
[cli_5]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 5
|
||||
[cli_35]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 35
|
||||
[cli_23]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 23
|
||||
[cli_39]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 39
|
||||
[cli_63]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 63
|
||||
[cli_54]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 54
|
||||
aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 12
|
||||
[cli_2]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 9
|
||||
[cli_24]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 24
|
||||
[cli_7]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 7
|
||||
[cli_34]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 34
|
||||
[cli_20]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 20
|
||||
[cli_38]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 38
|
||||
[cli_60]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 60
|
||||
[cli_55]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 55
|
||||
aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 0
|
||||
[cli_8]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 8
|
||||
[cli_26]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 26
|
||||
[cli_6]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 6
|
||||
[cli_32]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 32
|
||||
[cli_22]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 22
|
||||
[cli_36]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 36
|
||||
[cli_62]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 62
|
||||
[cli_52]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 52
|
||||
aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 2
|
||||
[cli_10]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 10
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
[cli_45]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 45
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
[cli_40]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 40
|
||||
[cli_47]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 47
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
[cli_29]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 29
|
||||
[cli_41]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 41
|
||||
[cli_44]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 44
|
||||
[cli_48]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 48
|
||||
[cli_31]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 31
|
||||
[cli_42]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 42
|
||||
[cli_17]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 17
|
||||
[cli_46]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 46
|
||||
[cli_49]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 49
|
||||
tee: standard output[cli_28]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 28
|
||||
[cli_43]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 43
|
||||
[cli_19]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 19
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
[cli_50]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 50
|
||||
: Resource temporarily unavailable
|
||||
[cli_30]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 30
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
[cli_16]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 16
|
||||
[cli_51]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 51
|
||||
[cli_57]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 57
|
||||
[cli_18]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 18
|
||||
[cli_59]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 59
|
||||
[cli_56]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 56
|
||||
[cli_58]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 58
|
||||
|
||||
===================================================================================
|
||||
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
|
||||
= PID 34756 RUNNING AT isc17-c09
|
||||
= EXIT CODE: 255
|
||||
= CLEANING UP REMAINING PROCESSES
|
||||
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
|
||||
===================================================================================
|
||||
[proxy:0:12@isc17-c14] HYD_pmcd_pmip_control_cmd_cb (pm/pmiserv/pmip_cb.c:912): assert (!closed) failed
|
||||
[proxy:0:12@isc17-c14] HYDT_dmxu_poll_wait_for_event (tools/demux/demux_poll.c:76): callback returned error status
|
||||
[proxy:0:12@isc17-c14] main (pm/pmiserv/pmip.c:256): demux engine error waiting for event
|
||||
[proxy:0:7@isc17-c08] HYD_pmcd_pmip_control_cmd_cb (pm/pmiserv/pmip_cb.c:912): assert (!closed) failed
|
||||
[proxy:0:7@isc17-c08] HYDT_dmxu_poll_wait_for_event (tools/demux/demux_poll.c:76): callback returned error status
|
||||
[proxy:0:7@isc17-c08] main (pm/pmiserv/pmip.c:256): demux engine error waiting for event
|
||||
[proxy:0:1@isc17-c02] HYD_pmcd_pmip_control_cmd_cb (pm/pmiserv/pmip_cb.c:912): assert (!closed) failed
|
||||
[proxy:0:1@isc17-c02] HYDT_dmxu_poll_wait_for_event (tools/demux/demux_poll.c:76): callback returned error status
|
||||
[proxy:0:1@isc17-c02] main (pm/pmiserv/pmip.c:256): demux engine error waiting for event
|
||||
[proxy:0:15@isc17-c22] HYD_pmcd_pmip_control_cmd_cb (pm/pmiserv/pmip_cb.c:912): assert (!closed) failed
|
||||
[proxy:0:15@isc17-c22] HYDT_dmxu_poll_wait_for_event (tools/demux/demux_poll.c:76): callback returned error status
|
||||
[proxy:0:15@isc17-c22] main (pm/pmiserv/pmip.c:256): demux engine error waiting for event
|
||||
[proxy:0:6@isc17-c07] HYD_pmcd_pmip_control_cmd_cb (pm/pmiserv/pmip_cb.c:912): assert (!closed) failed
|
||||
[proxy:0:6@isc17-c07] HYDT_dmxu_poll_wait_for_event (tools/demux/demux_poll.c:76): callback returned error status
|
||||
[proxy:0:6@isc17-c07] main (pm/pmiserv/pmip.c:256): demux engine error waiting for event
|
||||
[proxy:0:5@isc17-c06] HYD_pmcd_pmip_control_cmd_cb (pm/pmiserv/pmip_cb.c:912): assert (!closed) failed
|
||||
[proxy:0:5@isc17-c06] HYDT_dmxu_poll_wait_for_event (tools/demux/demux_poll.c:76): callback returned error status
|
||||
[proxy:0:5@isc17-c06] main (pm/pmiserv/pmip.c:256): demux engine error waiting for event
|
||||
[mpiexec@isc17-c04] HYDT_bscu_wait_for_completion (tools/bootstrap/utils/bscu_wait.c:76): one of the processes terminated badly; aborting
|
||||
[mpiexec@isc17-c04] HYDT_bsci_wait_for_completion (tools/bootstrap/src/bsci_wait.c:23): launcher returned error waiting for completion
|
||||
[mpiexec@isc17-c04] HYD_pmci_wait_for_completion (pm/pmiserv/pmiserv_pmci.c:218): launcher returned error waiting for completion
|
||||
[mpiexec@isc17-c04] main (ui/mpich/mpiexec.c:344): process manager error waiting for completion
|
||||
tee: write error
|
||||
+ /opt/ddn/mvapich/bin/mpiexec -ppn 4 -np 64 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c01,isc17-c02,isc17-c03,isc17-c04,isc17-c05,isc17-c06,isc17-c07,isc17-c08,isc17-c09,isc17-c11,isc17-c12,isc17-c13,isc17-c14,isc17-c15,isc17-c18,isc17-c22 /esfs/jtacquaviva/git/ime-evaluation/drop_caches.sh
|
||||
+ tee -a ./output/COUNT:1#NN:16#PPN:4#API:MPIIO#T:102400.txt
|
||||
+ /opt/ddn/mvapich/bin/mpiexec -ppn 4 -np 64 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c01,isc17-c02,isc17-c03,isc17-c04,isc17-c05,isc17-c06,isc17-c07,isc17-c08,isc17-c09,isc17-c11,isc17-c12,isc17-c13,isc17-c14,isc17-c15,isc17-c18,isc17-c22 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 35295068160 -D 120 -a MPIIO -e -g -z -k -o /esfs/jtacquaviva/file_read -r
|
||||
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
|
||||
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
[cli_9]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 9
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
[cli_11]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 11
|
||||
[cli_8]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 8
|
||||
[cli_10]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 10
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
[cli_1]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 1
|
||||
[cli_47]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 47
|
||||
[cli_3]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 3
|
||||
[cli_44]: [cli_0]: tee: standard output[cli_45]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 45
|
||||
[cli_2]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 2
|
||||
[cli_46]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 46
|
||||
aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 0
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 44
|
||||
: Resource temporarily unavailable
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
[cli_41]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 41
|
||||
[cli_43]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 43
|
||||
[cli_40]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 40
|
||||
[cli_42]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 42
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
[cli_33]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 33
|
||||
[cli_35]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 35
|
||||
[cli_32]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 32
|
||||
[cli_34]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 34
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
[cli_17]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 17
|
||||
[cli_19]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 19
|
||||
[cli_16]: [cli_18]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 18
|
||||
aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 16
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
[cli_25]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 25
|
||||
[cli_31]: [cli_27]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 27
|
||||
[cli_21]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 21
|
||||
[cli_37]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 37
|
||||
[cli_29]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 29
|
||||
[cli_24]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 24
|
||||
[cli_23]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 23
|
||||
[cli_39]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 39
|
||||
aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 31
|
||||
[cli_26]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 26
|
||||
[cli_20]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 20
|
||||
[cli_36]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 36
|
||||
[cli_28]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 28
|
||||
[cli_22]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 22
|
||||
[cli_38]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 38
|
||||
[cli_30]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 30
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
[cli_52]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 52
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
[cli_49]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 49
|
||||
[cli_54]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 54
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_51]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 51
[cli_53]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 53
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_48]: [cli_55]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 55
[cli_50]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 50
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 48
[cli_5]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 5
[cli_7]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 7
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
[cli_4]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 4
[cli_6]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 6
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_57]: ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
[cli_59]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 59
[cli_56]: [cli_63]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 63
aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 57
[cli_61]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 61
[cli_58]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 58
[cli_60]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 60
aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 56
[cli_62]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 62
[cli_15]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 15
[cli_13]: [cli_14]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 14
[cli_12]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 12
aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 13

===================================================================================
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
= PID 34816 RUNNING AT isc17-c09
= EXIT CODE: 255
= CLEANING UP REMAINING PROCESSES
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
===================================================================================
[proxy:0:2@isc17-c03] HYD_pmcd_pmip_control_cmd_cb (pm/pmiserv/pmip_cb.c:912): assert (!closed) failed
[proxy:0:2@isc17-c03] HYDT_dmxu_poll_wait_for_event (tools/demux/demux_poll.c:76): callback returned error status
[proxy:0:2@isc17-c03] main (pm/pmiserv/pmip.c:256): demux engine error waiting for event
[mpiexec@isc17-c04] HYDT_bscu_wait_for_completion (tools/bootstrap/utils/bscu_wait.c:76): one of the processes terminated badly; aborting
[mpiexec@isc17-c04] HYDT_bsci_wait_for_completion (tools/bootstrap/src/bsci_wait.c:23): launcher returned error waiting for completion
[mpiexec@isc17-c04] HYD_pmci_wait_for_completion (pm/pmiserv/pmiserv_pmci.c:218): launcher returned error waiting for completion
[mpiexec@isc17-c04] main (ui/mpich/mpiexec.c:344): process manager error waiting for completion
tee: write error
+ set +x
/esfs/jtacquaviva/ioperf
stripe_count: 32 stripe_size: 1048576 stripe_offset: -1
@@ -0,0 +1,110 @@
+ tee -a ./output/COUNT:1#NN:16#PPN:4#API:MPIIO#T:1048576.txt
+ /opt/ddn/mvapich/bin/mpiexec -ppn 4 -np 64 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c01,isc17-c02,isc17-c03,isc17-c04,isc17-c05,isc17-c06,isc17-c07,isc17-c08,isc17-c09,isc17-c11,isc17-c12,isc17-c13,isc17-c14,isc17-c15,isc17-c18,isc17-c22 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 1048576 -b 35295068160 -D 120 -a MPIIO -e -g -z -k -o /esfs/jtacquaviva/ioperf/file_write -w
IOR-3.0.1: MPI Coordinated Test of Parallel I/O

ior WARNING: fsync() only available in POSIX. Using value of 0.
Began: Tue Oct 23 09:34:50 2018
Command line used: /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 1048576 -b 35295068160 -D 120 -a MPIIO -e -g -z -k -o /esfs/jtacquaviva/ioperf/file_write -w
Machine: Linux isc17-c01

Test 0 started: Tue Oct 23 09:34:50 2018
Summary:
	api                = MPIIO (version=3, subversion=0)
	test filename      = /esfs/jtacquaviva/ioperf/file_write
	access             = single-shared-file
	ordering in a file = random offsets
	ordering inter file= no tasks offsets
	clients            = 64 (4 per node)
	repetitions        = 3
	xfersize           = 1 MiB
	blocksize          = 32.87 GiB
	aggregate filesize = 2103.75 GiB
	Using stonewalling = 120 second(s)

access bw(MiB/s) block(KiB) xfer(KiB) open(s) wr/rd(s) close(s) total(s) iter
------ --------- ---------- --------- -------- -------- -------- -------- ----
WARNING: Expected aggregate file size = 2258884362240.
WARNING: Stat() of aggregate file size = 2258881216512.
WARNING: Using actual aggregate bytes moved = 468432453632.
write 3715 34467840 1024.00 0.019695 120.24 0.001310 120.26 0
WARNING: Expected aggregate file size = 2258884362240.
WARNING: Stat() of aggregate file size = 2258867585024.
WARNING: Using actual aggregate bytes moved = 468409384960.
write 3715 34467840 1024.00 0.085990 120.16 0.001138 120.24 1
WARNING: Expected aggregate file size = 2258884362240.
WARNING: Stat() of aggregate file size = 2258884362240.
WARNING: Using actual aggregate bytes moved = 446452203520.
write 3538.39 34467840 1024.00 0.100266 120.23 0.001391 120.33 2

Max Write: 3715.04 MiB/sec (3895.50 MB/sec)

Summary of all tests:
Operation Max(MiB) Min(MiB) Mean(MiB) StdDev Mean(s) Test# #Tasks tPN reps fPP reord reordoff reordrand seed segcnt blksiz xsize aggsize API RefNum
write 3715.04 3538.39 3656.04 83.19 120.27771 0 64 4 3 0 0 1 0 0 1 35295068160 1048576 468432453632 MPIIO 0

Finished: Tue Oct 23 09:40:56 2018
+ /opt/ddn/mvapich/bin/mpiexec -ppn 4 -np 64 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c01,isc17-c02,isc17-c03,isc17-c04,isc17-c05,isc17-c06,isc17-c07,isc17-c08,isc17-c09,isc17-c11,isc17-c12,isc17-c13,isc17-c14,isc17-c15,isc17-c18,isc17-c22 /esfs/jtacquaviva/git/ime-evaluation/drop_caches.sh
+ /opt/ddn/mvapich/bin/mpiexec -ppn 4 -np 64 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c01,isc17-c02,isc17-c03,isc17-c04,isc17-c05,isc17-c06,isc17-c07,isc17-c08,isc17-c09,isc17-c11,isc17-c12,isc17-c13,isc17-c14,isc17-c15,isc17-c18,isc17-c22 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 1048576 -b 35295068160 -D 120 -a MPIIO -e -g -z -k -o /esfs/jtacquaviva/file_read -r
+ tee -a ./output/COUNT:1#NN:16#PPN:4#API:MPIIO#T:1048576.txt
IOR-3.0.1: MPI Coordinated Test of Parallel I/O

ior WARNING: fsync() only available in POSIX. Using value of 0.
Began: Tue Oct 23 09:41:17 2018
Command line used: /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 1048576 -b 35295068160 -D 120 -a MPIIO -e -g -z -k -o /esfs/jtacquaviva/file_read -r
Machine: Linux isc17-c01

Test 0 started: Tue Oct 23 09:41:17 2018
Summary:
	api                = MPIIO (version=3, subversion=0)
	test filename      = /esfs/jtacquaviva/file_read
	access             = single-shared-file
	ordering in a file = random offsets
	ordering inter file= no tasks offsets
	clients            = 64 (4 per node)
	repetitions        = 3
	xfersize           = 1 MiB
	blocksize          = 32.87 GiB
	aggregate filesize = 2103.75 GiB
	Using stonewalling = 120 second(s)

access bw(MiB/s) block(KiB) xfer(KiB) open(s) wr/rd(s) close(s) total(s) iter
------ --------- ---------- --------- -------- -------- -------- -------- ----
WARNING: Expected aggregate file size = 2258884362240.
WARNING: Stat() of aggregate file size = 4517768724480.
WARNING: Using actual aggregate bytes moved = 187145650176.
read 1484.64 34467840 1024.00 0.038724 120.18 0.001130 120.22 0
WARNING: Expected aggregate file size = 2258884362240.
WARNING: Stat() of aggregate file size = 4517768724480.
WARNING: Using actual aggregate bytes moved = 189027844096.
read 1500.18 34467840 1024.00 0.002513 120.16 0.001278 120.17 1
WARNING: Expected aggregate file size = 2258884362240.
WARNING: Stat() of aggregate file size = 4517768724480.
WARNING: Using actual aggregate bytes moved = 190888017920.
read 1514.82 34467840 1024.00 0.003354 120.17 0.001564 120.18 2

Max Read: 1514.82 MiB/sec (1588.40 MB/sec)

Summary of all tests:
Operation Max(MiB) Min(MiB) Mean(MiB) StdDev Mean(s) Test# #Tasks tPN reps fPP reord reordoff reordrand seed segcnt blksiz xsize aggsize API RefNum
read 1514.82 1484.64 1499.88 12.32 120.18591 0 64 4 3 0 0 1 0 0 1 35295068160 1048576 187145650176 MPIIO 0

Finished: Tue Oct 23 09:47:17 2018
+ set +x
/esfs/jtacquaviva/ioperf
stripe_count: 32 stripe_size: 1048576 stripe_offset: -1
/esfs/jtacquaviva/ioperf/file_write
lmm_stripe_count: 8
lmm_stripe_size: 1048576
lmm_pattern: 1
lmm_layout_gen: 0
lmm_stripe_offset: 0
obdidx objid objid group
0 12422405 0xbd8d05 0
2 12568747 0xbfc8ab 0
4 12510482 0xbee512 0
7 12459112 0xbe1c68 0
1 12640385 0xc0e081 0
3 12488691 0xbe8ff3 0
5 12380754 0xbcea52 0
6 12594011 0xc02b5b 0
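Note: the three WARNING lines before each result are expected here. With stonewalling (-D 120) the run is cut off after roughly 120 s, so fewer bytes are moved than the nominal 64 tasks x 35295068160 bytes per task, and IOR reports bandwidth from the bytes actually moved. A rough cross-check with the iteration 0 numbers above (a sketch only, plain bash/bc, not part of the original script):

# expected aggregate size = number of tasks * block size per task
echo $((64 * 35295068160))                              # 2258884362240, matches "Expected aggregate file size"
# bandwidth ~= actual aggregate bytes moved / total time
echo "scale=1; 468432453632 / 1048576 / 120.26" | bc    # ~3714.7 MiB/s, matching the reported 3715 MiB/s write result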
@@ -0,0 +1,110 @@
+ /opt/ddn/mvapich/bin/mpiexec -ppn 4 -np 64 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c01,isc17-c02,isc17-c03,isc17-c04,isc17-c05,isc17-c06,isc17-c07,isc17-c08,isc17-c09,isc17-c11,isc17-c12,isc17-c13,isc17-c14,isc17-c15,isc17-c18,isc17-c22 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 10485760 -b 35295068160 -D 120 -a MPIIO -e -g -z -k -o /esfs/jtacquaviva/ioperf/file_write -w
+ tee -a ./output/COUNT:1#NN:16#PPN:4#API:MPIIO#T:10485760.txt
IOR-3.0.1: MPI Coordinated Test of Parallel I/O

ior WARNING: fsync() only available in POSIX. Using value of 0.
Began: Tue Oct 23 07:26:02 2018
Command line used: /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 10485760 -b 35295068160 -D 120 -a MPIIO -e -g -z -k -o /esfs/jtacquaviva/ioperf/file_write -w
Machine: Linux isc17-c01

Test 0 started: Tue Oct 23 07:26:02 2018
Summary:
	api                = MPIIO (version=3, subversion=0)
	test filename      = /esfs/jtacquaviva/ioperf/file_write
	access             = single-shared-file
	ordering in a file = random offsets
	ordering inter file= no tasks offsets
	clients            = 64 (4 per node)
	repetitions        = 3
	xfersize           = 10 MiB
	blocksize          = 32.87 GiB
	aggregate filesize = 2103.75 GiB
	Using stonewalling = 120 second(s)

access bw(MiB/s) block(KiB) xfer(KiB) open(s) wr/rd(s) close(s) total(s) iter
------ --------- ---------- --------- -------- -------- -------- -------- ----
WARNING: Expected aggregate file size = 2258884362240.
WARNING: Stat() of aggregate file size = 2258884362240.
WARNING: Using actual aggregate bytes moved = 383600558080.
write 3037.66 34467840 10240 0.018714 120.41 0.001233 120.43 0
WARNING: Expected aggregate file size = 2258884362240.
WARNING: Stat() of aggregate file size = 2258800476160.
WARNING: Using actual aggregate bytes moved = 373943173120.
write 2961.27 34467840 10240 0.069654 120.36 0.001210 120.43 1
WARNING: Expected aggregate file size = 2258884362240.
WARNING: Stat() of aggregate file size = 2258643189760.
WARNING: Using actual aggregate bytes moved = 371342704640.
write 2935.74 34467840 10240 0.101686 120.53 0.001531 120.63 2

Max Write: 3037.66 MiB/sec (3185.22 MB/sec)

Summary of all tests:
Operation Max(MiB) Min(MiB) Mean(MiB) StdDev Mean(s) Test# #Tasks tPN reps fPP reord reordoff reordrand seed segcnt blksiz xsize aggsize API RefNum
write 3037.66 2935.74 2978.22 43.30 120.49671 0 64 4 3 0 0 1 0 0 1 35295068160 10485760 383600558080 MPIIO 0

Finished: Tue Oct 23 07:32:09 2018
+ /opt/ddn/mvapich/bin/mpiexec -ppn 4 -np 64 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c01,isc17-c02,isc17-c03,isc17-c04,isc17-c05,isc17-c06,isc17-c07,isc17-c08,isc17-c09,isc17-c11,isc17-c12,isc17-c13,isc17-c14,isc17-c15,isc17-c18,isc17-c22 /esfs/jtacquaviva/git/ime-evaluation/drop_caches.sh
+ /opt/ddn/mvapich/bin/mpiexec -ppn 4 -np 64 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c01,isc17-c02,isc17-c03,isc17-c04,isc17-c05,isc17-c06,isc17-c07,isc17-c08,isc17-c09,isc17-c11,isc17-c12,isc17-c13,isc17-c14,isc17-c15,isc17-c18,isc17-c22 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 10485760 -b 35295068160 -D 120 -a MPIIO -e -g -z -k -o /esfs/jtacquaviva/file_read -r
+ tee -a ./output/COUNT:1#NN:16#PPN:4#API:MPIIO#T:10485760.txt
IOR-3.0.1: MPI Coordinated Test of Parallel I/O

ior WARNING: fsync() only available in POSIX. Using value of 0.
Began: Tue Oct 23 07:32:28 2018
Command line used: /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 10485760 -b 35295068160 -D 120 -a MPIIO -e -g -z -k -o /esfs/jtacquaviva/file_read -r
Machine: Linux isc17-c01

Test 0 started: Tue Oct 23 07:32:28 2018
Summary:
	api                = MPIIO (version=3, subversion=0)
	test filename      = /esfs/jtacquaviva/file_read
	access             = single-shared-file
	ordering in a file = random offsets
	ordering inter file= no tasks offsets
	clients            = 64 (4 per node)
	repetitions        = 3
	xfersize           = 10 MiB
	blocksize          = 32.87 GiB
	aggregate filesize = 2103.75 GiB
	Using stonewalling = 120 second(s)

access bw(MiB/s) block(KiB) xfer(KiB) open(s) wr/rd(s) close(s) total(s) iter
------ --------- ---------- --------- -------- -------- -------- -------- ----
WARNING: Expected aggregate file size = 2258884362240.
WARNING: Stat() of aggregate file size = 4517768724480.
WARNING: Using actual aggregate bytes moved = 395564810240.
read 3136.47 34467840 10240 0.092180 120.19 0.004495 120.28 0
WARNING: Expected aggregate file size = 2258884362240.
WARNING: Stat() of aggregate file size = 4517768724480.
WARNING: Using actual aggregate bytes moved = 400650403840.
read 3178.32 34467840 10240 0.005780 120.21 0.004368 120.22 1
WARNING: Expected aggregate file size = 2258884362240.
WARNING: Stat() of aggregate file size = 4517768724480.
WARNING: Using actual aggregate bytes moved = 400356802560.
read 3174.71 34467840 10240 0.006326 120.26 0.004378 120.27 2

Max Read: 3178.32 MiB/sec (3332.71 MB/sec)

Summary of all tests:
Operation Max(MiB) Min(MiB) Mean(MiB) StdDev Mean(s) Test# #Tasks tPN reps fPP reord reordoff reordrand seed segcnt blksiz xsize aggsize API RefNum
read 3178.32 3136.47 3163.17 18.93 120.25298 0 64 4 3 0 0 1 0 0 1 35295068160 10485760 395564810240 MPIIO 0

Finished: Tue Oct 23 07:38:29 2018
+ set +x
/esfs/jtacquaviva/ioperf
stripe_count: 32 stripe_size: 1048576 stripe_offset: -1
/esfs/jtacquaviva/ioperf/file_write
lmm_stripe_count: 8
lmm_stripe_size: 1048576
lmm_pattern: 1
lmm_layout_gen: 0
lmm_stripe_offset: 2
obdidx objid objid group
2 12567743 0xbfc4bf 0
4 12509471 0xbee11f 0
7 12458104 0xbe1878 0
1 12639378 0xc0dc92 0
3 12487683 0xbe8c03 0
5 12379744 0xbce660 0
6 12593001 0xc02769 0
0 12421396 0xbd8914 0
@@ -0,0 +1,438 @@
+ /opt/ddn/mvapich/bin/mpiexec -ppn 4 -np 64 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c01,isc17-c02,isc17-c03,isc17-c04,isc17-c05,isc17-c06,isc17-c07,isc17-c08,isc17-c09,isc17-c11,isc17-c12,isc17-c13,isc17-c14,isc17-c15,isc17-c18,isc17-c22 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 35295068160 -D 120 -a POSIX -F -e -g -z -k -o /esfs/jtacquaviva/ioperf/file_write -w
+ tee -a ./output/COUNT:1#NN:16#PPN:4#API:POSIX#T:102400.txt
IOR-3.0.1: MPI Coordinated Test of Parallel I/O

ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
[cli_13]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 13
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
[cli_15]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 15
|
||||
[cli_0]: [cli_32]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 32
|
||||
[cli_14]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 14
|
||||
[cli_2]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 2
|
||||
[cli_9]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 9
|
||||
[cli_5]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 5
|
||||
[cli_25]: [cli_31]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 31
|
||||
[cli_20]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 20
|
||||
[cli_37]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 37
|
||||
[cli_45]: [cli_33]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 33
|
||||
[cli_41]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 41
|
||||
[cli_17]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 17
|
||||
[cli_48]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 48
|
||||
[cli_56]: [cli_60]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 60
|
||||
[cli_53]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 53
|
||||
[cli_12]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 0
|
||||
[cli_11]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 11
|
||||
[cli_7]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 7
|
||||
[cli_27]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 27
|
||||
[cli_28]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 28
|
||||
[cli_22]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 22
|
||||
[cli_39]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 39
|
||||
[cli_47]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 47
|
||||
[cli_34]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 34
|
||||
[cli_43]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 43
|
||||
[cli_16]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 16
|
||||
[cli_49]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 49
|
||||
[cli_57]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 57
|
||||
[cli_61]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 61
|
||||
[cli_55]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 55
|
||||
aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 12
|
||||
[cli_1]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 1
|
||||
[cli_8]: [cli_4]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 4
|
||||
[cli_24]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 24
|
||||
[cli_29]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 29
|
||||
[cli_23]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 23
|
||||
[cli_38]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 38
|
||||
aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 45
|
||||
[cli_40]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 40
|
||||
[cli_18]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 18
|
||||
[cli_50]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 50
|
||||
[cli_59]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 59
|
||||
[cli_62]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 62
|
||||
[cli_54]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 54
|
||||
[cli_3]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 3
|
||||
[cli_10]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 10
|
||||
[cli_6]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 6
|
||||
aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 25
|
||||
[cli_30]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 30
|
||||
[cli_21]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 21
|
||||
[cli_36]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 36
|
||||
[cli_46]: [cli_35]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 35
|
||||
[cli_42]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 42
|
||||
[cli_19]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 19
|
||||
[cli_51]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 51
|
||||
aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 56
|
||||
[cli_63]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 63
|
||||
[cli_52]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 52
|
||||
aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 8
|
||||
[cli_26]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 26
|
||||
aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 46
|
||||
[cli_58]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 58
|
||||
[cli_44]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 44
|
||||

===================================================================================
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
= PID 34002 RUNNING AT isc17-c11
= EXIT CODE: 255
= CLEANING UP REMAINING PROCESSES
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
===================================================================================
[proxy:0:3@isc17-c04] HYD_pmcd_pmip_control_cmd_cb (pm/pmiserv/pmip_cb.c:912): assert (!closed) failed
[proxy:0:3@isc17-c04] HYDT_dmxu_poll_wait_for_event (tools/demux/demux_poll.c:76): callback returned error status
[proxy:0:3@isc17-c04] main (pm/pmiserv/pmip.c:256): demux engine error waiting for event
[proxy:0:15@isc17-c22] HYD_pmcd_pmip_control_cmd_cb (pm/pmiserv/pmip_cb.c:912): assert (!closed) failed
[proxy:0:15@isc17-c22] HYDT_dmxu_poll_wait_for_event (tools/demux/demux_poll.c:76): callback returned error status
[proxy:0:15@isc17-c22] main (pm/pmiserv/pmip.c:256): demux engine error waiting for event
[mpiexec@isc17-c04] HYDT_bscu_wait_for_completion (tools/bootstrap/utils/bscu_wait.c:76): one of the processes terminated badly; aborting
[mpiexec@isc17-c04] HYDT_bsci_wait_for_completion (tools/bootstrap/src/bsci_wait.c:23): launcher returned error waiting for completion
[mpiexec@isc17-c04] HYD_pmci_wait_for_completion (pm/pmiserv/pmiserv_pmci.c:218): launcher returned error waiting for completion
[mpiexec@isc17-c04] main (ui/mpich/mpiexec.c:344): process manager error waiting for completion
+ /opt/ddn/mvapich/bin/mpiexec -ppn 4 -np 64 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c01,isc17-c02,isc17-c03,isc17-c04,isc17-c05,isc17-c06,isc17-c07,isc17-c08,isc17-c09,isc17-c11,isc17-c12,isc17-c13,isc17-c14,isc17-c15,isc17-c18,isc17-c22 /esfs/jtacquaviva/git/ime-evaluation/drop_caches.sh
+ tee -a ./output/COUNT:1#NN:16#PPN:4#API:POSIX#T:102400.txt
+ /opt/ddn/mvapich/bin/mpiexec -ppn 4 -np 64 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c01,isc17-c02,isc17-c03,isc17-c04,isc17-c05,isc17-c06,isc17-c07,isc17-c08,isc17-c09,isc17-c11,isc17-c12,isc17-c13,isc17-c14,isc17-c15,isc17-c18,isc17-c22 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 35295068160 -D 120 -a POSIX -F -e -g -z -k -o /esfs/jtacquaviva/indread16/file -r
IOR-3.0.1: MPI Coordinated Test of Parallel I/O

ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
[cli_0]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 0
|
||||
[cli_1]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 1
|
||||
[cli_3]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 3
|
||||
[cli_2]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 2
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
[cli_9]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 9
|
||||
[cli_11]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 11
|
||||
[cli_8]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 8
|
||||
[cli_10]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 10
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
[cli_47]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 47
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
[cli_45]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 45
|
||||
[cli_44]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 44
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
[cli_46]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 46
|
||||
[cli_33]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 33
|
||||
[cli_35]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 35
|
||||
[cli_32]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 32
|
||||
[cli_34]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 34
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
[cli_40]: [cli_41]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 41
|
||||
[cli_42]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 42
|
||||
[cli_43]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 43
|
||||
aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 40
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
[cli_7]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 7
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
[cli_4]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 4
|
||||
[cli_5]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 5
|
||||
[cli_6]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 6
|
||||
[cli_52]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 52
|
||||
[cli_53]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 53
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
[cli_54]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 54
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
[cli_55]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 55
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
[cli_49]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 49
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
[cli_51]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 51
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
[cli_48]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 48
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
[cli_37]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 37
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
[cli_29]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 29
|
||||
[cli_50]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 50
|
||||
[cli_17]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 17
|
||||
[cli_39]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 39
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
[cli_21]: [cli_19]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 19
|
||||
[cli_36]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 36
|
||||
[cli_31]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 31
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
[cli_23]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 23
|
||||
[cli_25]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 25
|
||||
[cli_16]: [cli_38]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 38
|
||||
[cli_30]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 30
|
||||
aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 21
|
||||
[cli_57]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 57
|
||||
[cli_27]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 27
|
||||
[cli_63]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 63
|
||||
[cli_18]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 18
|
||||
[cli_28]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 28
|
||||
[cli_20]: [cli_59]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 59
|
||||
[cli_24]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 24
|
||||
[cli_60]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 60
|
||||
aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 16
|
||||
[cli_22]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 22
|
||||
[cli_56]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 56
|
||||
[cli_26]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 26
|
||||
[cli_61]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 61
|
||||
aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 20
|
||||
[cli_58]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 58
|
||||
[cli_62]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 62
|
||||
[cli_13]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 13
|
||||
[cli_15]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 15
|
||||
[cli_12]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 12
|
||||
[cli_14]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 14
|
||||

===================================================================================
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
= PID 34724 RUNNING AT isc17-c09
= EXIT CODE: 255
= CLEANING UP REMAINING PROCESSES
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
===================================================================================
[proxy:0:0@isc17-c01] HYD_pmcd_pmip_control_cmd_cb (pm/pmiserv/pmip_cb.c:912): assert (!closed) failed
[proxy:0:0@isc17-c01] HYDT_dmxu_poll_wait_for_event (tools/demux/demux_poll.c:76): callback returned error status
[proxy:0:0@isc17-c01] main (pm/pmiserv/pmip.c:256): demux engine error waiting for event
[proxy:0:14@isc17-c18] HYD_pmcd_pmip_control_cmd_cb (pm/pmiserv/pmip_cb.c:912): assert (!closed) failed
[proxy:0:14@isc17-c18] HYDT_dmxu_poll_wait_for_event (tools/demux/demux_poll.c:76): callback returned error status
[proxy:0:14@isc17-c18] main (pm/pmiserv/pmip.c:256): demux engine error waiting for event
[proxy:0:4@isc17-c05] HYD_pmcd_pmip_control_cmd_cb (pm/pmiserv/pmip_cb.c:912): assert (!closed) failed
[proxy:0:4@isc17-c05] HYDT_dmxu_poll_wait_for_event (tools/demux/demux_poll.c:76): callback returned error status
[proxy:0:4@isc17-c05] main (pm/pmiserv/pmip.c:256): demux engine error waiting for event
[proxy:0:5@isc17-c06] HYD_pmcd_pmip_control_cmd_cb (pm/pmiserv/pmip_cb.c:912): assert (!closed) failed
[proxy:0:5@isc17-c06] HYDT_dmxu_poll_wait_for_event (tools/demux/demux_poll.c:76): callback returned error status
[proxy:0:5@isc17-c06] main (pm/pmiserv/pmip.c:256): demux engine error waiting for event
[mpiexec@isc17-c04] HYDT_bscu_wait_for_completion (tools/bootstrap/utils/bscu_wait.c:76): one of the processes terminated badly; aborting
[mpiexec@isc17-c04] HYDT_bsci_wait_for_completion (tools/bootstrap/src/bsci_wait.c:23): launcher returned error waiting for completion
[mpiexec@isc17-c04] HYD_pmci_wait_for_completion (pm/pmiserv/pmiserv_pmci.c:218): launcher returned error waiting for completion
[mpiexec@isc17-c04] main (ui/mpich/mpiexec.c:344): process manager error waiting for completion
+ set +x
/esfs/jtacquaviva/ioperf
stripe_count: 32 stripe_size: 1048576 stripe_offset: -1
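Note: the POSIX T:102400 runs above never start measuring because IOR requires the block size (-b) to be a multiple of the transfer size (-t), and 35295068160 is not a multiple of 102400; the varying errno suffixes (errno 2 / errno 0) are incidental to that check. A quick arithmetic check with the values from the command lines above (a sketch only, not part of the original script):

# ior aborts at ior.c:2293 when -b is not a multiple of -t
echo $((35295068160 % 102400))     # 40960 -> not a multiple, every rank calls MPI_Abort
echo $((35295068160 % 1048576))    # 0     -> the T:1048576 runs pass this check

The same mismatch applies to the -t 102400 / -b 23530045440 run further below.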
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -0,0 +1,628 @@
+ /opt/ddn/mvapich/bin/mpiexec -ppn 6 -np 96 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c01,isc17-c02,isc17-c03,isc17-c04,isc17-c05,isc17-c06,isc17-c07,isc17-c08,isc17-c09,isc17-c11,isc17-c12,isc17-c13,isc17-c14,isc17-c15,isc17-c18,isc17-c22 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 23530045440 -D 120 -a MPIIO -e -g -z -k -o /esfs/jtacquaviva/ioperf/file_write -w
+ tee -a ./output/COUNT:1#NN:16#PPN:6#API:MPIIO#T:102400.txt
IOR-3.0.1: MPI Coordinated Test of Parallel I/O

ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
[cli_19]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 19
|
||||
[cli_21]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 21
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
[cli_23]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 23
|
||||
[cli_18]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 18
|
||||
tee: standard output[cli_20]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 20
|
||||
[cli_22]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 22
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
: Resource temporarily unavailable
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
[cli_43]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 43
|
||||
[cli_55]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 55
|
||||
[cli_45]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 45
|
||||
[cli_49]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 49
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
[... the remaining MPI ranks (processes 0-95) repeat the same "ior ERROR: block size must be a multiple of transfer size" message, each followed by "[cli_N]: aborting job:" and "application called MPI_Abort(MPI_COMM_WORLD, -1) - process N" ...]

===================================================================================
=   BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
=   PID 15953 RUNNING AT isc17-c15
=   EXIT CODE: 255
=   CLEANING UP REMAINING PROCESSES
=   YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
===================================================================================
[proxy:0:3@isc17-c04] HYDU_sock_write (utils/sock/sock.c:286): write error (Broken pipe)
[proxy:0:3@isc17-c04] main (pm/pmiserv/pmip.c:265): unable to send EXIT_STATUS command upstream
[mpiexec@isc17-c04] HYDT_bscu_wait_for_completion (tools/bootstrap/utils/bscu_wait.c:76): one of the processes terminated badly; aborting
[mpiexec@isc17-c04] HYDT_bsci_wait_for_completion (tools/bootstrap/src/bsci_wait.c:23): launcher returned error waiting for completion
[mpiexec@isc17-c04] HYD_pmci_wait_for_completion (pm/pmiserv/pmiserv_pmci.c:218): launcher returned error waiting for completion
[mpiexec@isc17-c04] main (ui/mpich/mpiexec.c:344): process manager error waiting for completion
tee: write error
+ /opt/ddn/mvapich/bin/mpiexec -ppn 6 -np 96 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c01,isc17-c02,isc17-c03,isc17-c04,isc17-c05,isc17-c06,isc17-c07,isc17-c08,isc17-c09,isc17-c11,isc17-c12,isc17-c13,isc17-c14,isc17-c15,isc17-c18,isc17-c22 /esfs/jtacquaviva/git/ime-evaluation/drop_caches.sh
+ tee -a ./output/COUNT:1#NN:16#PPN:6#API:MPIIO#T:102400.txt
+ /opt/ddn/mvapich/bin/mpiexec -ppn 6 -np 96 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c01,isc17-c02,isc17-c03,isc17-c04,isc17-c05,isc17-c06,isc17-c07,isc17-c08,isc17-c09,isc17-c11,isc17-c12,isc17-c13,isc17-c14,isc17-c15,isc17-c18,isc17-c22 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 23530045440 -D 120 -a MPIIO -e -g -z -k -o /esfs/jtacquaviva/file_read -r
IOR-3.0.1: MPI Coordinated Test of Parallel I/O

ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
[cli_1]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 1
[... the remaining MPI ranks (processes 0-95) repeat the same "ior ERROR: block size must be a multiple of transfer size" message and abort via MPI_Abort(MPI_COMM_WORLD, -1) ...]

===================================================================================
=   BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
=   PID 16028 RUNNING AT isc17-c15
=   EXIT CODE: 255
=   CLEANING UP REMAINING PROCESSES
=   YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
===================================================================================
[proxy:0:9@isc17-c11] HYD_pmcd_pmip_control_cmd_cb (pm/pmiserv/pmip_cb.c:912): assert (!closed) failed
[proxy:0:9@isc17-c11] HYDT_dmxu_poll_wait_for_event (tools/demux/demux_poll.c:76): callback returned error status
[proxy:0:9@isc17-c11] main (pm/pmiserv/pmip.c:206): demux engine error waiting for event
[proxy:0:8@isc17-c09] HYD_pmcd_pmip_control_cmd_cb (pm/pmiserv/pmip_cb.c:912): assert (!closed) failed
[proxy:0:8@isc17-c09] HYDT_dmxu_poll_wait_for_event (tools/demux/demux_poll.c:76): callback returned error status
[proxy:0:8@isc17-c09] main (pm/pmiserv/pmip.c:206): demux engine error waiting for event
[proxy:0:2@isc17-c03] HYD_pmcd_pmip_control_cmd_cb (pm/pmiserv/pmip_cb.c:912): assert (!closed) failed
[proxy:0:2@isc17-c03] HYDT_dmxu_poll_wait_for_event (tools/demux/demux_poll.c:76): callback returned error status
[proxy:0:2@isc17-c03] main (pm/pmiserv/pmip.c:256): demux engine error waiting for event
[proxy:0:0@isc17-c01] HYD_pmcd_pmip_control_cmd_cb (pm/pmiserv/pmip_cb.c:912): assert (!closed) failed
[proxy:0:0@isc17-c01] HYDT_dmxu_poll_wait_for_event (tools/demux/demux_poll.c:76): callback returned error status
[proxy:0:0@isc17-c01] main (pm/pmiserv/pmip.c:256): demux engine error waiting for event
[mpiexec@isc17-c04] HYDT_bscu_wait_for_completion (tools/bootstrap/utils/bscu_wait.c:76): one of the processes terminated badly; aborting
[mpiexec@isc17-c04] HYDT_bsci_wait_for_completion (tools/bootstrap/src/bsci_wait.c:23): launcher returned error waiting for completion
[mpiexec@isc17-c04] HYD_pmci_wait_for_completion (pm/pmiserv/pmiserv_pmci.c:218): launcher returned error waiting for completion
[mpiexec@isc17-c04] main (ui/mpich/mpiexec.c:344): process manager error waiting for completion
+ set +x
/esfs/jtacquaviva/ioperf
stripe_count: 32 stripe_size: 1048576 stripe_offset: -1
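Note: the aborted T:102400 runs above fail IOR's parameter check because the block size is not a multiple of the transfer size. The variable names below are illustrative (not part of the test harness), but the arithmetic follows directly from the command lines in this log:

# Sketch: check which of the transfer sizes used here divide the block size (-b 23530045440).
BLOCK=23530045440
for XFER in 102400 1048576 10485760; do
    if [ $((BLOCK % XFER)) -eq 0 ]; then
        echo "-t $XFER divides -b $BLOCK evenly ($((BLOCK / XFER)) transfers)"
    else
        echo "-t $XFER leaves remainder $((BLOCK % XFER)) -> ior aborts"
    fi
done
# 102400 leaves remainder 61440, which matches the repeated ior ERROR above;
# 1048576 and 10485760 divide evenly, which is why the runs that follow complete.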
@@ -0,0 +1,110 @@
+ /opt/ddn/mvapich/bin/mpiexec -ppn 6 -np 96 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c01,isc17-c02,isc17-c03,isc17-c04,isc17-c05,isc17-c06,isc17-c07,isc17-c08,isc17-c09,isc17-c11,isc17-c12,isc17-c13,isc17-c14,isc17-c15,isc17-c18,isc17-c22 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 1048576 -b 23530045440 -D 120 -a MPIIO -e -g -z -k -o /esfs/jtacquaviva/ioperf/file_write -w
+ tee -a ./output/COUNT:1#NN:16#PPN:6#API:MPIIO#T:1048576.txt
IOR-3.0.1: MPI Coordinated Test of Parallel I/O

ior WARNING: fsync() only available in POSIX. Using value of 0.
Began: Tue Oct 23 09:09:04 2018
Command line used: /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 1048576 -b 23530045440 -D 120 -a MPIIO -e -g -z -k -o /esfs/jtacquaviva/ioperf/file_write -w
Machine: Linux isc17-c01

Test 0 started: Tue Oct 23 09:09:04 2018
Summary:
	api                = MPIIO (version=3, subversion=0)
	test filename      = /esfs/jtacquaviva/ioperf/file_write
	access             = single-shared-file
	ordering in a file = random offsets
	ordering inter file= no tasks offsets
	clients            = 96 (6 per node)
	repetitions        = 3
	xfersize           = 1 MiB
	blocksize          = 21.91 GiB
	aggregate filesize = 2103.75 GiB
	Using stonewalling = 120 second(s)

access    bw(MiB/s)  block(KiB) xfer(KiB)  open(s)    wr/rd(s)   close(s)   total(s)   iter
------    ---------  ---------- ---------  --------   --------   --------   --------   ----
WARNING: Expected aggregate file size       = 2258884362240.
WARNING: Stat() of aggregate file size      = 2258880167936.
WARNING: Using actual aggregate bytes moved = 510574723072.
write     4039       22978560   1024.00    0.357625   120.21     0.012212   120.55     0
WARNING: Expected aggregate file size       = 2258884362240.
WARNING: Stat() of aggregate file size      = 2258882265088.
WARNING: Using actual aggregate bytes moved = 505900171264.
write     4012       22978560   1024.00    0.098760   120.17     0.011034   120.26     1
WARNING: Expected aggregate file size       = 2258884362240.
WARNING: Stat() of aggregate file size      = 2258880167936.
WARNING: Using actual aggregate bytes moved = 473096519680.
write     3752       22978560   1024.00    0.058560   120.20     0.011193   120.25     2

Max Write: 4039.02 MiB/sec (4235.22 MB/sec)

Summary of all tests:
Operation   Max(MiB)   Min(MiB)  Mean(MiB)     StdDev    Mean(s) Test# #Tasks tPN reps fPP reord reordoff reordrand seed segcnt blksiz xsize aggsize API RefNum
write        4039.02    3752.17    3934.32     129.28  120.35379 0 96 6 3 0 0 1 0 0 1 23530045440 1048576 510574723072 MPIIO 0

Finished: Tue Oct 23 09:15:12 2018
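Because the run is stonewalled at 120 seconds (-D 120), the bandwidth above is derived from the bytes actually moved rather than from the expected aggregate file size, which is why each result is preceded by WARNING lines. A rough re-derivation for write iteration 0, using the numbers reported above (only a sanity-check sketch, not part of the harness):

# bw(MiB/s) ~= actual aggregate bytes moved / total(s) / 2^20
echo "510574723072 120.55" | awk '{ printf "%.1f MiB/s\n", $1 / $2 / (1024 * 1024) }'
# Prints roughly 4039 MiB/s, in line with the reported Max Write of 4039.02 MiB/sec.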
+ /opt/ddn/mvapich/bin/mpiexec -ppn 6 -np 96 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c01,isc17-c02,isc17-c03,isc17-c04,isc17-c05,isc17-c06,isc17-c07,isc17-c08,isc17-c09,isc17-c11,isc17-c12,isc17-c13,isc17-c14,isc17-c15,isc17-c18,isc17-c22 /esfs/jtacquaviva/git/ime-evaluation/drop_caches.sh
+ tee -a ./output/COUNT:1#NN:16#PPN:6#API:MPIIO#T:1048576.txt
+ /opt/ddn/mvapich/bin/mpiexec -ppn 6 -np 96 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c01,isc17-c02,isc17-c03,isc17-c04,isc17-c05,isc17-c06,isc17-c07,isc17-c08,isc17-c09,isc17-c11,isc17-c12,isc17-c13,isc17-c14,isc17-c15,isc17-c18,isc17-c22 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 1048576 -b 23530045440 -D 120 -a MPIIO -e -g -z -k -o /esfs/jtacquaviva/file_read -r
IOR-3.0.1: MPI Coordinated Test of Parallel I/O

ior WARNING: fsync() only available in POSIX. Using value of 0.
Began: Tue Oct 23 09:15:33 2018
Command line used: /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 1048576 -b 23530045440 -D 120 -a MPIIO -e -g -z -k -o /esfs/jtacquaviva/file_read -r
Machine: Linux isc17-c01

Test 0 started: Tue Oct 23 09:15:33 2018
Summary:
	api                = MPIIO (version=3, subversion=0)
	test filename      = /esfs/jtacquaviva/file_read
	access             = single-shared-file
	ordering in a file = random offsets
	ordering inter file= no tasks offsets
	clients            = 96 (6 per node)
	repetitions        = 3
	xfersize           = 1 MiB
	blocksize          = 21.91 GiB
	aggregate filesize = 2103.75 GiB
	Using stonewalling = 120 second(s)

access    bw(MiB/s)  block(KiB) xfer(KiB)  open(s)    wr/rd(s)   close(s)   total(s)   iter
------    ---------  ---------- ---------  --------   --------   --------   --------   ----
WARNING: Expected aggregate file size       = 2258884362240.
WARNING: Stat() of aggregate file size      = 4517768724480.
WARNING: Using actual aggregate bytes moved = 215256924160.
read      1702.23    22978560   1024.00    0.401618   120.20     0.023086   120.60     0
WARNING: Expected aggregate file size       = 2258884362240.
WARNING: Stat() of aggregate file size      = 4517768724480.
WARNING: Using actual aggregate bytes moved = 218279968768.
read      1731.78    22978560   1024.00    0.018325   120.19     0.016253   120.20     1
WARNING: Expected aggregate file size       = 2258884362240.
WARNING: Stat() of aggregate file size      = 4517768724480.
WARNING: Using actual aggregate bytes moved = 219302330368.
read      1739.75    22978560   1024.00    0.019019   120.21     0.016462   120.21     2

Max Read: 1739.75 MiB/sec (1824.26 MB/sec)

Summary of all tests:
Operation   Max(MiB)   Min(MiB)  Mean(MiB)     StdDev    Mean(s) Test# #Tasks tPN reps fPP reord reordoff reordrand seed segcnt blksiz xsize aggsize API RefNum
read         1739.75    1702.23    1724.58      16.14  120.33906 0 96 6 3 0 0 1 0 0 1 23530045440 1048576 215256924160 MPIIO 0

Finished: Tue Oct 23 09:21:34 2018
+ set +x
/esfs/jtacquaviva/ioperf
stripe_count: 32 stripe_size: 1048576 stripe_offset: -1
/esfs/jtacquaviva/ioperf/file_write
lmm_stripe_count:   8
lmm_stripe_size:    1048576
lmm_pattern:        1
lmm_layout_gen:     0
lmm_stripe_offset:  1
	obdidx		 objid		 objid		 group
	     1	      12640190	     0xc0dfbe	             0
	     3	      12488498	     0xbe8f32	             0
	     5	      12380563	     0xbce993	             0
	     6	      12593819	     0xc02a9b	             0
	     0	      12422211	     0xbd8c43	             0
	     2	      12568557	     0xbfc7ed	             0
	     4	      12510290	     0xbee452	             0
	     7	      12458918	     0xbe1ba6	             0
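The per-file layout dump above (lmm_stripe_count, obdidx/objid table) is the kind of listing Lustre's lfs getstripe prints; it shows /esfs/jtacquaviva/ioperf/file_write striped across 8 OSTs even though the directory default stripe_count is 32. Presumably it was produced by something like the following (assumed invocation, not copied from the harness):

# Show the directory's default striping, then the actual layout of the test file.
lfs getstripe -d /esfs/jtacquaviva/ioperf
lfs getstripe /esfs/jtacquaviva/ioperf/file_write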
@@ -0,0 +1,110 @@
+ /opt/ddn/mvapich/bin/mpiexec -ppn 6 -np 96 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c01,isc17-c02,isc17-c03,isc17-c04,isc17-c05,isc17-c06,isc17-c07,isc17-c08,isc17-c09,isc17-c11,isc17-c12,isc17-c13,isc17-c14,isc17-c15,isc17-c18,isc17-c22 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 10485760 -b 23530045440 -D 120 -a MPIIO -e -g -z -k -o /esfs/jtacquaviva/ioperf/file_write -w
+ tee -a ./output/COUNT:1#NN:16#PPN:6#API:MPIIO#T:10485760.txt
IOR-3.0.1: MPI Coordinated Test of Parallel I/O

ior WARNING: fsync() only available in POSIX. Using value of 0.
Began: Tue Oct 23 07:00:20 2018
Command line used: /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 10485760 -b 23530045440 -D 120 -a MPIIO -e -g -z -k -o /esfs/jtacquaviva/ioperf/file_write -w
Machine: Linux isc17-c01

Test 0 started: Tue Oct 23 07:00:20 2018
Summary:
	api                = MPIIO (version=3, subversion=0)
	test filename      = /esfs/jtacquaviva/ioperf/file_write
	access             = single-shared-file
	ordering in a file = random offsets
	ordering inter file= no tasks offsets
	clients            = 96 (6 per node)
	repetitions        = 3
	xfersize           = 10 MiB
	blocksize          = 21.91 GiB
	aggregate filesize = 2103.75 GiB
	Using stonewalling = 120 second(s)

access    bw(MiB/s)  block(KiB) xfer(KiB)  open(s)    wr/rd(s)   close(s)   total(s)   iter
------    ---------  ---------- ---------  --------   --------   --------   --------   ----
WARNING: Expected aggregate file size       = 2258884362240.
WARNING: Stat() of aggregate file size      = 2258863390720.
WARNING: Using actual aggregate bytes moved = 428615925760.
write     3376.36    22978560   10240      0.359075   120.71     0.013151   121.07     0
WARNING: Expected aggregate file size       = 2258884362240.
WARNING: Stat() of aggregate file size      = 2258748047360.
WARNING: Using actual aggregate bytes moved = 427514920960.
write     3384.04    22978560   10240      0.031607   120.46     0.012897   120.48     1
WARNING: Expected aggregate file size       = 2258884362240.
WARNING: Stat() of aggregate file size      = 2258842419200.
WARNING: Using actual aggregate bytes moved = 426078371840.
write     3365.74    22978560   10240      0.129134   120.61     0.013151   120.73     2

Max Write: 3384.04 MiB/sec (3548.43 MB/sec)

Summary of all tests:
Operation   Max(MiB)   Min(MiB)  Mean(MiB)     StdDev    Mean(s) Test# #Tasks tPN reps fPP reord reordoff reordrand seed segcnt blksiz xsize aggsize API RefNum
write        3384.04    3365.74    3375.38       7.51  120.75791 0 96 6 3 0 0 1 0 0 1 23530045440 10485760 428615925760 MPIIO 0

Finished: Tue Oct 23 07:06:30 2018
+ /opt/ddn/mvapich/bin/mpiexec -ppn 6 -np 96 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c01,isc17-c02,isc17-c03,isc17-c04,isc17-c05,isc17-c06,isc17-c07,isc17-c08,isc17-c09,isc17-c11,isc17-c12,isc17-c13,isc17-c14,isc17-c15,isc17-c18,isc17-c22 /esfs/jtacquaviva/git/ime-evaluation/drop_caches.sh
+ /opt/ddn/mvapich/bin/mpiexec -ppn 6 -np 96 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c01,isc17-c02,isc17-c03,isc17-c04,isc17-c05,isc17-c06,isc17-c07,isc17-c08,isc17-c09,isc17-c11,isc17-c12,isc17-c13,isc17-c14,isc17-c15,isc17-c18,isc17-c22 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 10485760 -b 23530045440 -D 120 -a MPIIO -e -g -z -k -o /esfs/jtacquaviva/file_read -r
+ tee -a ./output/COUNT:1#NN:16#PPN:6#API:MPIIO#T:10485760.txt
IOR-3.0.1: MPI Coordinated Test of Parallel I/O

ior WARNING: fsync() only available in POSIX. Using value of 0.
Began: Tue Oct 23 07:06:50 2018
Command line used: /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 10485760 -b 23530045440 -D 120 -a MPIIO -e -g -z -k -o /esfs/jtacquaviva/file_read -r
Machine: Linux isc17-c01

Test 0 started: Tue Oct 23 07:06:50 2018
Summary:
	api                = MPIIO (version=3, subversion=0)
	test filename      = /esfs/jtacquaviva/file_read
	access             = single-shared-file
	ordering in a file = random offsets
	ordering inter file= no tasks offsets
	clients            = 96 (6 per node)
	repetitions        = 3
	xfersize           = 10 MiB
	blocksize          = 21.91 GiB
	aggregate filesize = 2103.75 GiB
	Using stonewalling = 120 second(s)

access    bw(MiB/s)  block(KiB) xfer(KiB)  open(s)    wr/rd(s)   close(s)   total(s)   iter
------    ---------  ---------- ---------  --------   --------   --------   --------   ----
WARNING: Expected aggregate file size       = 2258884362240.
WARNING: Stat() of aggregate file size      = 4517768724480.
WARNING: Using actual aggregate bytes moved = 414533550080.
read      3273.29    22978560   10240      0.409388   120.38     0.014809   120.77     0
WARNING: Expected aggregate file size       = 2258884362240.
WARNING: Stat() of aggregate file size      = 4517768724480.
WARNING: Using actual aggregate bytes moved = 417626849280.
read      3308.40    22978560   10240      0.017003   120.37     0.015016   120.38     1
WARNING: Expected aggregate file size       = 2258884362240.
WARNING: Stat() of aggregate file size      = 4517768724480.
WARNING: Using actual aggregate bytes moved = 422639042560.
read      3346.21    22978560   10240      0.017556   120.44     0.015250   120.45     2

Max Read: 3346.21 MiB/sec (3508.75 MB/sec)

Summary of all tests:
Operation   Max(MiB)   Min(MiB)  Mean(MiB)     StdDev    Mean(s) Test# #Tasks tPN reps fPP reord reordoff reordrand seed segcnt blksiz xsize aggsize API RefNum
read         3346.21    3273.29    3309.30      29.77  120.53718 0 96 6 3 0 0 1 0 0 1 23530045440 10485760 414533550080 MPIIO 0

Finished: Tue Oct 23 07:12:52 2018
+ set +x
/esfs/jtacquaviva/ioperf
stripe_count: 32 stripe_size: 1048576 stripe_offset: -1
/esfs/jtacquaviva/ioperf/file_write
lmm_stripe_count:   8
lmm_stripe_size:    1048576
lmm_pattern:        1
lmm_layout_gen:     0
lmm_stripe_offset:  5
	obdidx		 objid		 objid		 group
	     5	      12379549	     0xbce59d	             0
	     6	      12592809	     0xc026a9	             0
	     0	      12421203	     0xbd8853	             0
	     2	      12567552	     0xbfc400	             0
	     4	      12509278	     0xbee05e	             0
	     7	      12457915	     0xbe17bb	             0
	     1	      12639187	     0xc0dbd3	             0
	     3	      12487491	     0xbe8b43	             0
@@ -0,0 +1,632 @@
+ /opt/ddn/mvapich/bin/mpiexec -ppn 6 -np 96 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c01,isc17-c02,isc17-c03,isc17-c04,isc17-c05,isc17-c06,isc17-c07,isc17-c08,isc17-c09,isc17-c11,isc17-c12,isc17-c13,isc17-c14,isc17-c15,isc17-c18,isc17-c22 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 23530045440 -D 120 -a POSIX -F -e -g -z -k -o /esfs/jtacquaviva/ioperf/file_write -w
+ tee -a ./output/COUNT:1#NN:16#PPN:6#API:POSIX#T:102400.txt
IOR-3.0.1: MPI Coordinated Test of Parallel I/O

ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_18]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 18
[cli_1]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 1
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_19]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 19
[cli_5]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 5
[cli_7]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 7
[cli_20]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 20
[cli_9]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 9
[cli_84]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 84
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_21]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 21
[cli_0]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 0
[cli_12]: [cli_37]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 37
[cli_8]: [cli_60]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 60
[cli_55]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 55
[cli_25]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 25
[cli_75]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 75
[cli_49]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 49
tee: [cli_86]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 86
standard output[cli_78]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 78
[cli_22]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 22
[cli_4]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 4
[cli_13]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 13
[cli_47]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 47
[cli_38]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 38
[cli_6]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 6
[cli_33]: [cli_61]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 61
[cli_59]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 59
[cli_27]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 27
[cli_72]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 72
[cli_51]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 51
[cli_68]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 68
[cli_90]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 90
[cli_89]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 89
[cli_79]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 79
: Resource temporarily unavailable
[cli_23]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 23
[cli_15]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 15
[cli_42]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 42
[cli_39]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 39
aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 8
[cli_35]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 35
[cli_63]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 63
[cli_54]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 54
[cli_29]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 29
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
[cli_73]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 73
[cli_50]: [cli_70]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 70
[cli_91]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 91
[cli_87]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 87
[cli_80]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 80
[cli_2]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 2
[cli_17]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 17
[cli_44]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 44
[cli_40]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 40
[cli_31]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 31
[cli_64]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 64
[cli_57]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 57
[cli_24]: [cli_74]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 74
[cli_52]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 52
[cli_66]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 66
[cli_92]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 92
[cli_88]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 88
[cli_82]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 82
[cli_3]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 3
aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 12
[cli_43]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 43
[cli_41]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 41
aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 33
[cli_62]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 62
[cli_58]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 58
[cli_26]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 26
[cli_76]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 76
[cli_48]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 48
[cli_67]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 67
[cli_93]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 93
[cli_83]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 83
[cli_16]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 16
[cli_45]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 45
[cli_36]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 36
[cli_10]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 10
[cli_30]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 30
[cli_65]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 65
[cli_56]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 56
[cli_28]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 28
[cli_77]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 77
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 50
[cli_71]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 71
[cli_94]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 94
[cli_85]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 85
[cli_81]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 81
[cli_14]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 14
[cli_46]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 46
[cli_11]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 11
[cli_34]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 34
aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 24
[cli_53]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 53
[cli_69]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 69
[cli_95]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 95
[cli_32]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 32

===================================================================================
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
= PID 15828 RUNNING AT isc17-c15
= EXIT CODE: 255
= CLEANING UP REMAINING PROCESSES
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
===================================================================================
[proxy:0:8@isc17-c09] HYD_pmcd_pmip_control_cmd_cb (pm/pmiserv/pmip_cb.c:912): assert (!closed) failed
[proxy:0:8@isc17-c09] HYDT_dmxu_poll_wait_for_event (tools/demux/demux_poll.c:76): callback returned error status
[proxy:0:8@isc17-c09] main (pm/pmiserv/pmip.c:206): demux engine error waiting for event
[proxy:0:9@isc17-c11] HYD_pmcd_pmip_control_cmd_cb (pm/pmiserv/pmip_cb.c:912): assert (!closed) failed
[proxy:0:9@isc17-c11] HYDT_dmxu_poll_wait_for_event (tools/demux/demux_poll.c:76): callback returned error status
[proxy:0:9@isc17-c11] main (pm/pmiserv/pmip.c:206): demux engine error waiting for event
[proxy:0:14@isc17-c18] HYD_pmcd_pmip_control_cmd_cb (pm/pmiserv/pmip_cb.c:912): assert (!closed) failed
[proxy:0:14@isc17-c18] HYDT_dmxu_poll_wait_for_event (tools/demux/demux_poll.c:76): callback returned error status
[proxy:0:14@isc17-c18] main (pm/pmiserv/pmip.c:256): demux engine error waiting for event
[mpiexec@isc17-c04] HYDT_bscu_wait_for_completion (tools/bootstrap/utils/bscu_wait.c:76): one of the processes terminated badly; aborting
[mpiexec@isc17-c04] HYDT_bsci_wait_for_completion (tools/bootstrap/src/bsci_wait.c:23): launcher returned error waiting for completion
[mpiexec@isc17-c04] HYD_pmci_wait_for_completion (pm/pmiserv/pmiserv_pmci.c:218): launcher returned error waiting for completion
[mpiexec@isc17-c04] main (ui/mpich/mpiexec.c:344): process manager error waiting for completion
tee: write error
+ /opt/ddn/mvapich/bin/mpiexec -ppn 6 -np 96 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c01,isc17-c02,isc17-c03,isc17-c04,isc17-c05,isc17-c06,isc17-c07,isc17-c08,isc17-c09,isc17-c11,isc17-c12,isc17-c13,isc17-c14,isc17-c15,isc17-c18,isc17-c22 /esfs/jtacquaviva/git/ime-evaluation/drop_caches.sh
+ tee -a ./output/COUNT:1#NN:16#PPN:6#API:POSIX#T:102400.txt
+ /opt/ddn/mvapich/bin/mpiexec -ppn 6 -np 96 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c01,isc17-c02,isc17-c03,isc17-c04,isc17-c05,isc17-c06,isc17-c07,isc17-c08,isc17-c09,isc17-c11,isc17-c12,isc17-c13,isc17-c14,isc17-c15,isc17-c18,isc17-c22 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 23530045440 -D 120 -a POSIX -F -e -g -z -k -o /esfs/jtacquaviva/indread16/file -r
IOR-3.0.1: MPI Coordinated Test of Parallel I/O

ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_66]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 66
[cli_60]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 60
[cli_72]: ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_68]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 68
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
[cli_61]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 61
[cli_74]: [cli_70]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 70
[cli_64]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 64
[cli_76]: [cli_12]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 12
aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 72
[cli_13]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 13
[cli_1]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 1
aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 74
[cli_16]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 16
[cli_53]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 53
[cli_4]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 4
aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 76
[cli_17]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 17
[cli_49]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 49
[cli_5]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 5
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_48]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 48
[cli_52]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 52
[cli_0]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 0
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_78]: ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_80]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 80
[cli_82]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 82
aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 78
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_50]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 50
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_3]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 3
[cli_14]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 14
[cli_81]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 81
[cli_2]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 2
[cli_77]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 77
[cli_15]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 15
[cli_67]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 67
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_84]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 84
[cli_51]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 51
[cli_65]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 65
[cli_75]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 75
[cli_69]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 69
[cli_86]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 86
[cli_62]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 62
[cli_83]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 83
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
[cli_88]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 88
[cli_63]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 63
[cli_73]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 73
[cli_79]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 79
[cli_71]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 71
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_31]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 31
[cli_37]: [cli_43]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 43
[cli_25]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 25
[cli_7]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 7
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_33]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 33
[cli_39]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 39
[cli_45]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 45
[cli_27]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 27
[cli_9]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 9
[cli_55]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 55
[cli_35]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 35
[cli_41]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 41
[cli_47]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 47
[cli_29]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 29
[cli_11]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 11
[cli_57]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 57
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
[cli_85]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 85
[cli_30]: [cli_36]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 36
[cli_42]: [cli_24]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 24
[cli_6]: [cli_56]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 56
[cli_87]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 87
[cli_32]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 32
aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 37
[cli_44]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 44
[cli_26]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 26
[cli_8]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 8
[cli_58]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 58
[cli_89]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 89
[cli_34]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 34
[cli_38]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 38
[cli_46]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 46
[cli_28]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 28
[cli_10]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 10
[cli_59]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 59
aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 30
[cli_40]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 40
aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 42
aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 6
[cli_54]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 54
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_90]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 90
[cli_92]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 92
[cli_93]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 93
[cli_94]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 94
[cli_95]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 95
[cli_91]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 91
[cli_19]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 19
[cli_23]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 23
[cli_21]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 21
[cli_20]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 20
[cli_22]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 22
[cli_18]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 18

===================================================================================
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
= PID 34458 RUNNING AT isc17-c09
= EXIT CODE: 255
= CLEANING UP REMAINING PROCESSES
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
===================================================================================
[proxy:0:11@isc17-c13] HYD_pmcd_pmip_control_cmd_cb (pm/pmiserv/pmip_cb.c:912): assert (!closed) failed
[proxy:0:11@isc17-c13] HYDT_dmxu_poll_wait_for_event (tools/demux/demux_poll.c:76): callback returned error status
[proxy:0:11@isc17-c13] main (pm/pmiserv/pmip.c:256): demux engine error waiting for event
[proxy:0:7@isc17-c08] HYD_pmcd_pmip_control_cmd_cb (pm/pmiserv/pmip_cb.c:912): assert (!closed) failed
[proxy:0:7@isc17-c08] HYDT_dmxu_poll_wait_for_event (tools/demux/demux_poll.c:76): callback returned error status
[proxy:0:7@isc17-c08] main (pm/pmiserv/pmip.c:256): demux engine error waiting for event
[proxy:0:6@isc17-c07] HYD_pmcd_pmip_control_cmd_cb (pm/pmiserv/pmip_cb.c:912): assert (!closed) failed
[proxy:0:6@isc17-c07] HYDT_dmxu_poll_wait_for_event (tools/demux/demux_poll.c:76): callback returned error status
[proxy:0:6@isc17-c07] main (pm/pmiserv/pmip.c:256): demux engine error waiting for event
[mpiexec@isc17-c04] HYDT_bscu_wait_for_completion (tools/bootstrap/utils/bscu_wait.c:76): one of the processes terminated badly; aborting
[mpiexec@isc17-c04] HYDT_bsci_wait_for_completion (tools/bootstrap/src/bsci_wait.c:23): launcher returned error waiting for completion
[mpiexec@isc17-c04] HYD_pmci_wait_for_completion (pm/pmiserv/pmiserv_pmci.c:218): launcher returned error waiting for completion
[mpiexec@isc17-c04] main (ui/mpich/mpiexec.c:344): process manager error waiting for completion
+ set +x
/esfs/jtacquaviva/ioperf
stripe_count: 32 stripe_size: 1048576 stripe_offset: -1
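Note: every ior invocation in this log aborts at ior.c:2293 before any data is transferred, because the block size given with -b is not an integer multiple of the transfer size given with -t 102400; each rank then calls MPI_Abort, which produces the flood of "aborting job" lines and the Hydra "BAD TERMINATION" cleanup above. A minimal sketch in plain shell to check the parameters used in these runs (the loop values are copied from the command lines in this log; bs is just an illustrative variable name):

for bs in 23530045440 17647534080; do
  # remainder must be 0, otherwise ior reports
  # "block size must be a multiple of transfer size"
  echo "$bs % 102400 = $((bs % 102400))"
done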
@@ -0,0 +1,560 @@
+ /opt/ddn/mvapich/bin/mpiexec -ppn 8 -np 128 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c01,isc17-c02,isc17-c03,isc17-c04,isc17-c05,isc17-c06,isc17-c07,isc17-c08,isc17-c09,isc17-c11,isc17-c12,isc17-c13,isc17-c14,isc17-c15,isc17-c18,isc17-c22 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 17647534080 -D 120 -a MPIIO -e -g -z -k -o /esfs/jtacquaviva/ioperf/file_write -w
+ tee -a ./output/COUNT:1#NN:16#PPN:8#API:MPIIO#T:102400.txt
IOR-3.0.1: MPI Coordinated Test of Parallel I/O

ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_0]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 0
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_48]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 48
[cli_64]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 64
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_96]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 96
[cli_16]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 16
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_32]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 32
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_112]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 112
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_80]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 80
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_120]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 120
[cli_88]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 88
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_24]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 24
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_72]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 72
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_56]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 56
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_40]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 40
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_104]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 104
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_8]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 8
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_10]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 10
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_26]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 26
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_58]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 58
[cli_42]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 42
[cli_90]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 90
[cli_74]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 74
[cli_106]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 106
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_28]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 28
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_12]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 12
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_60]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 60
[cli_44]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 44
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_92]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 92
tee: [cli_29]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 29
standard output[cli_13]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 13
[cli_30]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 30
[cli_61]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 61
[cli_45]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 45
[cli_14]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 14
[cli_46]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 46
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
: Resource temporarily unavailable
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_62]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 62

===================================================================================
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
= PID 3230 RUNNING AT isc17-c22
= EXIT CODE: 255
= CLEANING UP REMAINING PROCESSES
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
===================================================================================
[proxy:0:13@isc17-c15] HYD_pmcd_pmip_control_cmd_cb (pm/pmiserv/pmip_cb.c:912): assert (!closed) failed
[proxy:0:13@isc17-c15] HYDT_dmxu_poll_wait_for_event (tools/demux/demux_poll.c:76): callback returned error status
[proxy:0:13@isc17-c15] main (pm/pmiserv/pmip.c:206): demux engine error waiting for event
[mpiexec@isc17-c04] HYDT_bscu_wait_for_completion (tools/bootstrap/utils/bscu_wait.c:76): one of the processes terminated badly; aborting
[mpiexec@isc17-c04] HYDT_bsci_wait_for_completion (tools/bootstrap/src/bsci_wait.c:23): launcher returned error waiting for completion
[mpiexec@isc17-c04] HYD_pmci_wait_for_completion (pm/pmiserv/pmiserv_pmci.c:218): launcher returned error waiting for completion
[mpiexec@isc17-c04] main (ui/mpich/mpiexec.c:344): process manager error waiting for completion
tee: write error
+ /opt/ddn/mvapich/bin/mpiexec -ppn 8 -np 128 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c01,isc17-c02,isc17-c03,isc17-c04,isc17-c05,isc17-c06,isc17-c07,isc17-c08,isc17-c09,isc17-c11,isc17-c12,isc17-c13,isc17-c14,isc17-c15,isc17-c18,isc17-c22 /esfs/jtacquaviva/git/ime-evaluation/drop_caches.sh
+ /opt/ddn/mvapich/bin/mpiexec -ppn 8 -np 128 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c01,isc17-c02,isc17-c03,isc17-c04,isc17-c05,isc17-c06,isc17-c07,isc17-c08,isc17-c09,isc17-c11,isc17-c12,isc17-c13,isc17-c14,isc17-c15,isc17-c18,isc17-c22 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 17647534080 -D 120 -a MPIIO -e -g -z -k -o /esfs/jtacquaviva/file_read -r
+ tee -a ./output/COUNT:1#NN:16#PPN:8#API:MPIIO#T:102400.txt
IOR-3.0.1: MPI Coordinated Test of Parallel I/O

ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_0]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 0
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_32]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 32
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_48]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 48
[cli_16]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 16
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_36]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 36
[cli_38]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 38
tee: [cli_37]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 37
standard output[cli_4]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 4
[cli_5]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 5
[cli_20]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 20
[cli_39]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 39
[cli_6]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 6
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_22]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 22
: Resource temporarily unavailable
[cli_7]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 7
[cli_21]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 21
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_23]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 23
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_52]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 52
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_54]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 54
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_64]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 64
[cli_53]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 53
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
[cli_68]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 68
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_55]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 55
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_70]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 70
[cli_1]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 1
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_69]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 69
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_100]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 100
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_112]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 112
[cli_71]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 71
[cli_88]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 88
[cli_33]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 33
[cli_101]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 101
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_116]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 116
[cli_92]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 92
[cli_102]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 102
[cli_49]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 49
[cli_80]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 80
[cli_17]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 17
[cli_104]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 104
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_118]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 118
[cli_65]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 65
[cli_93]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 93
[cli_103]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 103
[cli_61]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 61
[cli_84]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 84
|
||||
[cli_108]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 108
|
||||
[cli_117]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 117
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
[cli_94]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 94
|
||||
[cli_96]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 96
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
[cli_56]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 56
|
||||
[cli_86]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 86
|
||||
[cli_109]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 109
|
||||
[cli_119]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 119
|
||||
[cli_95]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 95
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
[cli_60]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 60
|
||||
[cli_85]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 85
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
[cli_110]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 110
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
[cli_97]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 97
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
[cli_62]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 62
|
||||
[cli_87]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 87
|
||||
[cli_111]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 111
|
||||
[cli_113]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 113
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
[cli_63]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 63
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
[cli_81]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 81
|
||||
[cli_40]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 40
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
[cli_44]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 44
|
||||
[cli_8]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 8
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
[cli_45]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 45
|
||||
[cli_12]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 12
|
||||
[cli_124]: [cli_46]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 46
|
||||
[cli_13]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 13
|
||||
[cli_126]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 126
|
||||
[cli_72]: [cli_47]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 47
|
||||
[cli_14]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 14
|
||||
[cli_127]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 127
|
||||
[cli_76]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 76
|
||||
[cli_15]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 15
|
||||
[cli_120]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 120
|
||||
[cli_77]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 77
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 124
|
||||
[cli_78]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 78
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
[cli_9]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 9
|
||||
[cli_125]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 125
|
||||
aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 72
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
[cli_41]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 41
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
[cli_79]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 79
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
[cli_121]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 121
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
[cli_57]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 57
|
||||
[cli_105]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 105
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
[cli_98]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 98
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
[cli_73]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 73
|
||||
[cli_66]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 66
|
||||
[cli_89]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 89
|
||||
[cli_34]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 34
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
[cli_3]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 3
|
||||
[cli_10]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 10
|
||||
[cli_99]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 99
|
||||
[cli_114]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 114
|
||||
[cli_42]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 42
|
||||
[cli_67]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 67
|
||||
[cli_90]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 90
|
||||
[cli_35]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 35
|
||||
[cli_2]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 2
|
||||
[cli_11]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 11
|
||||
[cli_122]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 122
|
||||
[cli_50]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 50
|
||||
[cli_58]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 58
|
||||
[cli_106]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 106
|
||||
[cli_115]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 115
|
||||
[cli_43]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 43
|
||||
[cli_91]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 91
|
||||
[cli_123]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 123
|
||||
[cli_51]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 51
|
||||
[cli_59]: [cli_74]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 74
|
||||
[cli_107]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 107
|
||||
aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 59
|
||||
[cli_75]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 75
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
[cli_82]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 82
|
||||
[cli_18]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 18
|
||||
[cli_19]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 19
|
||||
[cli_83]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 83
|
||||
[cli_28]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 28
|
||||
[cli_29]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 29
|
||||
[cli_31]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 31
|
||||
[cli_24]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 24
|
||||
[cli_25]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 25
|
||||
[cli_26]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 26
|
||||
[cli_27]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 27
|
||||
[cli_30]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 30
|
||||
|
||||
===================================================================================
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
= PID 15767 RUNNING AT isc17-c15
= EXIT CODE: 255
= CLEANING UP REMAINING PROCESSES
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
===================================================================================
[proxy:0:7@isc17-c08] HYD_pmcd_pmip_control_cmd_cb (pm/pmiserv/pmip_cb.c:912): assert (!closed) failed
[proxy:0:7@isc17-c08] HYDT_dmxu_poll_wait_for_event (tools/demux/demux_poll.c:76): callback returned error status
[proxy:0:7@isc17-c08] main (pm/pmiserv/pmip.c:206): demux engine error waiting for event
[proxy:0:9@isc17-c11] HYD_pmcd_pmip_control_cmd_cb (pm/pmiserv/pmip_cb.c:912): assert (!closed) failed
[proxy:0:9@isc17-c11] HYDT_dmxu_poll_wait_for_event (tools/demux/demux_poll.c:76): callback returned error status
[proxy:0:9@isc17-c11] main (pm/pmiserv/pmip.c:206): demux engine error waiting for event
[proxy:0:10@isc17-c12] HYD_pmcd_pmip_control_cmd_cb (pm/pmiserv/pmip_cb.c:912): assert (!closed) failed
[proxy:0:10@isc17-c12] HYDT_dmxu_poll_wait_for_event (tools/demux/demux_poll.c:76): callback returned error status
[proxy:0:10@isc17-c12] main (pm/pmiserv/pmip.c:206): demux engine error waiting for event
[proxy:0:11@isc17-c13] HYD_pmcd_pmip_control_cmd_cb (pm/pmiserv/pmip_cb.c:912): assert (!closed) failed
[proxy:0:11@isc17-c13] HYDT_dmxu_poll_wait_for_event (tools/demux/demux_poll.c:76): callback returned error status
[proxy:0:11@isc17-c13] main (pm/pmiserv/pmip.c:206): demux engine error waiting for event
[proxy:0:12@isc17-c14] HYD_pmcd_pmip_control_cmd_cb (pm/pmiserv/pmip_cb.c:912): assert (!closed) failed
[proxy:0:12@isc17-c14] HYDT_dmxu_poll_wait_for_event (tools/demux/demux_poll.c:76): callback returned error status
[proxy:0:12@isc17-c14] main (pm/pmiserv/pmip.c:206): demux engine error waiting for event
[proxy:0:8@isc17-c09] HYD_pmcd_pmip_control_cmd_cb (pm/pmiserv/pmip_cb.c:912): assert (!closed) failed
[proxy:0:8@isc17-c09] HYDT_dmxu_poll_wait_for_event (tools/demux/demux_poll.c:76): callback returned error status
[proxy:0:8@isc17-c09] main (pm/pmiserv/pmip.c:206): demux engine error waiting for event
[proxy:0:14@isc17-c18] HYD_pmcd_pmip_control_cmd_cb (pm/pmiserv/pmip_cb.c:912): assert (!closed) failed
[proxy:0:14@isc17-c18] HYDT_dmxu_poll_wait_for_event (tools/demux/demux_poll.c:76): callback returned error status
[proxy:0:14@isc17-c18] main (pm/pmiserv/pmip.c:206): demux engine error waiting for event
[proxy:0:15@isc17-c22] HYD_pmcd_pmip_control_cmd_cb (pm/pmiserv/pmip_cb.c:912): assert (!closed) failed
[proxy:0:15@isc17-c22] HYDT_dmxu_poll_wait_for_event (tools/demux/demux_poll.c:76): callback returned error status
[proxy:0:15@isc17-c22] main (pm/pmiserv/pmip.c:206): demux engine error waiting for event
[mpiexec@isc17-c04] HYDT_bscu_wait_for_completion (tools/bootstrap/utils/bscu_wait.c:76): one of the processes terminated badly; aborting
[mpiexec@isc17-c04] HYDT_bsci_wait_for_completion (tools/bootstrap/src/bsci_wait.c:23): launcher returned error waiting for completion
[mpiexec@isc17-c04] HYD_pmci_wait_for_completion (pm/pmiserv/pmiserv_pmci.c:218): launcher returned error waiting for completion
[mpiexec@isc17-c04] main (ui/mpich/mpiexec.c:344): process manager error waiting for completion
tee: write error
+ set +x
/esfs/jtacquaviva/ioperf
stripe_count: 32 stripe_size: 1048576 stripe_offset: -1
@@ -0,0 +1,110 @@
+ /opt/ddn/mvapich/bin/mpiexec -ppn 8 -np 128 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c01,isc17-c02,isc17-c03,isc17-c04,isc17-c05,isc17-c06,isc17-c07,isc17-c08,isc17-c09,isc17-c11,isc17-c12,isc17-c13,isc17-c14,isc17-c15,isc17-c18,isc17-c22 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 1048576 -b 17647534080 -D 120 -a MPIIO -e -g -z -k -o /esfs/jtacquaviva/ioperf/file_write -w
+ tee -a ./output/COUNT:1#NN:16#PPN:8#API:MPIIO#T:1048576.txt
IOR-3.0.1: MPI Coordinated Test of Parallel I/O

ior WARNING: fsync() only available in POSIX. Using value of 0.
Began: Tue Oct 23 08:43:13 2018
Command line used: /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 1048576 -b 17647534080 -D 120 -a MPIIO -e -g -z -k -o /esfs/jtacquaviva/ioperf/file_write -w
Machine: Linux isc17-c01

Test 0 started: Tue Oct 23 08:43:13 2018
Summary:
api = MPIIO (version=3, subversion=0)
test filename = /esfs/jtacquaviva/ioperf/file_write
access = single-shared-file
ordering in a file = random offsets
ordering inter file= no tasks offsets
clients = 128 (8 per node)
repetitions = 3
xfersize = 1 MiB
blocksize = 16.44 GiB
aggregate filesize = 2103.75 GiB
Using stonewalling = 120 second(s)

access bw(MiB/s) block(KiB) xfer(KiB) open(s) wr/rd(s) close(s) total(s) iter
------ --------- ---------- --------- -------- -------- -------- -------- ----
WARNING: Expected aggregate file size = 2258884362240.
WARNING: Stat() of aggregate file size = 2258880167936.
WARNING: Using actual aggregate bytes moved = 533859401728.
write 4218 17233920 1024.00 0.502268 120.20 0.002527 120.70 0
WARNING: Expected aggregate file size = 2258884362240.
WARNING: Stat() of aggregate file size = 2258882265088.
WARNING: Using actual aggregate bytes moved = 532963917824.
write 4225 17233920 1024.00 0.091345 120.21 0.000960 120.30 1
WARNING: Expected aggregate file size = 2258884362240.
WARNING: Stat() of aggregate file size = 2258877022208.
WARNING: Using actual aggregate bytes moved = 529912561664.
write 4204 17233920 1024.00 0.011866 120.21 0.001168 120.22 2

Max Write: 4225.05 MiB/sec (4430.29 MB/sec)

Summary of all tests:
Operation Max(MiB) Min(MiB) Mean(MiB) StdDev Mean(s) Test# #Tasks tPN reps fPP reord reordoff reordrand seed segcnt blksiz xsize aggsize API RefNum
write 4225.05 4203.61 4215.56 8.92 120.40826 0 128 8 3 0 0 1 0 0 1 17647534080 1048576 533859401728 MPIIO 0

Finished: Tue Oct 23 08:49:20 2018
+ /opt/ddn/mvapich/bin/mpiexec -ppn 8 -np 128 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c01,isc17-c02,isc17-c03,isc17-c04,isc17-c05,isc17-c06,isc17-c07,isc17-c08,isc17-c09,isc17-c11,isc17-c12,isc17-c13,isc17-c14,isc17-c15,isc17-c18,isc17-c22 /esfs/jtacquaviva/git/ime-evaluation/drop_caches.sh
+ tee -a ./output/COUNT:1#NN:16#PPN:8#API:MPIIO#T:1048576.txt
+ /opt/ddn/mvapich/bin/mpiexec -ppn 8 -np 128 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c01,isc17-c02,isc17-c03,isc17-c04,isc17-c05,isc17-c06,isc17-c07,isc17-c08,isc17-c09,isc17-c11,isc17-c12,isc17-c13,isc17-c14,isc17-c15,isc17-c18,isc17-c22 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 1048576 -b 17647534080 -D 120 -a MPIIO -e -g -z -k -o /esfs/jtacquaviva/file_read -r
IOR-3.0.1: MPI Coordinated Test of Parallel I/O

ior WARNING: fsync() only available in POSIX. Using value of 0.
Began: Tue Oct 23 08:49:44 2018
Command line used: /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 1048576 -b 17647534080 -D 120 -a MPIIO -e -g -z -k -o /esfs/jtacquaviva/file_read -r
Machine: Linux isc17-c01

Test 0 started: Tue Oct 23 08:49:44 2018
Summary:
api = MPIIO (version=3, subversion=0)
test filename = /esfs/jtacquaviva/file_read
access = single-shared-file
ordering in a file = random offsets
ordering inter file= no tasks offsets
clients = 128 (8 per node)
repetitions = 3
xfersize = 1 MiB
blocksize = 16.44 GiB
aggregate filesize = 2103.75 GiB
Using stonewalling = 120 second(s)

access bw(MiB/s) block(KiB) xfer(KiB) open(s) wr/rd(s) close(s) total(s) iter
------ --------- ---------- --------- -------- -------- -------- -------- ----
WARNING: Expected aggregate file size = 2258884362240.
WARNING: Stat() of aggregate file size = 4517768724480.
WARNING: Using actual aggregate bytes moved = 234833838080.
read 1854.44 17233920 1024.00 0.572369 120.20 0.006264 120.77 0
WARNING: Expected aggregate file size = 2258884362240.
WARNING: Stat() of aggregate file size = 4517768724480.
WARNING: Using actual aggregate bytes moved = 237358809088.
read 1882.75 17233920 1024.00 0.009623 120.22 0.006211 120.23 1
WARNING: Expected aggregate file size = 2258884362240.
WARNING: Stat() of aggregate file size = 4517768724480.
WARNING: Using actual aggregate bytes moved = 238922235904.
read 1895.51 17233920 1024.00 0.009305 120.20 0.006212 120.21 2

Max Read: 1895.51 MiB/sec (1987.58 MB/sec)

Summary of all tests:
Operation Max(MiB) Min(MiB) Mean(MiB) StdDev Mean(s) Test# #Tasks tPN reps fPP reord reordoff reordrand seed segcnt blksiz xsize aggsize API RefNum
read 1895.51 1854.44 1877.57 17.16 120.40146 0 128 8 3 0 0 1 0 0 1 17647534080 1048576 234833838080 MPIIO 0

Finished: Tue Oct 23 08:55:45 2018
+ set +x
/esfs/jtacquaviva/ioperf
stripe_count: 32 stripe_size: 1048576 stripe_offset: -1
/esfs/jtacquaviva/ioperf/file_write
lmm_stripe_count: 8
lmm_stripe_size: 1048576
lmm_pattern: 1
lmm_layout_gen: 0
lmm_stripe_offset: 6
obdidx objid objid group
6 12593532 0xc0297c 0
0 12421922 0xbd8b22 0
2 12568270 0xbfc6ce 0
4 12510003 0xbee333 0
7 12458631 0xbe1a87 0
1 12639904 0xc0dea0 0
3 12488208 0xbe8e10 0
5 12380274 0xbce872 0
@@ -0,0 +1,110 @@
+ /opt/ddn/mvapich/bin/mpiexec -ppn 8 -np 128 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c01,isc17-c02,isc17-c03,isc17-c04,isc17-c05,isc17-c06,isc17-c07,isc17-c08,isc17-c09,isc17-c11,isc17-c12,isc17-c13,isc17-c14,isc17-c15,isc17-c18,isc17-c22 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 10485760 -b 17647534080 -D 120 -a MPIIO -e -g -z -k -o /esfs/jtacquaviva/ioperf/file_write -w
+ tee -a ./output/COUNT:1#NN:16#PPN:8#API:MPIIO#T:10485760.txt
IOR-3.0.1: MPI Coordinated Test of Parallel I/O

ior WARNING: fsync() only available in POSIX. Using value of 0.
Began: Tue Oct 23 06:34:35 2018
Command line used: /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 10485760 -b 17647534080 -D 120 -a MPIIO -e -g -z -k -o /esfs/jtacquaviva/ioperf/file_write -w
Machine: Linux isc17-c01

Test 0 started: Tue Oct 23 06:34:35 2018
Summary:
api = MPIIO (version=3, subversion=0)
test filename = /esfs/jtacquaviva/ioperf/file_write
access = single-shared-file
ordering in a file = random offsets
ordering inter file= no tasks offsets
clients = 128 (8 per node)
repetitions = 3
xfersize = 10 MiB
blocksize = 16.44 GiB
aggregate filesize = 2103.75 GiB
Using stonewalling = 120 second(s)

access bw(MiB/s) block(KiB) xfer(KiB) open(s) wr/rd(s) close(s) total(s) iter
------ --------- ---------- --------- -------- -------- -------- -------- ----
WARNING: Expected aggregate file size = 2258884362240.
WARNING: Stat() of aggregate file size = 2258873876480.
WARNING: Using actual aggregate bytes moved = 477144023040.
write 3760 17233920 10240 0.499358 120.52 0.000764 121.02 0
WARNING: Expected aggregate file size = 2258884362240.
WARNING: Stat() of aggregate file size = 2258842419200.
WARNING: Using actual aggregate bytes moved = 475445329920.
write 3760 17233920 10240 0.030122 120.55 0.000970 120.59 1
WARNING: Expected aggregate file size = 2258884362240.
WARNING: Stat() of aggregate file size = 2258831933440.
WARNING: Using actual aggregate bytes moved = 470904995840.
write 3718 17233920 10240 0.114969 120.66 0.001218 120.77 2

Max Write: 3760.16 MiB/sec (3942.81 MB/sec)

Summary of all tests:
Operation Max(MiB) Min(MiB) Mean(MiB) StdDev Mean(s) Test# #Tasks tPN reps fPP reord reordoff reordrand seed segcnt blksiz xsize aggsize API RefNum
write 3760.16 3718.40 3746.17 19.64 120.79439 0 128 8 3 0 0 1 0 0 1 17647534080 10485760 477144023040 MPIIO 0

Finished: Tue Oct 23 06:40:44 2018
+ /opt/ddn/mvapich/bin/mpiexec -ppn 8 -np 128 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c01,isc17-c02,isc17-c03,isc17-c04,isc17-c05,isc17-c06,isc17-c07,isc17-c08,isc17-c09,isc17-c11,isc17-c12,isc17-c13,isc17-c14,isc17-c15,isc17-c18,isc17-c22 /esfs/jtacquaviva/git/ime-evaluation/drop_caches.sh
+ /opt/ddn/mvapich/bin/mpiexec -ppn 8 -np 128 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c01,isc17-c02,isc17-c03,isc17-c04,isc17-c05,isc17-c06,isc17-c07,isc17-c08,isc17-c09,isc17-c11,isc17-c12,isc17-c13,isc17-c14,isc17-c15,isc17-c18,isc17-c22 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 10485760 -b 17647534080 -D 120 -a MPIIO -e -g -z -k -o /esfs/jtacquaviva/file_read -r
+ tee -a ./output/COUNT:1#NN:16#PPN:8#API:MPIIO#T:10485760.txt
IOR-3.0.1: MPI Coordinated Test of Parallel I/O

ior WARNING: fsync() only available in POSIX. Using value of 0.
Began: Tue Oct 23 06:41:08 2018
Command line used: /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 10485760 -b 17647534080 -D 120 -a MPIIO -e -g -z -k -o /esfs/jtacquaviva/file_read -r
Machine: Linux isc17-c01

Test 0 started: Tue Oct 23 06:41:08 2018
Summary:
api = MPIIO (version=3, subversion=0)
test filename = /esfs/jtacquaviva/file_read
access = single-shared-file
ordering in a file = random offsets
ordering inter file= no tasks offsets
clients = 128 (8 per node)
repetitions = 3
xfersize = 10 MiB
blocksize = 16.44 GiB
aggregate filesize = 2103.75 GiB
Using stonewalling = 120 second(s)

access bw(MiB/s) block(KiB) xfer(KiB) open(s) wr/rd(s) close(s) total(s) iter
------ --------- ---------- --------- -------- -------- -------- -------- ----
WARNING: Expected aggregate file size = 2258884362240.
WARNING: Stat() of aggregate file size = 4517768724480.
WARNING: Using actual aggregate bytes moved = 419566714880.
read 3307.83 17233920 10240 0.562957 120.40 0.004821 120.96 0
WARNING: Expected aggregate file size = 2258884362240.
WARNING: Stat() of aggregate file size = 4517768724480.
WARNING: Using actual aggregate bytes moved = 423268188160.
read 3349.56 17233920 10240 0.010286 120.50 0.004934 120.51 1
WARNING: Expected aggregate file size = 2258884362240.
WARNING: Stat() of aggregate file size = 4517768724480.
WARNING: Using actual aggregate bytes moved = 424683765760.
read 3361.33 17233920 10240 0.008095 120.49 0.004823 120.49 2

Max Read: 3361.33 MiB/sec (3524.62 MB/sec)

Summary of all tests:
Operation Max(MiB) Min(MiB) Mean(MiB) StdDev Mean(s) Test# #Tasks tPN reps fPP reord reordoff reordrand seed segcnt blksiz xsize aggsize API RefNum
read 3361.33 3307.83 3339.57 22.95 120.65556 0 128 8 3 0 0 1 0 0 1 17647534080 10485760 419566714880 MPIIO 0

Finished: Tue Oct 23 06:47:10 2018
+ set +x
/esfs/jtacquaviva/ioperf
stripe_count: 32 stripe_size: 1048576 stripe_offset: -1
/esfs/jtacquaviva/ioperf/file_write
lmm_stripe_count: 8
lmm_stripe_size: 1048576
lmm_pattern: 1
lmm_layout_gen: 0
lmm_stripe_offset: 0
obdidx objid objid group
0 12420917 0xbd8735 0
2 12567264 0xbfc2e0 0
4 12508987 0xbedf3b 0
7 12457629 0xbe169d 0
1 12638900 0xc0dab4 0
3 12487205 0xbe8a25 0
5 12379265 0xbce481 0
6 12592522 0xc0258a 0
@@ -0,0 +1,501 @@
+ /opt/ddn/mvapich/bin/mpiexec -ppn 8 -np 128 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c01,isc17-c02,isc17-c03,isc17-c04,isc17-c05,isc17-c06,isc17-c07,isc17-c08,isc17-c09,isc17-c11,isc17-c12,isc17-c13,isc17-c14,isc17-c15,isc17-c18,isc17-c22 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 17647534080 -D 120 -a POSIX -F -e -g -z -k -o /esfs/jtacquaviva/ioperf/file_write -w
+ tee -a ./output/COUNT:1#NN:16#PPN:8#API:POSIX#T:102400.txt
IOR-3.0.1: MPI Coordinated Test of Parallel I/O

ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[... the same "ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)" message is repeated, followed by "[cli_N]: aborting job:" and "application called MPI_Abort(MPI_COMM_WORLD, -1) - process N" for ranks 0, 8, 16, 24, 32, 40, 48, 56, 64, 72, 80, 88, 96, 104, 112 and 120 ...]

===================================================================================
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
= PID 34042 RUNNING AT isc17-c09
= EXIT CODE: 255
= CLEANING UP REMAINING PROCESSES
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
===================================================================================
[proxy:0:7@isc17-c08] HYD_pmcd_pmip_control_cmd_cb (pm/pmiserv/pmip_cb.c:912): assert (!closed) failed
[proxy:0:7@isc17-c08] HYDT_dmxu_poll_wait_for_event (tools/demux/demux_poll.c:76): callback returned error status
[proxy:0:7@isc17-c08] main (pm/pmiserv/pmip.c:206): demux engine error waiting for event
[proxy:0:11@isc17-c13] HYD_pmcd_pmip_control_cmd_cb (pm/pmiserv/pmip_cb.c:912): assert (!closed) failed
[proxy:0:11@isc17-c13] HYDT_dmxu_poll_wait_for_event (tools/demux/demux_poll.c:76): callback returned error status
[proxy:0:11@isc17-c13] main (pm/pmiserv/pmip.c:206): demux engine error waiting for event
[proxy:0:9@isc17-c11] HYD_pmcd_pmip_control_cmd_cb (pm/pmiserv/pmip_cb.c:912): assert (!closed) failed
[proxy:0:9@isc17-c11] HYDT_dmxu_poll_wait_for_event (tools/demux/demux_poll.c:76): callback returned error status
[proxy:0:9@isc17-c11] main (pm/pmiserv/pmip.c:206): demux engine error waiting for event
[proxy:0:10@isc17-c12] HYD_pmcd_pmip_control_cmd_cb (pm/pmiserv/pmip_cb.c:912): assert (!closed) failed
[proxy:0:10@isc17-c12] HYDT_dmxu_poll_wait_for_event (tools/demux/demux_poll.c:76): callback returned error status
[proxy:0:10@isc17-c12] main (pm/pmiserv/pmip.c:206): demux engine error waiting for event
[proxy:0:12@isc17-c14] HYD_pmcd_pmip_control_cmd_cb (pm/pmiserv/pmip_cb.c:912): assert (!closed) failed
[proxy:0:12@isc17-c14] HYDT_dmxu_poll_wait_for_event (tools/demux/demux_poll.c:76): callback returned error status
[proxy:0:12@isc17-c14] main (pm/pmiserv/pmip.c:206): demux engine error waiting for event
[proxy:0:14@isc17-c18] HYD_pmcd_pmip_control_cmd_cb (pm/pmiserv/pmip_cb.c:912): assert (!closed) failed
[proxy:0:14@isc17-c18] HYDT_dmxu_poll_wait_for_event (tools/demux/demux_poll.c:76): callback returned error status
[proxy:0:14@isc17-c18] main (pm/pmiserv/pmip.c:206): demux engine error waiting for event
[proxy:0:15@isc17-c22] HYD_pmcd_pmip_control_cmd_cb (pm/pmiserv/pmip_cb.c:912): assert (!closed) failed
[proxy:0:15@isc17-c22] HYDT_dmxu_poll_wait_for_event (tools/demux/demux_poll.c:76): callback returned error status
[proxy:0:15@isc17-c22] main (pm/pmiserv/pmip.c:206): demux engine error waiting for event
[proxy:0:2@isc17-c03] HYD_pmcd_pmip_control_cmd_cb (pm/pmiserv/pmip_cb.c:912): assert (!closed) failed
[proxy:0:2@isc17-c03] HYDT_dmxu_poll_wait_for_event (tools/demux/demux_poll.c:76): callback returned error status
[proxy:0:2@isc17-c03] main (pm/pmiserv/pmip.c:256): demux engine error waiting for event
[proxy:0:4@isc17-c05] HYD_pmcd_pmip_control_cmd_cb (pm/pmiserv/pmip_cb.c:912): assert (!closed) failed
[proxy:0:4@isc17-c05] HYDT_dmxu_poll_wait_for_event (tools/demux/demux_poll.c:76): callback returned error status
[proxy:0:4@isc17-c05] main (pm/pmiserv/pmip.c:256): demux engine error waiting for event
[mpiexec@isc17-c04] HYDT_bscu_wait_for_completion (tools/bootstrap/utils/bscu_wait.c:76): one of the processes terminated badly; aborting
[mpiexec@isc17-c04] HYDT_bsci_wait_for_completion (tools/bootstrap/src/bsci_wait.c:23): launcher returned error waiting for completion
[mpiexec@isc17-c04] HYD_pmci_wait_for_completion (pm/pmiserv/pmiserv_pmci.c:218): launcher returned error waiting for completion
[mpiexec@isc17-c04] main (ui/mpich/mpiexec.c:344): process manager error waiting for completion
+ /opt/ddn/mvapich/bin/mpiexec -ppn 8 -np 128 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c01,isc17-c02,isc17-c03,isc17-c04,isc17-c05,isc17-c06,isc17-c07,isc17-c08,isc17-c09,isc17-c11,isc17-c12,isc17-c13,isc17-c14,isc17-c15,isc17-c18,isc17-c22 /esfs/jtacquaviva/git/ime-evaluation/drop_caches.sh
+ tee -a ./output/COUNT:1#NN:16#PPN:8#API:POSIX#T:102400.txt
+ /opt/ddn/mvapich/bin/mpiexec -ppn 8 -np 128 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c01,isc17-c02,isc17-c03,isc17-c04,isc17-c05,isc17-c06,isc17-c07,isc17-c08,isc17-c09,isc17-c11,isc17-c12,isc17-c13,isc17-c14,isc17-c15,isc17-c18,isc17-c22 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 17647534080 -D 120 -a POSIX -F -e -g -z -k -o /esfs/jtacquaviva/indread16/file -r
IOR-3.0.1: MPI Coordinated Test of Parallel I/O

ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
[cli_0]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 0
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
[cli_64]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 64
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
[cli_32]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 32
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
[cli_80]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 80
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
[cli_48]: ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 48
|
||||
[cli_96]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 96
|
||||
[cli_112]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 112
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
[cli_16]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 16
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
[cli_88]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 88
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
[cli_72]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 72
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
[cli_120]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 120
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
[cli_40]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 40
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
[cli_44]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 44
|
||||
[cli_124]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 124
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
[cli_56]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 56
|
||||
[cli_84]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 84
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
[cli_116]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 116
|
||||
[cli_36]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 36
|
||||
[cli_4]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 4
|
||||
[cli_20]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 20
|
||||
[cli_76]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 76
|
||||
[cli_100]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 100
|
||||
[cli_92]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 92
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
[cli_60]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 60
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
[cli_52]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 52
|
||||
[cli_42]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 42
|
||||
[cli_122]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 122
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
[cli_82]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 82
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
[cli_114]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 114
|
||||
[cli_34]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 34
|
||||
[cli_50]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 50
|
||||
[cli_2]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 2
|
||||
[cli_18]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 18
[cli_74]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 74
[cli_98]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 98
[cli_90]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 90
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
[cli_58]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 58
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_8]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 8
[cli_46]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 46
[cli_126]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 126
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_86]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 86
[cli_118]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 118
[cli_38]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 38
[cli_54]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 54
[cli_6]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 6
[cli_12]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 12
[cli_22]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 22
[cli_78]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 78
[cli_102]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 102
[cli_94]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 94
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_62]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 62
[cli_10]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 10
[cli_106]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 106
[cli_87]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 87
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_55]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 55
[cli_23]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 23
[cli_47]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 47
[cli_127]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 127
[cli_110]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 110
[cli_119]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 119
[cli_39]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 39
[cli_7]: [cli_14]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 14
[cli_43]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 43
[cli_123]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 123
[cli_79]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 79
[cli_103]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 103
[cli_95]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 95
[cli_104]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 104
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
[cli_63]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 63
[cli_35]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 35
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 7
[cli_75]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 75
[cli_99]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 99
[cli_91]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 91
[cli_108]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 108
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_59]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 59
[cli_83]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 83
[cli_115]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 115
[cli_51]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 51
[cli_3]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 3
[cli_19]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 19
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_11]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 11
[cli_41]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 41
[cli_121]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 121
[cli_85]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 85
[cli_117]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 117
[cli_37]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 37
[cli_53]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 53
[cli_15]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 15
[cli_21]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 21
[cli_45]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 45
[cli_125]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 125
[cli_73]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 73
[cli_97]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 97
[cli_89]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 89
[cli_111]: ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_57]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 57
[cli_81]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 81
[cli_113]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 113
[cli_33]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 33
[cli_49]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 49
[cli_5]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 5
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_17]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 17
[cli_77]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 77
[cli_101]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 101
[cli_93]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 93
aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 111
[cli_61]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 61
[cli_1]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 1
[cli_107]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 107
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_9]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 9
[cli_13]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 13
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_105]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 105
[cli_109]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 109
[cli_26]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 26
[cli_28]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 28
[cli_30]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 30
[cli_31]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 31
[cli_24]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 24
[cli_25]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 25
[cli_27]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 27
[cli_29]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 29

===================================================================================
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
= PID 15529 RUNNING AT isc17-c15
= EXIT CODE: 255
= CLEANING UP REMAINING PROCESSES
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
===================================================================================
[proxy:0:7@isc17-c08] HYD_pmcd_pmip_control_cmd_cb (pm/pmiserv/pmip_cb.c:912): assert (!closed) failed
[proxy:0:7@isc17-c08] HYDT_dmxu_poll_wait_for_event (tools/demux/demux_poll.c:76): callback returned error status
[proxy:0:7@isc17-c08] main (pm/pmiserv/pmip.c:206): demux engine error waiting for event
[proxy:0:9@isc17-c11] HYD_pmcd_pmip_control_cmd_cb (pm/pmiserv/pmip_cb.c:912): assert (!closed) failed
[proxy:0:9@isc17-c11] HYDT_dmxu_poll_wait_for_event (tools/demux/demux_poll.c:76): callback returned error status
[proxy:0:9@isc17-c11] main (pm/pmiserv/pmip.c:206): demux engine error waiting for event
[proxy:0:10@isc17-c12] HYD_pmcd_pmip_control_cmd_cb (pm/pmiserv/pmip_cb.c:912): assert (!closed) failed
[proxy:0:10@isc17-c12] HYDT_dmxu_poll_wait_for_event (tools/demux/demux_poll.c:76): callback returned error status
[proxy:0:10@isc17-c12] main (pm/pmiserv/pmip.c:206): demux engine error waiting for event
[proxy:0:11@isc17-c13] HYD_pmcd_pmip_control_cmd_cb (pm/pmiserv/pmip_cb.c:912): assert (!closed) failed
[proxy:0:11@isc17-c13] HYDT_dmxu_poll_wait_for_event (tools/demux/demux_poll.c:76): callback returned error status
[proxy:0:11@isc17-c13] main (pm/pmiserv/pmip.c:206): demux engine error waiting for event
[proxy:0:12@isc17-c14] HYD_pmcd_pmip_control_cmd_cb (pm/pmiserv/pmip_cb.c:912): assert (!closed) failed
[proxy:0:12@isc17-c14] HYDT_dmxu_poll_wait_for_event (tools/demux/demux_poll.c:76): callback returned error status
[proxy:0:12@isc17-c14] main (pm/pmiserv/pmip.c:206): demux engine error waiting for event
[proxy:0:8@isc17-c09] HYD_pmcd_pmip_control_cmd_cb (pm/pmiserv/pmip_cb.c:912): assert (!closed) failed
[proxy:0:8@isc17-c09] HYDT_dmxu_poll_wait_for_event (tools/demux/demux_poll.c:76): callback returned error status
[proxy:0:8@isc17-c09] main (pm/pmiserv/pmip.c:206): demux engine error waiting for event
[proxy:0:14@isc17-c18] HYD_pmcd_pmip_control_cmd_cb (pm/pmiserv/pmip_cb.c:912): assert (!closed) failed
[proxy:0:14@isc17-c18] HYDT_dmxu_poll_wait_for_event (tools/demux/demux_poll.c:76): callback returned error status
[proxy:0:14@isc17-c18] main (pm/pmiserv/pmip.c:206): demux engine error waiting for event
[proxy:0:15@isc17-c22] HYD_pmcd_pmip_control_cmd_cb (pm/pmiserv/pmip_cb.c:912): assert (!closed) failed
[proxy:0:15@isc17-c22] HYDT_dmxu_poll_wait_for_event (tools/demux/demux_poll.c:76): callback returned error status
[proxy:0:15@isc17-c22] main (pm/pmiserv/pmip.c:206): demux engine error waiting for event
[proxy:0:0@isc17-c01] HYD_pmcd_pmip_control_cmd_cb (pm/pmiserv/pmip_cb.c:912): assert (!closed) failed
[proxy:0:0@isc17-c01] HYDT_dmxu_poll_wait_for_event (tools/demux/demux_poll.c:76): callback returned error status
[proxy:0:0@isc17-c01] main (pm/pmiserv/pmip.c:256): demux engine error waiting for event
[mpiexec@isc17-c04] HYDT_bscu_wait_for_completion (tools/bootstrap/utils/bscu_wait.c:76): one of the processes terminated badly; aborting
[mpiexec@isc17-c04] HYDT_bsci_wait_for_completion (tools/bootstrap/src/bsci_wait.c:23): launcher returned error waiting for completion
[mpiexec@isc17-c04] HYD_pmci_wait_for_completion (pm/pmiserv/pmiserv_pmci.c:218): launcher returned error waiting for completion
[mpiexec@isc17-c04] main (ui/mpich/mpiexec.c:344): process manager error waiting for completion
+ set +x
/esfs/jtacquaviva/ioperf
stripe_count: 32 stripe_size: 1048576 stripe_offset: -1
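As an aside, the repeated "block size must be a multiple of transfer size" aborts above are consistent with the -b value not being evenly divisible by the -t value passed to ior (141180272640 / 102400 leaves a remainder of 61440). A minimal sketch of a pre-flight guard for the driver script, assuming hypothetical TSIZE and BSIZE variables holding the intended -t and -b values in bytes:

# Hypothetical pre-flight check; TSIZE and BSIZE are placeholder names, not variables from the original script.
TSIZE=102400
BSIZE=141180272640
if [ $(( BSIZE % TSIZE )) -ne 0 ]; then
    # Round the block size down to the nearest multiple of the transfer size,
    # so ior does not abort at the consistency check in ior.c:2293.
    BSIZE=$(( (BSIZE / TSIZE) * TSIZE ))
fi
echo "using -t $TSIZE -b $BSIZE"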